Skip to content

Commit

Permalink
Merge branch 'master' into fromfmuensemble
Browse files Browse the repository at this point in the history
  • Loading branch information
berland authored Jan 22, 2021
2 parents 199f296 + 5213f4e commit 1b3e5f4
Show file tree
Hide file tree
Showing 7 changed files with 68 additions and 18 deletions.
13 changes: 9 additions & 4 deletions ecl2df/faults.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import pandas as pd

from .eclfiles import EclFiles
from .common import parse_opmio_deckrecord
from .common import parse_opmio_deckrecord, write_dframe_stdout_file

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -76,6 +76,11 @@ def faults_main(args):
faults_df = df(deck)
if faults_df.empty:
logger.warning("Empty FAULT data, not written to disk!")
else:
faults_df.to_csv(args.output, index=False)
print("Wrote to " + args.output)
return
write_dframe_stdout_file(
faults_df,
args.output,
index=False,
caller_logger=logger,
logstr="Wrote to {}".format(args.output),
)
10 changes: 9 additions & 1 deletion ecl2df/nnc.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from pathlib import Path

from ecl2df import common, EclFiles, grid, __version__
from ecl2df.common import write_dframe_stdout_file

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -278,5 +279,12 @@ def nnc_main(args):
nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
if nncdf.empty:
logger.warning("Empty NNC dataframe being written to disk!")
return
write_dframe_stdout_file(
nncdf,
args.output,
index=False,
caller_logger=logger,
logstr="Wrote to {}".format(args.output),
)
nncdf.to_csv(args.output, index=False)
print("Wrote to " + args.output)
12 changes: 9 additions & 3 deletions ecl2df/wcon.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import pandas as pd

from .eclfiles import EclFiles
from .common import parse_opmio_date_rec, OPMKEYWORDS
from .common import parse_opmio_date_rec, OPMKEYWORDS, write_dframe_stdout_file

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -153,5 +153,11 @@ def wcon_main(args):
wcon_df = df(deck)
if wcon_df.empty:
logger.warning("Empty wcon dataframe being written to disk!")
wcon_df.to_csv(args.output, index=False)
print("Wrote to " + args.output)
return
write_dframe_stdout_file(
wcon_df,
args.output,
index=False,
caller_logger=logger,
logstr="Wrote to {}".format(args.output),
)
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
"pre-commit",
"pytest",
"pytest-cov",
"pytest-mock",
]

DOCS_REQUIREMENTS = [
Expand Down
16 changes: 13 additions & 3 deletions tests/test_faults.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
"""Test module for nnc2df"""

import sys
import io
from pathlib import Path
import subprocess

import pandas as pd

Expand Down Expand Up @@ -60,12 +61,21 @@ def test_multiplestr2df():
assert len(faultsdf.loc["C"]) == 6


def test_main_subparser(tmpdir, mocker):
    """Test the ``ecl2csv faults`` command line interface end-to-end.

    Writes a CSV to a temporary directory and verifies it is non-empty.
    """
    tmpcsvfile = tmpdir / "faultsdf.csv"
    # Patch argv through pytest-mock so the original sys.argv is
    # restored automatically after the test (no global mutation).
    mocker.patch("sys.argv", ["ecl2csv", "faults", DATAFILE, "-o", str(tmpcsvfile)])
    ecl2csv.main()

    assert Path(tmpcsvfile).is_file()
    disk_df = pd.read_csv(str(tmpcsvfile))
    assert not disk_df.empty


def test_magic_stdout():
    """Check that CSV written to stdout (via ``-o -``) parses as a dataframe."""
    command = ["ecl2csv", "faults", "-o", "-", DATAFILE]
    completed = subprocess.run(command, check=True, stdout=subprocess.PIPE)
    csv_text = completed.stdout.decode()
    dframe = pd.read_csv(io.StringIO(csv_text))
    assert not dframe.empty
18 changes: 14 additions & 4 deletions tests/test_nnc.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
"""Test module for nnc2df"""

import sys
import io
from pathlib import Path
import subprocess

import pandas as pd
import pytest

from ecl2df import nnc, faults, ecl2csv, trans
from ecl2df import nnc, faults, trans, ecl2csv
from ecl2df.eclfiles import EclFiles

TESTDIR = Path(__file__).absolute().parent
Expand Down Expand Up @@ -97,14 +98,23 @@ def test_df2ecl_editnnc(tmpdir):
print(nnc.df2ecl_editnnc(nnc.df(eclfiles).head(4).assign(TRANM=0.1)))


def test_main(tmpdir, mocker):
    """Test the ``ecl2csv nnc`` command line interface end-to-end.

    Writes a CSV to a temporary directory and verifies it contains
    the expected NNC columns.
    """
    tmpcsvfile = tmpdir.join("nnc.csv")
    # Patch argv through pytest-mock so the original sys.argv is
    # restored automatically after the test (no global mutation).
    mocker.patch("sys.argv", ["ecl2csv", "nnc", "-v", DATAFILE, "-o", str(tmpcsvfile)])
    ecl2csv.main()

    assert Path(tmpcsvfile).is_file()
    disk_df = pd.read_csv(str(tmpcsvfile))
    assert not disk_df.empty
    assert "I1" in disk_df
    assert "TRAN" in disk_df


def test_magic_stdout():
    """Check that CSV written to stdout (via ``-o -``) parses as a dataframe."""
    command = ["ecl2csv", "nnc", "-o", "-", DATAFILE]
    completed = subprocess.run(command, check=True, stdout=subprocess.PIPE)
    csv_text = completed.stdout.decode()
    dframe = pd.read_csv(io.StringIO(csv_text))
    assert not dframe.empty
16 changes: 13 additions & 3 deletions tests/test_wcon.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
"""Test module for wcon"""

import sys
import io
from pathlib import Path
import subprocess

import pandas as pd

Expand Down Expand Up @@ -141,12 +142,21 @@ def test_tstep():
assert "2001-05-07" in dates


def test_main_subparsers(tmpdir, mocker):
    """Test the ``ecl2csv wcon`` command line interface end-to-end.

    Writes a CSV to a temporary directory and verifies it is non-empty.
    """
    tmpcsvfile = tmpdir / ".TMP-wcondf.csv"
    # Patch argv through pytest-mock so the original sys.argv is
    # restored automatically after the test (no global mutation).
    mocker.patch("sys.argv", ["ecl2csv", "wcon", DATAFILE, "-o", str(tmpcsvfile)])
    ecl2csv.main()

    assert Path(tmpcsvfile).is_file()
    disk_df = pd.read_csv(str(tmpcsvfile))
    assert not disk_df.empty


def test_magic_stdout():
    """Check that CSV written to stdout (via ``-o -``) parses as a dataframe."""
    command = ["ecl2csv", "wcon", "-o", "-", DATAFILE]
    completed = subprocess.run(command, check=True, stdout=subprocess.PIPE)
    csv_text = completed.stdout.decode()
    dframe = pd.read_csv(io.StringIO(csv_text))
    assert not dframe.empty

0 comments on commit 1b3e5f4

Please sign in to comment.