Cleanup of auto tests and some PEP8 reformatting.

develop
Joseph D Hughes 2016-11-29 14:32:26 -05:00
parent 370141cf42
commit b6a36b5e50
33 changed files with 902 additions and 649 deletions

View File

@ -1,5 +1,12 @@
def test_import():
import flopy
try:
import flopy
except:
fail = True
assert fail is False, 'could not import flopy'
return
if __name__ == '__main__':
test_import()
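As rendered here, the new test only assigns fail inside the except branch; a minimal sketch of the same import-failure pattern, assuming fail is initialized to False before the try so the assert is always defined:

import sys

def test_import():
    fail = False
    try:
        import flopy  # noqa: F401 -- only whether the import succeeds matters
    except ImportError:
        fail = True
    assert fail is False, 'could not import flopy'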

View File

@ -1,5 +1,5 @@
import flopy
import os.path
import os
def test_loadfreyberg():
cwd = os.getcwd()

View File

@ -1,5 +1,6 @@
# Test export module
import sys
sys.path.insert(0, '..')
import copy
import os
@ -11,6 +12,22 @@ namfiles = [namfile for namfile in os.listdir(pth) if namfile.endswith('.nam')]
# skip = ["MNW2-Fig28.nam", "testsfr2.nam", "testsfr2_tab.nam"]
skip = []
npth = os.path.join('temp', 't007', 'netcdf')
# make the directory if it does not exist
if not os.path.isdir(npth):
os.makedirs(npth)
spth = os.path.join('temp', 't007', 'shapefile')
# make the directory if it does not exist
if not os.path.isdir(spth):
os.makedirs(spth)
tpth = os.path.join('temp', 't007')
# make the directory if it does not exist
if not os.path.isdir(tpth):
os.makedirs(tpth)
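The three make-if-missing stanzas above repeat the same idiom; a hypothetical helper, not part of the commit, that captures it once:

import os

def ensure_dir(pth):
    # create the directory (and any parents) only when it is missing
    if not os.path.isdir(pth):
        os.makedirs(pth)
    return pth

npth = ensure_dir(os.path.join('temp', 't007', 'netcdf'))
spth = ensure_dir(os.path.join('temp', 't007', 'shapefile'))
tpth = ensure_dir(os.path.join('temp', 't007'))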
def export_netcdf(namfile):
if namfile in skip:
return
@ -34,9 +51,9 @@ def export_netcdf(namfile):
except:
return
fnc = m.export(os.path.join('temp', m.name + '.nc'))
fnc = m.export(os.path.join(npth, m.name + '.nc'))
fnc.write()
fnc_name = os.path.join('temp', m.name + '.nc')
fnc_name = os.path.join(npth, m.name + '.nc')
try:
fnc = m.export(fnc_name)
fnc.write()
@ -61,7 +78,7 @@ def export_shapefile(namfile):
assert m, 'Could not load namefile {}'.format(namfile)
assert isinstance(m, flopy.modflow.Modflow)
fnc_name = os.path.join('temp', m.name + '.shp')
fnc_name = os.path.join(spth, m.name + '.shp')
try:
fnc = m.export(fnc_name)
@ -97,7 +114,7 @@ def test_export_output():
hds_pth = os.path.join(model_ws, "freyberg.githds")
hds = flopy.utils.HeadFile(hds_pth)
out_pth = os.path.join("temp", "freyberg.out.nc")
out_pth = os.path.join(npth, "freyberg.out.nc")
nc = flopy.export.utils.output_helper(out_pth, ml,
{"freyberg.githds": hds})
var = nc.nc.variables.get("head")
@ -125,10 +142,10 @@ def test_mbase_sr():
print(ml.sr)
assert ml.sr.xul == 500
assert ml.sr.yul == 10
ml.model_ws = "temp"
ml.model_ws = tpth
ml.write_input()
ml1 = flopy.modflow.Modflow.load("test.nam", model_ws="temp")
ml1 = flopy.modflow.Modflow.load("test.nam", model_ws=ml.model_ws)
assert ml1.sr == ml.sr
assert ml1.start_datetime == ml.start_datetime
@ -158,7 +175,7 @@ def test_free_format_flag():
bas.ifrefm = True
assert ms.free_format_input == bas.ifrefm
ms.model_ws = "temp"
ms.model_ws = tpth
ms.write_input()
ms1 = flopy.modflow.Modflow.load(ms.namefile, model_ws=ms.model_ws)
assert ms1.free_format_input == ms.free_format_input
@ -241,7 +258,7 @@ def test_sr():
assert ms.start_datetime == "1-1-2016"
assert ms.dis.start_datetime == "1-1-2016"
ms.model_ws = "temp"
ms.model_ws = tpth
ms.write_input()
ms1 = flopy.modflow.Modflow.load(ms.namefile, model_ws=ms.model_ws)
assert ms1.sr == ms.sr
@ -253,56 +270,69 @@ def test_sr():
ms1.sr = sr
assert ms1.sr == ms.sr
def test_sr_scaling():
nlay, nrow, ncol = 1, 10, 5
delr, delc = 250, 500
xll, yll = 286.80, 29.03
# test scaling of length units
ms2 = flopy.modflow.Modflow()
dis = flopy.modflow.ModflowDis(ms2, nlay=nlay, nrow=nrow, ncol=ncol, delr=delr,
dis = flopy.modflow.ModflowDis(ms2, nlay=nlay, nrow=nrow, ncol=ncol,
delr=delr,
delc=delc)
ms2.sr = flopy.utils.SpatialReference(delr=ms2.dis.delr.array, delc=ms2.dis.delc.array, lenuni=3,
ms2.sr = flopy.utils.SpatialReference(delr=ms2.dis.delr.array,
delc=ms2.dis.delc.array, lenuni=3,
xll=xll, yll=yll, rotation=0)
ms2.sr.epsg = 26715
ms2.dis.export(os.path.join('temp', 'dis2.shp'))
ms2.dis.export(os.path.join(spth, 'dis2.shp'))
ms3 = flopy.modflow.Modflow()
dis = flopy.modflow.ModflowDis(ms3, nlay=nlay, nrow=nrow, ncol=ncol, delr=delr,
dis = flopy.modflow.ModflowDis(ms3, nlay=nlay, nrow=nrow, ncol=ncol,
delr=delr,
delc=delc)
ms3.sr = flopy.utils.SpatialReference(delr=ms3.dis.delr.array, delc=ms2.dis.delc.array, lenuni=3,
ms3.sr = flopy.utils.SpatialReference(delr=ms3.dis.delr.array,
delc=ms2.dis.delc.array, lenuni=3,
length_multiplier=.3048,
xll=xll, yll=yll, rotation=0)
ms3.dis.export(os.path.join('temp', 'dis3.shp'), epsg=26715)
assert np.array_equal(ms3.sr.get_vertices(nrow-1, 0)[1], [ms3.sr.xll, ms3.sr.yll])
assert np.array_equal(ms3.sr.get_vertices(nrow-1, 0)[1], ms2.sr.get_vertices(nrow-1, 0)[1])
xur, yur = ms3.sr.get_vertices(0, ncol-1)[3]
ms3.dis.export(os.path.join(spth, 'dis3.shp'), epsg=26715)
assert np.array_equal(ms3.sr.get_vertices(nrow - 1, 0)[1],
[ms3.sr.xll, ms3.sr.yll])
assert np.array_equal(ms3.sr.get_vertices(nrow - 1, 0)[1],
ms2.sr.get_vertices(nrow - 1, 0)[1])
xur, yur = ms3.sr.get_vertices(0, ncol - 1)[3]
assert xur == xll + ms3.sr.length_multiplier * delr * ncol
assert yur == yll + ms3.sr.length_multiplier * delc * nrow
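The two corner assertions above say the upper-right grid corner is offset from (xll, yll) by the grid extent scaled by length_multiplier; a worked check with the values used in the test:

xll, yll = 286.80, 29.03
delr, delc = 250, 500
nrow, ncol = 10, 5
length_multiplier = .3048
xur = xll + length_multiplier * delr * ncol  # 286.80 + 381.0  = 667.80
yur = yll + length_multiplier * delc * nrow  # 29.03  + 1524.0 = 1553.03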
def test_rotation():
m = flopy.modflow.Modflow(rotation=20.)
dis = flopy.modflow.ModflowDis(m, nlay=1, nrow=40, ncol=20,
delr=250.,
delc=250., top=10, botm=0)
xul, yul = 500000, 2934000
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array, delc=m.dis.delc.array,
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array,
delc=m.dis.delc.array,
xul=xul, yul=yul, rotation=45.)
xll, yll = m.sr.xll, m.sr.yll
assert m.dis.sr.xgrid[0, 0] == xul
assert m.dis.sr.ygrid[0, 0] == yul
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array, delc=m.dis.delc.array,
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array,
delc=m.dis.delc.array,
xul=xul, yul=yul, rotation=-45.)
assert m.dis.sr.xgrid[0, 0] == xul
assert m.dis.sr.ygrid[0, 0] == yul
xll2, yll2 = m.sr.xll, m.sr.yll
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array, delc=m.dis.delc.array,
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array,
delc=m.dis.delc.array,
xll=xll2, yll=yll2, rotation=-45.)
assert m.dis.sr.xgrid[0, 0] == xul
assert m.dis.sr.ygrid[0, 0] == yul
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array, delc=m.dis.delc.array,
m.sr = flopy.utils.SpatialReference(delr=m.dis.delr.array,
delc=m.dis.delc.array,
xll=xll, yll=yll, rotation=45.)
assert m.dis.sr.xgrid[0, 0] == xul
assert m.dis.sr.ygrid[0, 0] == yul
def test_map_rotation():
m = flopy.modflow.Modflow(rotation=20.)
dis = flopy.modflow.ModflowDis(m, nlay=1, nrow=40, ncol=20,
@ -310,23 +340,28 @@ def test_map_rotation():
delc=250., top=10, botm=0)
# transformation assigned by arguments
xul, yul, rotation = 500000, 2934000, 45
modelmap = flopy.plot.ModelMap(model=m, xul=xul, yul=yul, rotation=rotation)
modelmap = flopy.plot.ModelMap(model=m, xul=xul, yul=yul,
rotation=rotation)
lc = modelmap.plot_grid()
xll, yll = modelmap.sr.xll, modelmap.sr.yll
def check_vertices():
xllp, yllp = lc._paths[0].vertices[0]
xulp, yulp = lc._paths[0].vertices[1]
assert (xllp, yllp) == (xll, yll)
assert (xulp, yulp) == (xul, yul)
check_vertices()
modelmap = flopy.plot.ModelMap(model=m, xll=xll, yll=yll, rotation=rotation)
modelmap = flopy.plot.ModelMap(model=m, xll=xll, yll=yll,
rotation=rotation)
lc = modelmap.plot_grid()
check_vertices()
# transformation in m.sr
sr = flopy.utils.SpatialReference(delr=m.dis.delr.array, delc=m.dis.delc.array,
xll=xll, yll=yll, rotation=rotation)
sr = flopy.utils.SpatialReference(delr=m.dis.delr.array,
delc=m.dis.delc.array,
xll=xll, yll=yll, rotation=rotation)
m.sr = copy.deepcopy(sr)
modelmap = flopy.plot.ModelMap(model=m)
lc = modelmap.plot_grid()
@ -357,9 +392,10 @@ def test_netcdf_classmethods():
ml = flopy.modflow.Modflow.load(nam_file, model_ws=model_ws, check=False,
verbose=True, load_only=[])
f = ml.export(os.path.join("temp", "freyberg.nc"))
f = ml.export(os.path.join(npth, "freyberg.nc"))
v1_set = set(f.nc.variables.keys())
new_f = flopy.export.NetCdf.zeros_like(f)
fnc = os.path.join(npth, "freyberg.new.nc")
new_f = flopy.export.NetCdf.zeros_like(f, output_filename=fnc)
v2_set = set(new_f.nc.variables.keys())
diff = v1_set.symmetric_difference(v2_set)
assert len(diff) == 0, str(diff)
@ -412,7 +448,7 @@ def test_shapefile_ibound():
except:
return
shape_name = os.path.join("temp", "test.shp")
shape_name = os.path.join(spth, "test.shp")
nam_file = "freyberg.nam"
model_ws = os.path.join('..', 'examples', 'data',
'freyberg_multilayer_transient')
@ -452,18 +488,18 @@ def build_sfr_netcdf():
if __name__ == '__main__':
#test_shapefile_ibound()
#test_netcdf_overloads()
#test_netcdf_classmethods()
#build_netcdf()
#build_sfr_netcdf()
test_shapefile_ibound()
# test_netcdf_overloads()
test_netcdf_classmethods()
build_netcdf()
build_sfr_netcdf()
test_sr()
#test_rotation()
test_rotation()
test_map_rotation()
#test_sr_scaling()
#test_free_format_flag()
#test_export_output()
#for namfile in namfiles:
#for namfile in ["fhb.nam"]:
#export_netcdf(namfile)
#export_shapefile(namfile)
test_sr_scaling()
test_free_format_flag()
test_export_output()
for namfile in namfiles:
# for namfile in ["fhb.nam"]:
export_netcdf(namfile)
export_shapefile(namfile)

View File

@ -6,9 +6,10 @@ These are the examples that are distributed with MODFLOW-2005.
import os
import flopy
# import matplotlib
# matplotlib.use('Agg')
tpth = os.path.join('temp', 't008')
# make the directory if it does not exist
if not os.path.isdir(tpth):
os.makedirs(tpth)
pth = os.path.join('..', 'examples', 'data', 'mf2005_test')
namfiles = [namfile for namfile in os.listdir(pth) if namfile.endswith('.nam')]
@ -62,47 +63,47 @@ def test_nwt_model_load():
def load_nwt(nwtfile):
ml = flopy.modflow.Modflow(model_ws="temp")
fn = os.path.join('temp', '{}.nwt'.format(ml.name))
ml = flopy.modflow.Modflow(model_ws=tpth)
fn = os.path.join(tpth, '{}.nwt'.format(ml.name))
if os.path.isfile(fn):
os.remove(fn)
if 'fmt.' in nwtfile.lower():
#ml.set_free_format(value=False)
# ml.set_free_format(value=False)
ml.array_free_format = False
else:
#ml.set_free_format(value=True)
# ml.set_free_format(value=True)
ml.array_free_format = True
nwt = flopy.modflow.ModflowNwt.load(nwtfile, ml)
assert isinstance(nwt,
flopy.modflow.ModflowNwt), '{} load unsuccessful'.format(
os.path.basename(nwtfile))
os.path.basename(nwtfile))
nwt.write_file()
assert os.path.isfile(fn), '{} write unsuccessful'.format(
os.path.basename(nwtfile))
os.path.basename(nwtfile))
nwt2 = flopy.modflow.ModflowNwt.load(fn, ml)
lst = [a for a in dir(nwt) if
not a.startswith('__') and not callable(getattr(nwt, a))]
for l in lst:
assert nwt2[l] == nwt[l], '{} data '.format(l) + \
'instantiated from {} load '.format(
os.path.basename(nwtfile)) + \
os.path.basename(nwtfile)) + \
' is not the same as written to {}'.format(
os.path.basename(fn))
os.path.basename(fn))
def load_nwt_model(nfile):
f = os.path.basename(nfile)
model_ws = os.path.dirname(nfile)
ml = flopy.modflow.Modflow.load(f, model_ws=model_ws)
assert isinstance(ml,
flopy.modflow.Modflow), 'Error: flopy model instance was not created'
assert isinstance(ml, flopy.modflow.Modflow), \
'Error: flopy model instance was not created'
# change the model work space and rewrite the files
ml.change_model_ws('temp')
ml.change_model_ws(tpth)
ml.write_input()
# reload the model that was just written
ml2 = flopy.modflow.Modflow.load(f, model_ws='temp')
ml2 = flopy.modflow.Modflow.load(f, model_ws=tpth)
# check that the data are the same
for pn in ml.get_package_list():
@ -113,9 +114,9 @@ def load_nwt_model(nfile):
for l in lst:
assert p[l] == p2[l], '{}.{} data '.format(pn, l) + \
'instantiated from {} load '.format(
model_ws) + \
model_ws) + \
' is not the same as written to {}'.format(
'temp')
'temp')
if __name__ == '__main__':
@ -126,4 +127,3 @@ if __name__ == '__main__':
for namfile in namfiles:
load_model(namfile)
load_only_bas6_model(namfile)

View File

@ -1,10 +1,11 @@
__author__ = 'aleaf'
import sys
#sys.path.append('/Users/aleaf/Documents/GitHub/flopy3')
# sys.path.append('/Users/aleaf/Documents/GitHub/flopy3')
import os
import numpy as np
import matplotlib
matplotlib.use('agg')
import flopy
@ -15,26 +16,29 @@ if os.path.split(os.getcwd())[-1] == 'flopy3':
else:
path = os.path.join('..', 'examples', 'data', 'mf2005_test')
path2 = os.path.join('..', 'examples', 'data', 'sfr_test')
outpath = 'temp'
outpath = os.path.join('temp', 't009')
# make the directory if it does not exist
if not os.path.isdir(outpath):
os.makedirs(outpath)
sfr_items = {0: {'mfnam': 'test1ss.nam',
'sfrfile': 'test1ss.sfr'},
1: {'mfnam': 'test1tr.nam',
'sfrfile': 'test1tr.sfr'},
2: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC1.sfr'},
3: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC2.sfr'},
4: {'mfnam': 'testsfr2.nam',
'sfrfile': 'testsfr2.sfr'},
5: {'mfnam': 'UZFtest2.nam',
'sfrfile': 'UZFtest2.sfr'},
6: {'mfnam': 'TL2009.nam',
'sfrfile': 'TL2009.sfr'}
}
'sfrfile': 'test1ss.sfr'},
1: {'mfnam': 'test1tr.nam',
'sfrfile': 'test1tr.sfr'},
2: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC1.sfr'},
3: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC2.sfr'},
4: {'mfnam': 'testsfr2.nam',
'sfrfile': 'testsfr2.sfr'},
5: {'mfnam': 'UZFtest2.nam',
'sfrfile': 'UZFtest2.sfr'},
6: {'mfnam': 'TL2009.nam',
'sfrfile': 'TL2009.sfr'}
}
def sfr_process(mfnam, sfrfile, model_ws, outfolder=outpath):
m = flopy.modflow.Modflow.load(mfnam, model_ws=model_ws, verbose=False)
sfr = m.get_package('SFR')
@ -57,15 +61,18 @@ def sfr_process(mfnam, sfrfile, model_ws, outfolder=outpath):
return m, sfr
def load_sfr_only(sfrfile):
m = flopy.modflow.Modflow()
sfr = flopy.modflow.ModflowSfr2.load(sfrfile, m)
return m, sfr
def load_all_sfr_only(path):
for i, item in sfr_items.items():
load_sfr_only(os.path.join(path, item['sfrfile']))
def interpolate_to_reaches(sfr):
reach_data = sfr.reach_data
segment_data = sfr.segment_data[0]
@ -79,35 +86,38 @@ def interpolate_to_reaches(sfr):
fp = [segment_data[segment_data['nseg'] == seg][segvars[0]][0],
segment_data[segment_data['nseg'] == seg][segvars[1]][0]]
xp = [dist[0], dist[-1]]
assert np.sum(np.abs(reaches[reachvar] - np.interp(dist, xp, fp).tolist())) < 0.01
assert np.sum(np.abs(
reaches[reachvar] - np.interp(dist, xp, fp).tolist())) < 0.01
return reach_data
def test_sfr():
def test_sfr():
load_all_sfr_only(path2)
m, sfr = sfr_process('test1ss.nam', 'test1ss.sfr', path)
m, sfr = sfr_process('test1tr.nam', 'test1tr.sfr', path)
#assert list(sfr.dataset_5.keys()) == [0, 1]
# assert list(sfr.dataset_5.keys()) == [0, 1]
m, sfr = sfr_process('testsfr2_tab.nam', 'testsfr2_tab_ICALC1.sfr', path)
assert list(sfr.dataset_5.keys()) == list(range(0, 50))
m, sfr = sfr_process('testsfr2_tab.nam', 'testsfr2_tab_ICALC2.sfr', path)
assert sfr.channel_geometry_data[0][1] == [[0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0],
[6.0, 4.5, 3.5, 0.0, 0.3, 3.5, 4.5, 6.0]]
assert sfr.channel_geometry_data[0][1] == [
[0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0],
[6.0, 4.5, 3.5, 0.0, 0.3, 3.5, 4.5, 6.0]]
m, sfr = sfr_process('testsfr2.nam', 'testsfr2.sfr', path)
assert round(sum(sfr.segment_data[49][0]), 7) == 3.9700007
m, sfr = sfr_process('UZFtest2.nam', 'UZFtest2.sfr', path)
assert isinstance(sfr.plot()[0], matplotlib.axes.Axes) # test the plot() method
assert isinstance(sfr.plot()[0],
matplotlib.axes.Axes) # test the plot() method
# trout lake example (only sfr file is included)
# can add tests for sfr connection with lak package
@ -117,24 +127,28 @@ def test_sfr():
sfr.isfropt = 1
sfr.reach_data = interpolate_to_reaches(sfr)
sfr.get_slopes()
assert sfr.reach_data.slope[29] == (sfr.reach_data.strtop[29] - sfr.reach_data.strtop[107])\
/sfr.reach_data.rchlen[29]
assert sfr.reach_data.slope[29] == (sfr.reach_data.strtop[29] -
sfr.reach_data.strtop[107]) \
/ sfr.reach_data.rchlen[29]
chk = sfr.check()
assert sfr.reach_data.slope.min() < 0.0001 and 'minimum slope' in chk.warnings
sfr.reach_data.slope[0] = 1.1
chk.slope(maximum_slope=1.0)
assert 'maximum slope' in chk.warnings
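The slope assertion above divides the drop in streambed top from a reach to its downstream neighbor by the reach length; a worked form with assumed (hypothetical) values:

strtop_29 = 104.0    # streambed top of reach 29 (assumed value)
strtop_107 = 103.2   # streambed top of its downstream reach 107 (assumed value)
rchlen_29 = 400.0    # length of reach 29 (assumed value)
slope_29 = (strtop_29 - strtop_107) / rchlen_29
print(slope_29)      # 0.002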
def test_sfr_renumbering():
# test segment renumbering
r = np.zeros((27, 2), dtype=[('iseg', int), ('ireach', int)])
r = np.core.records.fromarrays(r.transpose(), dtype=[('iseg', int), ('ireach', int)])
r = np.core.records.fromarrays(r.transpose(),
dtype=[('iseg', int), ('ireach', int)])
r['iseg'] = sorted(list(range(1, 10)) * 3)
r['ireach'] = [1, 2, 3] * 9
d = np.zeros((9, 2), dtype=[('nseg', int), ('outseg', int)])
d = np.core.records.fromarrays(d.transpose(), dtype=[('nseg', int), ('outseg', int)])
d = np.core.records.fromarrays(d.transpose(),
dtype=[('nseg', int), ('outseg', int)])
d['nseg'] = range(1, 10)
d['outseg'] = [4, 0, 6, 8, 3, 8, 1, 2, 8]
m = flopy.modflow.Modflow()
@ -146,47 +160,59 @@ def test_sfr_renumbering():
assert 'continuity in segment and reach numbering' in chk.passed
assert 'segment numbering order' in chk.passed
def test_example():
m = flopy.modflow.Modflow.load('test1ss.nam', version='mf2005',
exe_name='mf2005.exe',
model_ws=path,
load_only=['ghb', 'evt', 'rch', 'dis',
'bas6', 'oc', 'sip', 'lpf'])
reach_data = np.genfromtxt(
'../examples/data/sfr_examples/test1ss_reach_data.csv', delimiter=',',
names=True)
segment_data = np.genfromtxt(
'../examples/data/sfr_examples/test1ss_segment_data.csv',
delimiter=',', names=True)
# segment_data = {0: ss_segment_data}
m = flopy.modflow.Modflow.load('test1ss.nam', version='mf2005', exe_name='mf2005.exe',
model_ws=path, load_only=['ghb', 'evt', 'rch', 'dis', 'bas6', 'oc', 'sip', 'lpf'])
reach_data = np.genfromtxt('../examples/data/sfr_examples/test1ss_reach_data.csv', delimiter=',', names=True)
segment_data = np.genfromtxt('../examples/data/sfr_examples/test1ss_segment_data.csv', delimiter=',', names=True)
#segment_data = {0: ss_segment_data}
channel_flow_data = {
0: {1: [[0.5, 1.0, 2.0, 4.0, 7.0, 10.0, 20.0, 30.0, 50.0, 75.0, 100.0],
[0.25, 0.4, 0.55, 0.7, 0.8, 0.9, 1.1, 1.25, 1.4, 1.7, 2.6],
[3.0, 3.5, 4.2, 5.3, 7.0, 8.5, 12.0, 14.0, 17.0, 20.0, 22.0]]}}
channel_geometry_data = {
0: {7: [[0.0, 10.0, 80.0, 100.0, 150.0, 170.0, 240.0, 250.0],
[20.0, 13.0, 10.0, 2.0, 0.0, 10.0, 13.0, 20.0]],
8: [[0.0, 10.0, 80.0, 100.0, 150.0, 170.0, 240.0, 250.0],
[25.0, 17.0, 13.0, 4.0, 0.0, 10.0, 16.0, 20.0]]}}
channel_flow_data = {0: {1: [[0.5, 1.0, 2.0, 4.0, 7.0, 10.0, 20.0, 30.0, 50.0, 75.0, 100.0],
[0.25, 0.4, 0.55, 0.7, 0.8, 0.9, 1.1, 1.25, 1.4, 1.7, 2.6],
[3.0, 3.5, 4.2, 5.3, 7.0, 8.5, 12.0, 14.0, 17.0, 20.0, 22.0]]}}
channel_geometry_data = {0: {7: [[0.0, 10.0, 80.0, 100.0, 150.0, 170.0, 240.0, 250.0],
[20.0, 13.0, 10.0, 2.0, 0.0, 10.0, 13.0, 20.0]],
8: [[0.0, 10.0, 80.0, 100.0, 150.0, 170.0, 240.0, 250.0],
[25.0, 17.0, 13.0, 4.0, 0.0, 10.0, 16.0, 20.0]]}}
nstrm = len(reach_data) # number of reaches
nss = len(segment_data) # number of segments
nsfrpar = 0 # number of parameters (not supported)
nstrm = len(reach_data) # number of reaches
nss = len(segment_data) # number of segments
nsfrpar = 0 # number of parameters (not supported)
nparseg = 0
const = 1.486 # constant for manning's equation, units of cfs
dleak = 0.0001 # closure tolerance for stream stage computation
istcb1 = 53 # flag for writing SFR output to cell-by-cell budget (on unit 53)
istcb2 = 81 # flag for writing SFR output to text file
dataset_5 = {0: [nss, 0, 0]} # dataset 5 (see online guide)
const = 1.486 # constant for manning's equation, units of cfs
dleak = 0.0001 # closure tolerance for stream stage computation
istcb1 = 53 # flag for writing SFR output to cell-by-cell budget (on unit 53)
istcb2 = 81 # flag for writing SFR output to text file
dataset_5 = {0: [nss, 0, 0]} # dataset 5 (see online guide)
sfr = flopy.modflow.ModflowSfr2(m, nstrm=nstrm, nss=nss, const=const, dleak=dleak, istcb1=istcb1, istcb2=istcb2,
reach_data=reach_data,
segment_data=segment_data,
channel_geometry_data=channel_geometry_data,
channel_flow_data=channel_flow_data,
dataset_5=dataset_5)
sfr = flopy.modflow.ModflowSfr2(m, nstrm=nstrm, nss=nss, const=const,
dleak=dleak, istcb1=istcb1, istcb2=istcb2,
reach_data=reach_data,
segment_data=segment_data,
channel_geometry_data=channel_geometry_data,
channel_flow_data=channel_flow_data,
dataset_5=dataset_5)
# test handling of a 0-D array (produced by genfromtxt sometimes)
segment_data = np.array(segment_data[0])
sfr = flopy.modflow.ModflowSfr2(m, nstrm=nstrm, nss=nss, const=const, dleak=dleak, istcb1=istcb1, istcb2=istcb2,
reach_data=reach_data,
segment_data=segment_data,
channel_geometry_data=channel_geometry_data,
channel_flow_data=channel_flow_data,
dataset_5=dataset_5)
sfr = flopy.modflow.ModflowSfr2(m, nstrm=nstrm, nss=nss, const=const,
dleak=dleak, istcb1=istcb1, istcb2=istcb2,
reach_data=reach_data,
segment_data=segment_data,
channel_geometry_data=channel_geometry_data,
channel_flow_data=channel_flow_data,
dataset_5=dataset_5)
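The comment above notes that genfromtxt can hand back a 0-D record; a small sketch, with made-up data, of what that shape looks like and how it can be promoted before indexing:

import numpy as np

seg = np.array([(1, 0)], dtype=[('nseg', int), ('outseg', int)])
zero_d = np.array(seg[0])       # shape () -- the 0-D case handled above
print(zero_d.shape)             # ()
seg1d = np.atleast_1d(zero_d)   # promote to shape (1,) before indexing
print(seg1d['nseg'])            # [1]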
if __name__ == '__main__':
test_sfr()

View File

@ -9,26 +9,52 @@ import os
import flopy
from flopy.modflow.mfsfr2 import check
tpth = os.path.abspath(os.path.join('temp', 't010'))
# make the directory if it does not exist
if not os.path.isdir(tpth):
os.makedirs(tpth)
def load_check_sfr(mfnam, model_ws, checker_output_path):
if os.path.split(os.getcwd())[-1] == 'flopy3':
path = os.path.join('examples', 'data', 'mf2005_test')
cpth = os.path.join('py.test/temp')
else:
path = os.path.join('..', 'examples', 'data', 'mf2005_test')
cpth = os.path.join(tpth)
sfr_items = {0: {'mfnam': 'test1ss.nam',
'sfrfile': 'test1ss.sfr'},
1: {'mfnam': 'test1tr.nam',
'sfrfile': 'test1tr.sfr'},
2: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC1.sfr'},
3: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC2.sfr'},
4: {'mfnam': 'testsfr2.nam',
'sfrfile': 'testsfr2.sfr'},
5: {'mfnam': 'UZFtest2.nam',
'sfrfile': 'UZFtest2.sfr'},
}
def load_check_sfr(i, mfnam, model_ws, checker_output_path):
#print('Testing {}\n'.format(mfnam) + '='*100)
m = flopy.modflow.Modflow.load(mfnam, model_ws=model_ws)
m.model_ws = checker_output_path
checker_outfile = 'SFRcheck_{}.txt'.format(m.name)
checker_outfile = os.path.join(tpth, 'SFRcheck_{}.txt'.format(m.name))
return m.sfr.check(checker_outfile, level=1)
chk = m.sfr.check(checker_outfile, level=1)
if i == 1:
assert 'overlapping conductance' in chk.warnings
if i == 2:
assert 'segment elevations vs. model grid' in chk.warnings
return
def test_sfrcheck():
if os.path.split(os.getcwd())[-1] == 'flopy3':
path = os.path.join('examples', 'data', 'mf2005_test')
cpth = os.path.join('py.test/temp')
else:
path = os.path.join('..', 'examples', 'data', 'mf2005_test')
cpth = os.path.join('temp')
m = flopy.modflow.Modflow.load('test1tr.nam', model_ws=path, verbose=False)
# run level=0 check
@ -64,31 +90,14 @@ def test_sfrcheck():
chk.routing()
assert 'circular routing' in chk.errors
sfr_items = {0: {'mfnam': 'test1ss.nam',
'sfrfile': 'test1ss.sfr'},
1: {'mfnam': 'test1tr.nam',
'sfrfile': 'test1tr.sfr'},
2: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC1.sfr'},
3: {'mfnam': 'testsfr2_tab.nam',
'sfrfile': 'testsfr2_tab_ICALC2.sfr'},
4: {'mfnam': 'testsfr2.nam',
'sfrfile': 'testsfr2.sfr'},
5: {'mfnam': 'UZFtest2.nam',
'sfrfile': 'UZFtest2.sfr'},
}
passed = {}
warnings = {}
def test_sfrloadcheck():
for i, case in sfr_items.items():
chk = load_check_sfr(case['mfnam'], model_ws=path, checker_output_path=cpth)
passed[i] = chk.passed
warnings[i] = chk.warnings
assert 'overlapping conductance' in warnings[1]
assert 'segment elevations vs. model grid' in warnings[2]
yield load_check_sfr, i, case['mfnam'], path, cpth
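test_sfrloadcheck now yields the checker and its arguments so each SFR case is collected as its own nose test; a minimal sketch of that generator-test pattern with hypothetical cases:

cases = {0: 'a.nam', 1: 'b.nam'}   # hypothetical test cases

def check_case(i, nam):
    assert nam.endswith('.nam')

def test_all_cases():
    for i, nam in cases.items():
        # nose runs check_case(i, nam) as a separate test
        yield check_case, i, nam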
if __name__ == '__main__':
test_sfrcheck()
for i, case in sfr_items.items():
test_sfrloadcheck(i, case['mfnam'], path, cpth)

View File

@ -6,7 +6,6 @@ Some basic tests for mflistfile.py module (not super rigorous)
import os
import flopy
import numpy as np
#import matplotlib.pyplot as plt
def test_mflistfile():
@ -30,9 +29,9 @@ def test_mflistfile():
bud = mflist.get_data(totim=times[0])
assert isinstance(bud, np.ndarray)
#plt.bar(bud['index'], bud['value'])
#plt.xticks(bud['index'], bud['name'], rotation=45, size=6)
#plt.show()
# plt.bar(bud['index'], bud['value'])
# plt.xticks(bud['index'], bud['name'], rotation=45, size=6)
# plt.show()
inc = mflist.get_incremental()
assert isinstance(inc, np.ndarray)
@ -51,5 +50,6 @@ def test_mflistfile():
return
if __name__ == '__main__':
test_mflistfile()

View File

@ -6,7 +6,7 @@ import os
import flopy
def test_mt3d_create_withmfmodel():
model_ws = os.path.join('.', 'temp')
model_ws = os.path.join('.', 'temp', 't013')
# Create a MODFLOW model
mf = flopy.modflow.Modflow(model_ws=model_ws)
@ -38,7 +38,7 @@ def test_mt3d_create_withmfmodel():
return
def test_mt3d_create_woutmfmodel():
model_ws = os.path.join('.', 'temp')
model_ws = os.path.join('.', 'temp', 't013')
# Create MT3D model
mt = flopy.mt3d.Mt3dms(model_ws=model_ws)

View File

@ -8,11 +8,15 @@ import numpy as np
path = os.path.join('..', 'examples', 'data', 'mf2005_test')
pthgw = os.path.join('..', 'examples', 'groundwater_paper', 'uspb', 'flopy')
cpth = os.path.join('temp')
cpth = os.path.join('temp', 't014')
# make the directory if it does not exist
if not os.path.isdir(cpth):
os.makedirs(cpth)
mf_items = ['str.nam', 'DG.nam']
pths = [path, pthgw]
def load_str(mfnam, pth):
m = flopy.modflow.Modflow.load(mfnam, model_ws=pth, verbose=True)
assert m.load_fail is False
@ -25,14 +29,14 @@ def load_str(mfnam, pth):
pth = os.path.join(cpth, '{}.str'.format(m.name))
str2 = flopy.modflow.ModflowStr.load(pth, m)
for name in str2.dtype.names:
assert np.array_equal(str2.stress_period_data[0][name], m.str.stress_period_data[0][name]) is True
assert np.array_equal(str2.stress_period_data[0][name],
m.str.stress_period_data[0][name]) is True
for name in str2.dtype2.names:
assert np.array_equal(str2.segment_data[0][name], m.str.segment_data[0][name]) is True
assert np.array_equal(str2.segment_data[0][name],
m.str.segment_data[0][name]) is True
return
def test_mf2005load():
for namfile, pth in zip(mf_items, pths):
yield load_str, namfile, pth

View File

@ -1,12 +1,10 @@
__author__ = 'aleaf'
#import sys
#sys.path.append('/Users/aleaf/Documents/GitHub/flopy3')
import os
import matplotlib
matplotlib.use('agg')
import flopy
import pytest
print(os.getcwd())
@ -18,10 +16,12 @@ else:
str_items = {0: {'mfnam': 'str.nam',
'sfrfile': 'str.str'}}
def test_str_plot():
m = flopy.modflow.Modflow.load(str_items[0]['mfnam'], model_ws=path, verbose=True)
def test_str_plot():
m = flopy.modflow.Modflow.load(str_items[0]['mfnam'], model_ws=path,
verbose=True)
assert isinstance(m.str.plot()[0], matplotlib.axes.Axes)
if __name__ == '__main__':
test_str_plot()
test_str_plot()

View File

@ -17,7 +17,7 @@ def test_usg_disu_load():
assert isinstance(disu, flopy.modflow.ModflowDisU)
# Change where model files are written
model_ws = 'temp'
model_ws = os.path.join('temp', 't016')
m.model_ws = model_ws
# Write the disu file
@ -52,7 +52,7 @@ def test_usg_sms_load():
assert isinstance(sms, flopy.modflow.ModflowSms)
# Change where model files are written
model_ws = 'temp'
model_ws = os.path.join('temp', 't016')
m.model_ws = model_ws
# Write the sms file

View File

@ -19,10 +19,10 @@ def test_formattedfile_read():
h0 = h.get_data(totim=times[0])
h1 = h.get_data(kstpkper=kstpkper[0])
h2 = h.get_data(idx=0)
assert np.array_equal(h0,
h1), 'formatted head read using totim != head read using kstpkper'
assert np.array_equal(h0,
h2), 'formatted head read using totim != head read using idx'
assert np.array_equal(h0, h1), \
'formatted head read using totim != head read using kstpkper'
assert np.array_equal(h0, h2), \
'formatted head read using totim != head read using idx'
ts = h.get_ts((0, 7, 5))
assert np.isclose(ts[0, 1], 944.487, 1e-6), \
@ -108,9 +108,9 @@ def test_cellbudgetfile_readrecord():
for kk in kstpkper:
t = v.get_data(kstpkper=kk, text='STREAM LEAKAGE', full3D=True)[0]
assert t.shape == (
1, 15, 10), '3D sfr budget data for kstpkper {} '.format(kk) + \
'does not have correct shape (1, 15,10) - ' + \
'returned shape {}'.format(t[0].shape)
1, 15, 10), '3D sfr budget data for kstpkper {} '.format(kk) + \
'does not have correct shape (1, 15,10) - ' + \
'returned shape {}'.format(t[0].shape)
idx = v.get_indices()
assert idx is None, 'get_indices() without record did not return None'
@ -152,9 +152,9 @@ def test_cellbudgetfile_readrecord_waux():
for kk in kstpkper:
t = v.get_data(kstpkper=kk, text='wells', full3D=True)[0]
assert t.shape == (
1, 15, 10), '3D wel budget data for kstpkper {} '.format(kk) + \
'does not have correct shape (1, 15,10) - ' + \
'returned shape {}'.format(t[0].shape)
1, 15, 10), '3D wel budget data for kstpkper {} '.format(kk) + \
'does not have correct shape (1, 15,10) - ' + \
'returned shape {}'.format(t[0].shape)
idx = v.get_indices()
assert idx is None, 'get_indices() without record did not return None'
@ -180,7 +180,7 @@ def test_binaryfile_writeread():
model = 'Pr3_MFNWT_lower.nam'
ml = flopy.modflow.Modflow.load(model, version='mfnwt', model_ws=pth)
# change the model work space
ml.change_model_ws('temp')
ml.change_model_ws(os.path.join('temp', 't017'))
#
ncol = ml.dis.ncol
nrow = ml.dis.nrow
@ -193,7 +193,7 @@ def test_binaryfile_writeread():
ilay=1, pertim=pertim,
totim=pertim, kstp=1, kper=1)
b = ml.dis.botm.array[0, :, :].astype(np.float64)
pth = os.path.join('temp', 'bottom.hds')
pth = os.path.join('temp', 't017', 'bottom.hds')
flopy.utils.Util2d.write_bin(b.shape, pth, b,
header_data=header)
@ -216,7 +216,7 @@ def test_binaryfile_writeread():
ilay=1, pertim=pertim,
totim=pertim, kstp=1, kper=1)
b = ml.dis.botm.array[0, :, :].astype(np.float32)
pth = os.path.join('temp', 'bottom_single.hds')
pth = os.path.join('temp', 't017', 'bottom_single.hds')
flopy.utils.Util2d.write_bin(b.shape, pth, b,
header_data=header)
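The write_bin calls above are half of a write/read round trip; a sketch of the whole cycle, assuming a made-up float64 array and header fields mirroring the surrounding test:

import os
import numpy as np
import flopy

opth = os.path.join('temp', 't017')
if not os.path.isdir(opth):
    os.makedirs(opth)
b = np.ones((15, 10), dtype=np.float64) * 10.  # assumed bottom array
header = flopy.utils.BinaryHeader.create(bintype='HEAD', precision='double',
                                         text='head', nrow=15, ncol=10,
                                         ilay=1, pertim=1.0, totim=1.0,
                                         kstp=1, kper=1)
pth = os.path.join(opth, 'bottom.hds')
flopy.utils.Util2d.write_bin(b.shape, pth, b, header_data=header)
hobj = flopy.utils.HeadFile(pth, precision='double')
br = hobj.get_data(idx=0)
assert np.allclose(b, br), 'written and re-read arrays differ'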

View File

@ -4,6 +4,11 @@ import flopy
import flopy.pest.templatewriter as tplwriter
import flopy.pest.params as params
mpth = os.path.join('temp', 't018')
# make the directory if it does not exist
if not os.path.isdir(mpth):
os.makedirs(mpth)
def test_tpl_constant():
# Define the model dimensions
nlay = 3
@ -11,7 +16,7 @@ def test_tpl_constant():
ncol = 20
# Create the flopy model object and add the dis and lpf packages
m = flopy.modflow.Modflow(modelname='tpl1', model_ws='./temp')
m = flopy.modflow.Modflow(modelname='tpl1', model_ws=mpth)
dis = flopy.modflow.ModflowDis(m, nlay, nrow, ncol)
lpf = flopy.modflow.ModflowLpf(m, hk=10.)
@ -37,7 +42,7 @@ def test_tpl_constant():
tw = tplwriter.TemplateWriter(m, [p])
tw.write_template()
tplfile = os.path.join('./temp', 'tpl1.lpf.tpl')
tplfile = os.path.join(mpth, 'tpl1.lpf.tpl')
assert os.path.isfile(tplfile)
return
@ -49,7 +54,7 @@ def test_tpl_layered():
ncol = 20
# Create the flopy model object and add the dis and lpf packages
m = flopy.modflow.Modflow(modelname='tpl2', model_ws='./temp')
m = flopy.modflow.Modflow(modelname='tpl2', model_ws=mpth)
dis = flopy.modflow.ModflowDis(m, nlay, nrow, ncol)
lpf = flopy.modflow.ModflowLpf(m, hk=10.)
@ -71,7 +76,7 @@ def test_tpl_layered():
tw = tplwriter.TemplateWriter(m, [p])
tw.write_template()
tplfile = os.path.join('./temp', 'tpl2.lpf.tpl')
tplfile = os.path.join(mpth, 'tpl2.lpf.tpl')
assert os.path.isfile(tplfile)
return
@ -83,7 +88,7 @@ def test_tpl_zoned():
ncol = 20
# Create the flopy model object and add the dis and lpf packages
m = flopy.modflow.Modflow(modelname='tpl3', model_ws='./temp')
m = flopy.modflow.Modflow(modelname='tpl3', model_ws=mpth)
dis = flopy.modflow.ModflowDis(m, nlay, nrow, ncol)
lpf = flopy.modflow.ModflowLpf(m, hk=10.)
@ -118,7 +123,7 @@ def test_tpl_zoned():
tw = tplwriter.TemplateWriter(m, plist)
tw.write_template()
tplfile = os.path.join('./temp', 'tpl3.lpf.tpl')
tplfile = os.path.join(mpth, 'tpl3.lpf.tpl')
assert os.path.isfile(tplfile)
return

View File

@ -1,10 +1,16 @@
import os
import numpy as np
import flopy
mpth = os.path.join('temp', 't019')
# make the directory if it does not exist
if not os.path.isdir(mpth):
os.makedirs(mpth)
# Test hydmod data readers
def test_hydmodfile_create():
import os
import numpy as np
import flopy
model_ws = os.path.join('temp')
model_ws = os.path.join(mpth)
if not os.path.exists(model_ws):
os.makedirs(model_ws)
m = flopy.modflow.Modflow('test', model_ws=model_ws)
@ -18,17 +24,13 @@ def test_hydmodfile_create():
def test_hydmodfile_load():
import os
import numpy as np
import flopy
model = 'test1tr.nam'
pth = os.path.join('..', 'examples', 'data', 'hydmod_test')
m = flopy.modflow.Modflow.load(model, version='mf2005', model_ws=pth, verbose=True)
hydref = m.hyd
assert isinstance(hydref, flopy.modflow.ModflowHyd), 'Did not load hydmod package...test1tr.hyd'
model_ws = os.path.join('temp')
model_ws = os.path.join(mpth)
if not os.path.exists(model_ws):
os.makedirs(model_ws)
@ -37,7 +39,8 @@ def test_hydmodfile_load():
pth = os.path.join('..', 'examples', 'data', 'hydmod_test', 'test1tr.hyd')
hydload = flopy.modflow.ModflowHyd.load(pth, m)
assert np.array_equal(hydref.obsdata, hydload.obsdata), 'Written hydmod data not equal to loaded hydmod data'
assert np.array_equal(hydref.obsdata, hydload.obsdata), \
'Written hydmod data not equal to loaded hydmod data'
return
@ -52,16 +55,20 @@ def test_hydmodfile_read():
assert isinstance(h, flopy.utils.HydmodObs)
ntimes = h.get_ntimes()
assert ntimes == 101, 'Not enough times in hydmod file ()...'.format(os.path.basename(pth))
assert ntimes == 101, \
'Not enough times in hydmod file ()...'.format(os.path.basename(pth))
times = h.get_times()
assert len(times) == 101, 'Not enough times in hydmod file ()...'.format(os.path.basename(pth))
assert len(times) == 101, \
'Not enough times in hydmod file ()...'.format(os.path.basename(pth))
nitems = h.get_nobs()
assert nitems == 8, 'Not enough records in hydmod file ()...'.format(os.path.basename(pth))
assert nitems == 8, \
'Not enough records in hydmod file ()...'.format(os.path.basename(pth))
labels = h.get_obsnames()
assert len(labels) == 8, 'Not enough labels in hydmod file ()...'.format(os.path.basename(pth))
assert len(labels) == 8, \
'Not enough labels in hydmod file ()...'.format(os.path.basename(pth))
print(labels)
for idx in range(ntimes):
@ -74,11 +81,14 @@ def test_hydmodfile_read():
for label in labels:
data = h.get_data(obsname=label)
assert data.shape == (len(times),), 'data shape is not ({},)'.format(len(times))
assert data.shape == (len(times),), \
'data shape is not ({},)'.format(len(times))
data = h.get_data()
assert data.shape == (len(times),), 'data shape is not ({},)'.format(len(times))
assert len(data.dtype.names) == nitems + 1, 'data column length is not {}'.format(len(nitems+1))
assert data.shape == (len(times),), \
'data shape is not ({},)'.format(len(times))
assert len(data.dtype.names) == nitems + 1, \
'data column length is not {}'.format(len(nitems+1))
try:
import pandas as pd

View File

@ -25,7 +25,7 @@ def test_mfnwt_run():
return
modelname = 'watertable'
model_ws = os.path.join('temp')
model_ws = os.path.join('temp', 't020')
if not os.path.exists(model_ws):
os.makedirs(model_ws)

View File

@ -1,19 +1,30 @@
# Test modflow write and run
import os
import numpy as np
import flopy
mpth = os.path.join('temp', 't021')
# make the directory if it does not exist
if not os.path.isdir(mpth):
os.makedirs(mpth)
def test_mflist_external():
import flopy.modflow as fmf
ml = fmf.Modflow("mflist_test",model_ws="temp",external_path="ref")
dis = fmf.ModflowDis(ml,1,10,10,nper=3,perlen=1.0)
wel_data = {0:[[0,0,0,-1],[1,1,1,-1]],1:[[0,0,0,-2],[1,1,1,-1]]}
wel = fmf.ModflowWel(ml,stress_period_data=wel_data)
ml = flopy.modflow.Modflow("mflist_test", model_ws=mpth,
external_path="ref")
dis = flopy.modflow.ModflowDis(ml, 1, 10, 10, nper=3, perlen=1.0)
wel_data = {0: [[0, 0, 0, -1], [1, 1, 1, -1]],
1: [[0, 0, 0, -2], [1, 1, 1, -1]]}
wel = flopy.modflow.ModflowWel(ml, stress_period_data=wel_data)
ml.write_input()
ml1 = fmf.Modflow.load("mflist_test.nam",
model_ws=ml.model_ws,
verbose=True,
forgive=False)
assert np.array_equal(ml.wel[0],ml1.wel[0])
assert np.array_equal(ml.wel[1],ml1.wel[1])
ml1 = flopy.modflow.Modflow.load("mflist_test.nam",
model_ws=ml.model_ws,
verbose=True,
forgive=False)
assert np.array_equal(ml.wel[0], ml1.wel[0])
assert np.array_equal(ml.wel[1], ml1.wel[1])
if __name__ == '__main__':
test_mflist_external()

View File

@ -1,5 +1,6 @@
# Test SWR binary read functionality
import os
import flopy
# import matplotlib.pyplot as plt
pth = os.path.join('..', 'examples', 'data', 'swr_test')
@ -8,13 +9,11 @@ files = ('SWR004.stg', 'SWR004.flow', 'SWR004.vel', 'swr005.qaq',
def test_swr_binary_stage(ipos=0):
import flopy
fpth = os.path.join(pth, files[ipos])
sobj = flopy.utils.SwrStage(fpth)
assert isinstance(sobj,
flopy.utils.SwrStage), 'SwrStage object not created'
assert isinstance(sobj, flopy.utils.SwrStage), \
'SwrStage object not created'
nrecords = sobj.get_nrecords()
assert nrecords == (18, 0), 'SwrStage records does not equal (18, 0)'
@ -24,40 +23,43 @@ def test_swr_binary_stage(ipos=0):
for idx in range(ntimes):
r = sobj.get_data(idx=idx)
assert r is not None, 'SwrStage could not read data with get_data(idx=)'
assert r.shape == (
18,), 'SwrStage stage data shape does not equal (18,)'
assert len(
r.dtype.names) == 2, 'SwrStage stage data dtype does not have 2 entries'
assert r is not None, \
'SwrStage could not read data with get_data(idx=)'
assert r.shape == (18,), \
'SwrStage stage data shape does not equal (18,)'
assert len(r.dtype.names) == 2, \
'SwrStage stage data dtype does not have 2 entries'
kswrkstpkper = sobj.get_kswrkstpkper()
assert kswrkstpkper.shape == (
336, 3), 'SwrStage kswrkstpkper shape does not equal (336, 3)'
assert kswrkstpkper.shape == (336, 3), \
'SwrStage kswrkstpkper shape does not equal (336, 3)'
for kkk in kswrkstpkper:
r = sobj.get_data(kswrkstpkper=kkk)
assert r is not None, 'SwrStage could not read data with get_data(kswrkstpkper=)'
assert r.shape == (
18,), 'SwrStage stage data shape does not equal (18,)'
assert len(
r.dtype.names) == 2, 'SwrStage stage data dtype does not have 2 entries'
assert r is not None, \
'SwrStage could not read data with get_data(kswrkstpkper=)'
assert r.shape == (18,), \
'SwrStage stage data shape does not equal (18,)'
assert len(r.dtype.names) == 2, \
'SwrStage stage data dtype does not have 2 entries'
times = sobj.get_times()
assert len(times) == 336, 'SwrStage times length does not equal 336'
for time in times:
r = sobj.get_data(totim=time)
assert r is not None, 'SwrStage could not read data with get_data(tottim=)'
assert r.shape == (
18,), 'SwrStage stage data shape does not equal (18,)'
assert len(
r.dtype.names) == 2, 'SwrStage stage data dtype does not have 2 entries'
assert r is not None, \
'SwrStage could not read data with get_data(tottim=)'
assert r.shape == (18,), \
'SwrStage stage data shape does not equal (18,)'
assert len(r.dtype.names) == 2, \
'SwrStage stage data dtype does not have 2 entries'
ts = sobj.get_ts(irec=17)
assert ts.shape == (
336,), 'SwrStage stage timeseries shape does not equal (336,)'
assert len(
ts.dtype.names) == 2, 'SwrStage stage time series stage data dtype does not have 2 entries'
assert ts.shape == (336,), \
'SwrStage stage timeseries shape does not equal (336,)'
assert len(ts.dtype.names) == 2, \
'SwrStage stage time series stage data dtype does not have 2 entries'
# plt.plot(ts['totim'], ts['stage'])
# plt.show()
@ -66,13 +68,11 @@ def test_swr_binary_stage(ipos=0):
def test_swr_binary_budget(ipos=1):
import flopy
fpth = os.path.join(pth, files[ipos])
sobj = flopy.utils.SwrBudget(fpth)
assert isinstance(sobj,
flopy.utils.SwrBudget), 'SwrBudget object not created'
assert isinstance(sobj, flopy.utils.SwrBudget), \
'SwrBudget object not created'
nrecords = sobj.get_nrecords()
assert nrecords == (18, 0), 'SwrBudget records does not equal (18, 0)'
@ -82,43 +82,46 @@ def test_swr_binary_budget(ipos=1):
for idx in range(ntimes):
r = sobj.get_data(idx=idx)
assert r is not None, 'SwrBudget could not read data with get_data(idx=)'
assert r.shape == (
18,), 'SwrBudget budget data shape does not equal (18,)'
assert len(
r.dtype.names) == 15, 'SwrBudget data dtype does not have 15 entries'
assert r is not None, \
'SwrBudget could not read data with get_data(idx=)'
assert r.shape == (18,), \
'SwrBudget budget data shape does not equal (18,)'
assert len(r.dtype.names) == 15, \
'SwrBudget data dtype does not have 15 entries'
# plt.bar(range(18), r['inf-out'])
# plt.show()
kswrkstpkper = sobj.get_kswrkstpkper()
assert kswrkstpkper.shape == (
336, 3), 'SwrBudget kswrkstpkper shape does not equal (336, 3)'
assert kswrkstpkper.shape == (336, 3), \
'SwrBudget kswrkstpkper shape does not equal (336, 3)'
for kkk in kswrkstpkper:
r = sobj.get_data(kswrkstpkper=kkk)
assert r is not None, 'SwrBudget could not read data with get_data(kswrkstpkper=)'
assert r.shape == (
18,), 'SwrBudget budget data shape does not equal (18,)'
assert len(
r.dtype.names) == 15, 'SwrBudget budget data dtype does not have 15 entries'
assert r is not None, \
'SwrBudget could not read data with get_data(kswrkstpkper=)'
assert r.shape == (18,), \
'SwrBudget budget data shape does not equal (18,)'
assert len(r.dtype.names) == 15, \
'SwrBudget budget data dtype does not have 15 entries'
times = sobj.get_times()
assert len(times) == 336, 'SwrBudget times length does not equal 336'
for time in times:
r = sobj.get_data(totim=time)
assert r is not None, 'SwrBudget could not read data with get_data(tottim=)'
assert r.shape == (
18,), 'SwrBudget budget data shape does not equal (18,)'
assert len(
r.dtype.names) == 15, 'SwrBudget budget data dtype does not have 15 entries'
assert r is not None, \
'SwrBudget could not read data with get_data(tottim=)'
assert r.shape == (18,), \
'SwrBudget budget data shape does not equal (18,)'
assert len(r.dtype.names) == 15, \
'SwrBudget budget data dtype does not have 15 entries'
ts = sobj.get_ts(irec=17)
assert ts.shape == (
336,), 'SwrBudget budget timeseries shape does not equal (336,)'
assert len(
ts.dtype.names) == 15, 'SwrBudget time series budget data dtype does not have 15 entries'
assert ts.shape == (336,), \
'SwrBudget budget timeseries shape does not equal (336,)'
assert len(ts.dtype.names) == 15, \
'SwrBudget time series budget data dtype does not have 15 entries'
# plt.plot(ts['totim'], ts['qbcflow'])
# plt.show()
@ -127,8 +130,6 @@ def test_swr_binary_budget(ipos=1):
def test_swr_binary_qm(ipos=2):
import flopy
fpth = os.path.join(pth, files[ipos])
sobj = flopy.utils.SwrFlow(fpth)
@ -146,46 +147,49 @@ def test_swr_binary_qm(ipos=2):
for idx in range(ntimes):
r = sobj.get_data(idx=idx)
assert r is not None, 'SwrFlow could not read data with get_data(idx=)'
assert r is not None, \
'SwrFlow could not read data with get_data(idx=)'
assert r.shape == (40,), 'SwrFlow qm data shape does not equal (40,)'
assert len(
r.dtype.names) == 3, 'SwrFlow qm data dtype does not have 3 entries'
assert len(r.dtype.names) == 3, \
'SwrFlow qm data dtype does not have 3 entries'
# plt.bar(range(40), r['flow'])
# plt.show()
kswrkstpkper = sobj.get_kswrkstpkper()
assert kswrkstpkper.shape == (
336, 3), 'SwrFlow kswrkstpkper shape does not equal (336, 3)'
assert kswrkstpkper.shape == (336, 3), \
'SwrFlow kswrkstpkper shape does not equal (336, 3)'
for kkk in kswrkstpkper:
r = sobj.get_data(kswrkstpkper=kkk)
assert r is not None, 'SwrFlow could not read data with get_data(kswrkstpkper=)'
assert r is not None, \
'SwrFlow could not read data with get_data(kswrkstpkper=)'
assert r.shape == (40,), 'SwrFlow qm data shape does not equal (40,)'
assert len(
r.dtype.names) == 3, 'SwrFlow qm data dtype does not have 3 entries'
assert len(r.dtype.names) == 3, \
'SwrFlow qm data dtype does not have 3 entries'
times = sobj.get_times()
assert len(times) == 336, 'SwrFlow times length does not equal 336'
for time in times:
r = sobj.get_data(totim=time)
assert r is not None, 'SwrFlow could not read data with get_data(tottim=)'
assert r is not None, \
'SwrFlow could not read data with get_data(tottim=)'
assert r.shape == (40,), 'SwrFlow qm data shape does not equal (40,)'
assert len(
r.dtype.names) == 3, 'SwrFlow qm data dtype does not have 3 entries'
assert len(r.dtype.names) == 3, \
'SwrFlow qm data dtype does not have 3 entries'
ts = sobj.get_ts(irec=17, iconn=16)
assert ts.shape == (
336,), 'SwrFlow qm timeseries shape does not equal (336,)'
assert len(
ts.dtype.names) == 3, 'SwrFlow time series qm data dtype does not have 3 entries'
assert ts.shape == (336,), \
'SwrFlow qm timeseries shape does not equal (336,)'
assert len(ts.dtype.names) == 3, \
'SwrFlow time series qm data dtype does not have 3 entries'
ts2 = sobj.get_ts(irec=16, iconn=17)
assert ts2.shape == (
336,), 'SwrFlow qm timeseries shape does not equal (336,)'
assert len(
ts2.dtype.names) == 3, 'SwrFlow time series qm data dtype does not have 3 entries'
assert ts2.shape == (336,), \
'SwrFlow qm timeseries shape does not equal (336,)'
assert len(ts2.dtype.names) == 3, \
'SwrFlow time series qm data dtype does not have 3 entries'
# plt.plot(ts['totim'], ts['velocity'])
# plt.plot(ts2['totim'], ts2['velocity'])
@ -195,8 +199,6 @@ def test_swr_binary_qm(ipos=2):
def test_swr_binary_qaq(ipos=3):
import flopy
fpth = os.path.join(pth, files[ipos])
sobj = flopy.utils.SwrExchange(fpth, verbose=True)
@ -211,11 +213,12 @@ def test_swr_binary_qaq(ipos=3):
for idx in range(ntimes):
r = sobj.get_data(idx=idx)
assert r is not None, 'SwrExchange could not read data with get_data(idx=)'
assert r.shape == (
21,), 'SwrExchange qaq data shape does not equal (21,)'
assert len(
r.dtype.names) == 11, 'SwrExchange qaq data dtype does not have 11 entries'
assert r is not None, \
'SwrExchange could not read data with get_data(idx=)'
assert r.shape == (21,), \
'SwrExchange qaq data shape does not equal (21,)'
assert len(r.dtype.names) == 11, \
'SwrExchange qaq data dtype does not have 11 entries'
# plt.bar(range(21), r['qaq'])
# plt.show()
@ -226,28 +229,30 @@ def test_swr_binary_qaq(ipos=3):
for kkk in kswrkstpkper:
r = sobj.get_data(kswrkstpkper=kkk)
assert r is not None, 'SwrExchange could not read data with get_data(kswrkstpkper=)'
assert r.shape == (
21,), 'SwrExchange qaq data shape does not equal (21,)'
assert len(
r.dtype.names) == 11, 'SwrExchange qaq data dtype does not have 11 entries'
assert r is not None, \
'SwrExchange could not read data with get_data(kswrkstpkper=)'
assert r.shape == (21,), \
'SwrExchange qaq data shape does not equal (21,)'
assert len(r.dtype.names) == 11, \
'SwrExchange qaq data dtype does not have 11 entries'
times = sobj.get_times()
assert len(times) == 350, 'SwrExchange times length does not equal 350'
for time in times:
r = sobj.get_data(totim=time)
assert r is not None, 'SwrExchange could not read data with get_data(tottim=)'
assert r.shape == (
21,), 'SwrExchange qaq data shape does not equal (21,)'
assert len(
r.dtype.names) == 11, 'SwrExchange qaq data dtype does not have 11 entries'
assert r is not None, \
'SwrExchange could not read data with get_data(tottim=)'
assert r.shape == (21,), \
'SwrExchange qaq data shape does not equal (21,)'
assert len(r.dtype.names) == 11, \
'SwrExchange qaq data dtype does not have 11 entries'
ts = sobj.get_ts(irec=17, klay=0)
assert ts.shape == (
350,), 'SwrExchange timeseries shape does not equal (350,)'
assert len(
ts.dtype.names) == 11, 'SwrExchange time series qaq data dtype does not have 11 entries'
assert ts.shape == (350,), \
'SwrExchange timeseries shape does not equal (350,)'
assert len(ts.dtype.names) == 11, \
'SwrExchange time series qaq data dtype does not have 11 entries'
# plt.plot(ts['totim'], ts['qaq'])
# plt.show()
@ -256,13 +261,11 @@ def test_swr_binary_qaq(ipos=3):
def test_swr_binary_structure(ipos=4):
import flopy
fpth = os.path.join(pth, files[ipos])
sobj = flopy.utils.SwrStructure(fpth, verbose=True)
assert isinstance(sobj,
flopy.utils.SwrStructure), 'SwrStructure object not created'
assert isinstance(sobj, flopy.utils.SwrStructure), \
'SwrStructure object not created'
nrecords = sobj.get_nrecords()
assert nrecords == (18, 0), 'SwrStructure records does not equal (18, 0)'
@ -272,40 +275,43 @@ def test_swr_binary_structure(ipos=4):
for idx in range(ntimes):
r = sobj.get_data(idx=idx)
assert r is not None, 'SwrStructure could not read data with get_data(idx=)'
assert r.shape == (
2,), 'SwrStructure structure data shape does not equal (2,)'
assert len(
r.dtype.names) == 8, 'SwrStructure structure data dtype does not have 8 entries'
assert r is not None, \
'SwrStructure could not read data with get_data(idx=)'
assert r.shape == (2,), \
'SwrStructure structure data shape does not equal (2,)'
assert len(r.dtype.names) == 8, \
'SwrStructure structure data dtype does not have 8 entries'
kswrkstpkper = sobj.get_kswrkstpkper()
assert kswrkstpkper.shape == (
336, 3), 'SwrStructure kswrkstpkper shape does not equal (336, 3)'
assert kswrkstpkper.shape == (336, 3), \
'SwrStructure kswrkstpkper shape does not equal (336, 3)'
for kkk in kswrkstpkper:
r = sobj.get_data(kswrkstpkper=kkk)
assert r is not None, 'SwrStructure could not read data with get_data(kswrkstpkper=)'
assert r.shape == (
2,), 'SwrStructure structure data shape does not equal (2,)'
assert len(
r.dtype.names) == 8, 'SwrStructure structure data dtype does not have 8 entries'
assert r is not None, \
'SwrStructure could not read data with get_data(kswrkstpkper=)'
assert r.shape == (2,), \
'SwrStructure structure data shape does not equal (2,)'
assert len(r.dtype.names) == 8, \
'SwrStructure structure data dtype does not have 8 entries'
times = sobj.get_times()
assert len(times) == 336, 'SwrStructure times length does not equal 336'
for time in times:
r = sobj.get_data(totim=time)
assert r is not None, 'SwrStructure could not read data with get_data(tottim=)'
assert r.shape == (
2,), 'SwrStructure structure data shape does not equal (2,)'
assert len(
r.dtype.names) == 8, 'SwrStructure structure data dtype does not have 8 entries'
assert r is not None, \
'SwrStructure could not read data with get_data(tottim=)'
assert r.shape == (2,), \
'SwrStructure structure data shape does not equal (2,)'
assert len(r.dtype.names) == 8, \
'SwrStructure structure data dtype does not have 8 entries'
ts = sobj.get_ts(irec=17, istr=0)
assert ts.shape == (
336,), 'SwrStructure timeseries shape does not equal (336,)'
assert len(
ts.dtype.names) == 8, 'SwrStructure time series structure data dtype does not have 8 entries'
assert ts.shape == (336,), \
'SwrStructure timeseries shape does not equal (336,)'
assert len(ts.dtype.names) == 8, \
'SwrStructure time series structure data dtype does not have 8 entries'
# plt.plot(ts['totim'], ts['strflow'])
# plt.show()
@ -317,8 +323,6 @@ def test_swr_binary_structure(ipos=4):
def test_swr_binary_obs(ipos=5):
import flopy
fpth = os.path.join(pth, files[ipos])
sobj = flopy.utils.SwrObs(fpth)
@ -337,30 +341,34 @@ def test_swr_binary_obs(ipos=5):
assert len(times) == 336, 'SwrFile times length does not equal 336'
ts = sobj.get_data()
assert ts.shape == (
336,), 'SwrObs length of data array does not equal (336,)'
assert len(
ts.dtype.names) == 10, 'SwrObs data does not have totim + 9 observations'
assert ts.shape == (336,), \
'SwrObs length of data array does not equal (336,)'
assert len(ts.dtype.names) == 10, \
'SwrObs data does not have totim + 9 observations'
ts = sobj.get_data(obsname='OBS5')
assert ts.shape == (
336,), 'SwrObs length of data array does not equal (336,)'
assert len(
ts.dtype.names) == 2, 'SwrObs data does not have totim + 1 observation'
assert ts.shape == (336,), \
'SwrObs length of data array does not equal (336,)'
assert len(ts.dtype.names) == 2, \
'SwrObs data does not have totim + 1 observation'
# plt.plot(ts['totim'], ts['OBS5'])
# plt.show()
for idx in range(ntimes):
d = sobj.get_data(idx=idx)
assert d.shape == (1,), 'SwrObs length of data array does not equal (1,)'
assert len(d.dtype.names) == nobs+1, 'SwrObs data does not have nobs + 1'
assert d.shape == (1,), \
'SwrObs length of data array does not equal (1,)'
assert len(d.dtype.names) == nobs+1, \
'SwrObs data does not have nobs + 1'
for time in times:
d = sobj.get_data(totim=time)
assert d.shape == (1,), 'SwrObs length of data array does not equal (1,)'
assert len(d.dtype.names) == nobs+1, 'SwrObs data does not have nobs + 1'
assert d.shape == (1,), \
'SwrObs length of data array does not equal (1,)'
assert len(d.dtype.names) == nobs+1, \
'SwrObs data does not have nobs + 1'
# test get_dataframes()
try:

View File

@ -3,7 +3,10 @@ import os
import numpy as np
import flopy
testpth = os.path.join('.', 'temp')
testpth = os.path.join('.', 'temp', 't023')
# make the directory if it does not exist
if not os.path.isdir(testpth):
os.makedirs(testpth)
def test_mt3d_multispecies():
# modflow model

View File

@ -6,6 +6,11 @@ model_ws = os.path.join('..', 'examples', 'data', 'mf2005_test')
testmodels = [os.path.join(model_ws, f) for f in os.listdir(model_ws)
if f.endswith('.nam')]
mpth = os.path.join('temp', 't024')
# make the directory if it does not exist
if not os.path.isdir(mpth):
os.makedirs(mpth)
def test_checker_on_load():
# load all of the models in the mf2005_test folder
@ -24,43 +29,50 @@ def checker_on_load(mfnam):
def test_bcs_check():
mf = flopy.modflow.Modflow(version='mf2005',
model_ws='temp')
model_ws=mpth)
# test check for isolated cells
dis = flopy.modflow.ModflowDis(mf, nlay=2, nrow=3, ncol=3, top=100, botm=95)
dis = flopy.modflow.ModflowDis(mf, nlay=2, nrow=3, ncol=3, top=100,
botm=95)
bas = flopy.modflow.ModflowBas(mf, ibound=np.ones((2, 3, 3), dtype=int))
chk = bas.check()
dis = flopy.modflow.ModflowDis(mf, nlay=3, nrow=5, ncol=5, top=100, botm=95)
dis = flopy.modflow.ModflowDis(mf, nlay=3, nrow=5, ncol=5, top=100,
botm=95)
ibound = np.zeros((3, 5, 5), dtype=int)
ibound[1, 1, 1] = 1 # fully isolated cell
ibound[0:2, 4, 4] = 1 # cell connected vertically to one other cell
ibound[1, 1, 1] = 1 # fully isolated cell
ibound[0:2, 4, 4] = 1 # cell connected vertically to one other cell
bas = flopy.modflow.ModflowBas(mf, ibound=ibound)
chk = bas.check()
assert chk.summary_array['desc'][0] == 'isolated cells in ibound array'
assert chk.summary_array.i[0] == 1 and chk.summary_array.i[0] == 1 and chk.summary_array.j[0] == 1
assert chk.summary_array.i[0] == 1 and chk.summary_array.i[0] == 1 and \
chk.summary_array.j[0] == 1
assert len(chk.summary_array) == 1
ghb = flopy.modflow.ModflowGhb(mf, stress_period_data={0: [0, 0, 0, 100, 1]})
riv = flopy.modflow.ModflowRiv(mf, stress_period_data={0: [[0, 0, 0, 101, 10, 100],
[0, 0, 1, 80, 10, 90]]})
ghb = flopy.modflow.ModflowGhb(mf,
stress_period_data={0: [0, 0, 0, 100, 1]})
riv = flopy.modflow.ModflowRiv(mf, stress_period_data={
0: [[0, 0, 0, 101, 10, 100],
[0, 0, 1, 80, 10, 90]]})
chk = ghb.check()
assert chk.summary_array['desc'][0] == 'BC in inactive cell'
chk = riv.check()
assert chk.summary_array['desc'][4] == 'RIV stage below rbots'
assert np.array_equal(chk.summary_array['j'], np.array([0, 1, 1, 1, 1]))
def test_properties_check():
def test_properties_check():
# test that storage values ignored for steady state
mf = flopy.modflow.Modflow(version='mf2005',
model_ws='temp')
dis = flopy.modflow.ModflowDis(mf, nrow=2, ncol=2, top=np.array([[100, np.nan],
[100, 100]]),
model_ws=mpth)
dis = flopy.modflow.ModflowDis(mf, nrow=2, ncol=2,
top=np.array([[100, np.nan],
[100, 100]]),
nper=3, steady=True)
chk = dis.check()
assert len(chk.summary_array) == 1
kij = (chk.summary_array['k'][0],chk.summary_array['i'][0],chk.summary_array['j'][0])
kij = (chk.summary_array['k'][0], chk.summary_array['i'][0],
chk.summary_array['j'][0])
assert kij == (0, 0, 1)
lpf = flopy.modflow.ModflowLpf(mf, sy=np.ones((2, 2)), ss=np.ones((2, 2)))
chk = lpf.check()
@ -73,14 +85,18 @@ def test_properties_check():
vka=np.array([[1e10, 0], [1, 1e-20]]))
chk = lpf.check()
ind1 = np.array([True if list(inds) == [0, 1, 1]
else False for inds in chk.view_summary_array_fields(['k', 'i', 'j'])])
else False for inds in
chk.view_summary_array_fields(['k', 'i', 'j'])])
ind1_errors = chk.summary_array[ind1]['desc']
ind2 = np.array([True if list(inds) == [0, 0, 1]
else False for inds in chk.view_summary_array_fields(['k', 'i', 'j'])])
else False for inds in
chk.view_summary_array_fields(['k', 'i', 'j'])])
ind2_errors = chk.summary_array[ind2]['desc']
ind3 = np.array([True if list(inds) == [0, 0, 0]
else False for inds in chk.view_summary_array_fields(['k', 'i', 'j'])])
else False for inds in
chk.view_summary_array_fields(['k', 'i', 'j'])])
ind3_errors = chk.summary_array[ind3]['desc']
assert 'zero or negative horizontal hydraulic conductivity values' in ind1_errors
assert 'horizontal hydraulic conductivity values below checker threshold of 1e-11' in ind1_errors
assert 'negative horizontal anisotropy values' in ind1_errors
@ -89,8 +105,9 @@ def test_properties_check():
assert 'zero or negative vertical hydraulic conductivity values' in ind2_errors
assert 'vertical hydraulic conductivity values above checker threshold of 100000.0' in ind3_errors
if __name__ == '__main__':
#for mfnam in testmodels:
# for mfnam in testmodels:
# checker_on_load(mfnam)
#test_bcs_check()
# test_bcs_check()
test_properties_check()
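The kij assertion earlier in test_properties_check pins down where the nan sits in the top array; a worked check of that location using the same array values:

import numpy as np

top = np.array([[100, np.nan],
                [100, 100]])
i, j = np.argwhere(np.isnan(top))[0]
print((0, int(i), int(j)))   # (0, 0, 1) -- layer 0, row 0, column 1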

View File

@ -7,8 +7,11 @@ import os
import numpy as np
import flopy
workspace = os.path.join('temp', 't026')
# make the directory if it does not exist
if not os.path.isdir(workspace):
os.makedirs(workspace)
workspace = os.path.join('temp')
seawat_exe = 'swt_v4'
isseawat = flopy.which(seawat_exe)
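The same three-line "make the directory if it does not exist" block is repeated near the top of most of the reworked test modules. A small shared helper could collapse the pattern into one call; the sketch below is only a suggestion, and ensure_dir is a hypothetical name rather than anything this commit adds.

import os

def ensure_dir(pth):
    # create the per-test output directory if it is not already there, then hand the path back
    if not os.path.isdir(pth):
        os.makedirs(pth)
    return pth

workspace = ensure_dir(os.path.join('temp', 't026'))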
@ -43,11 +46,11 @@ ssm_data[0] = ssm_sp1
def test_seawat_henry():
#SEAWAT model from a modflow model and an mt3d model
# SEAWAT model from a modflow model and an mt3d model
modelname = 'henry'
mf = flopy.modflow.Modflow(modelname, exe_name='swt_v4',
model_ws=workspace)
#shortened perlen to 0.1 to make this run faster -- should be about 0.5
# shortened perlen to 0.1 to make this run faster -- should be about 0.5
dis = flopy.modflow.ModflowDis(mf, nlay, nrow, ncol, nper=1, delr=delr,
delc=delc, laycbd=0, top=henry_top,
botm=henry_botm, perlen=0.1, nstp=15)
@ -88,10 +91,10 @@ def test_seawat_henry():
def test_seawat2_henry():
#SEAWAT model directly by adding packages
# SEAWAT model directly by adding packages
modelname = 'henry2'
m = flopy.seawat.swt.Seawat(modelname, 'nam', model_ws=workspace,
exe_name='swt_v4')
exe_name='swt_v4')
dis = flopy.modflow.ModflowDis(m, nlay, nrow, ncol, nper=1, delr=delr,
delc=delc, laycbd=0, top=henry_top,
botm=henry_botm, perlen=0.1, nstp=15)

View File

@ -2,6 +2,7 @@
test MNW2 package
"""
import sys
sys.path.insert(0, '..')
import os
import flopy
@ -10,19 +11,24 @@ import netCDF4
from flopy.utils.flopy_io import line_parse
from flopy.utils.util_list import MfList
cpth = os.path.join('temp')
cpth = os.path.join('temp', 't027')
# make the directory if it does not exist
if not os.path.isdir(cpth):
os.makedirs(cpth)
mf2005pth = os.path.join('..', 'examples', 'data', 'mnw2_examples')
def test_line_parse():
# ensure that line_parse is working correctly
# comment handling
line = line_parse('Well-A -1 ; 2a. WELLID,NNODES')
assert line == ['Well-A', '-1']
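line_parse() drops everything after the comment marker and splits what is left into string tokens. A second, hedged example of the same behavior (the input string is mine, not from the test):

from flopy.utils.flopy_io import line_parse

# the trailing '; ...' comment is removed before the remaining tokens are split
assert line_parse('10  250.0  ; NROW, DELR') == ['10', '250.0']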
def test_load():
def test_load():
# load in the test problem (1 well, 3 stress periods)
m = flopy.modflow.Modflow.load('MNW2-Fig28.nam', model_ws=mf2005pth, verbose=True, forgive=False)
m = flopy.modflow.Modflow.load('MNW2-Fig28.nam', model_ws=mf2005pth,
verbose=True, forgive=False)
m.change_model_ws(cpth)
assert 'MNW2' in m.get_package_list()
assert 'MNWI' in m.get_package_list()
@ -38,91 +44,123 @@ def test_load():
mnw2_2.node_data.sort(order='wellid')
mnw2_3.node_data.sort(order='wellid')
assert np.array_equal(mnw2_2.node_data, mnw2_3.node_data)
assert (mnw2_2.stress_period_data[0].qdes - mnw2_3.stress_period_data[0].qdes).max() < 0.01
assert np.abs(mnw2_2.stress_period_data[0].qdes - mnw2_3.stress_period_data[0].qdes).min() < 0.01
assert (mnw2_2.stress_period_data[0].qdes - mnw2_3.stress_period_data[
0].qdes).max() < 0.01
assert np.abs(
mnw2_2.stress_period_data[0].qdes - mnw2_3.stress_period_data[
0].qdes).min() < 0.01
def test_make_package():
m4 = flopy.modflow.Modflow('mnw2example', model_ws=cpth)
dis = flopy.modflow.ModflowDis(nrow=5, ncol=5, nlay=3, nper=3, top=10, botm=0, model=m4)
dis = flopy.modflow.ModflowDis(nrow=5, ncol=5, nlay=3, nper=3, top=10,
botm=0, model=m4)
# make the package from the tables (ztop, zbotm format)
node_data = np.array([(0, 1, 1, 9.5, 7.1, 'well1', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 6.2),
(1, 1, 1, 7.1, 5.1, 'well1', 'skin', -1, 0, 0, 0, 0.5, 2.0, 5.0, 6.2),
(2, 3, 3, 9.1, 3.7, 'well2', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 4.1)],
dtype=[('index', '<i8'), ('i', '<i8'), ('j', '<i8'),
('ztop', '<f8'), ('zbotm', '<f8'),
('wellid', 'O'), ('losstype', 'O'), ('pumploc', '<i8'),
('qlimit', '<i8'), ('ppflag', '<i8'), ('pumpcap', '<i8'),
('rw', '<f8'), ('rskin', '<f8'), ('kskin', '<f8'),
('zpump', '<f8')]).view(np.recarray)
node_data = np.array(
[(0, 1, 1, 9.5, 7.1, 'well1', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 6.2),
(1, 1, 1, 7.1, 5.1, 'well1', 'skin', -1, 0, 0, 0, 0.5, 2.0, 5.0, 6.2),
(
2, 3, 3, 9.1, 3.7, 'well2', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 4.1)],
dtype=[('index', '<i8'), ('i', '<i8'), ('j', '<i8'),
('ztop', '<f8'), ('zbotm', '<f8'),
('wellid', 'O'), ('losstype', 'O'), ('pumploc', '<i8'),
('qlimit', '<i8'), ('ppflag', '<i8'), ('pumpcap', '<i8'),
('rw', '<f8'), ('rskin', '<f8'), ('kskin', '<f8'),
('zpump', '<f8')]).view(np.recarray)
stress_period_data = {0: np.array([(0, 0, 'well1', 0), (1, 0, 'well2', 0)],
dtype=[('index', '<i8'), ('per', '<i8'), ('wellid', 'O'), ('qdes', '<i8')]).view(np.recarray),
1: np.array([(2, 1, 'well1', 100), (3, 1, 'well2', 1000)],
dtype=[('index', '<i8'), ('per', '<i8'), ('wellid', 'O'), ('qdes', '<i8')]).view(np.recarray)}
dtype=[('index', '<i8'), ('per', '<i8'),
('wellid', 'O'),
('qdes', '<i8')]).view(
np.recarray),
1: np.array(
[(2, 1, 'well1', 100), (3, 1, 'well2', 1000)],
dtype=[('index', '<i8'), ('per', '<i8'),
('wellid', 'O'), ('qdes', '<i8')]).view(
np.recarray)}
mnw2_4 = flopy.modflow.ModflowMnw2(model=m4, mnwmax=2, nodtot=3,
node_data=node_data,
stress_period_data=stress_period_data,
itmp=[2, 2, -1], # reuse second per pumping for last stress period
)
node_data=node_data,
stress_period_data=stress_period_data,
itmp=[2, 2, -1],
# reuse second per pumping for last stress period
)
m4.write_input()
# make the package from the tables (k, i, j format)
node_data = np.array([(0, 3, 1, 1, 'well1', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 6.2),
(1, 2, 1, 1, 'well1', 'skin', -1, 0, 0, 0, 0.5, 2.0, 5.0, 6.2),
(2, 1, 3, 3, 'well2', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 4.1)],
dtype=[('index', '<i8'), ('k', '<i8'), ('i', '<i8'), ('j', '<i8'),
('wellid', 'O'), ('losstype', 'O'), ('pumploc', '<i8'),
('qlimit', '<i8'), ('ppflag', '<i8'), ('pumpcap', '<i8'),
('rw', '<f8'), ('rskin', '<f8'), ('kskin', '<f8'),
('zpump', '<f8')]).view(np.recarray)
node_data = np.array(
[(0, 3, 1, 1, 'well1', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 6.2),
(1, 2, 1, 1, 'well1', 'skin', -1, 0, 0, 0, 0.5, 2.0, 5.0, 6.2),
(2, 1, 3, 3, 'well2', 'skin', -1, 0, 0, 0, 1.0, 2.0, 5.0, 4.1)],
dtype=[('index', '<i8'), ('k', '<i8'), ('i', '<i8'), ('j', '<i8'),
('wellid', 'O'), ('losstype', 'O'), ('pumploc', '<i8'),
('qlimit', '<i8'), ('ppflag', '<i8'), ('pumpcap', '<i8'),
('rw', '<f8'), ('rskin', '<f8'), ('kskin', '<f8'),
('zpump', '<f8')]).view(np.recarray)
stress_period_data = {0: np.array([(0, 0, 'well1', 0), (1, 0, 'well2', 0)],
dtype=[('index', '<i8'), ('per', '<i8'), ('wellid', 'O'), ('qdes', '<i8')]).view(np.recarray),
1: np.array([(2, 1, 'well1', 100), (3, 1, 'well2', 1000)],
dtype=[('index', '<i8'), ('per', '<i8'), ('wellid', 'O'), ('qdes', '<i8')]).view(np.recarray)}
dtype=[('index', '<i8'), ('per', '<i8'),
('wellid', 'O'),
('qdes', '<i8')]).view(
np.recarray),
1: np.array(
[(2, 1, 'well1', 100), (3, 1, 'well2', 1000)],
dtype=[('index', '<i8'), ('per', '<i8'),
('wellid', 'O'), ('qdes', '<i8')]).view(
np.recarray)}
mnw2_4 = flopy.modflow.ModflowMnw2(model=m4, mnwmax=2, nodtot=3,
node_data=node_data,
stress_period_data=stress_period_data,
itmp=[2, 2, -1], # reuse second per pumping for last stress period
)
node_data=node_data,
stress_period_data=stress_period_data,
itmp=[2, 2, -1],
# reuse second per pumping for last stress period
)
spd = m4.mnw2.stress_period_data[0]
inds = spd.k, spd.i, spd.j
assert np.array_equal(np.array(inds).transpose(), np.array([(2, 1, 1), (1, 3, 3)]))
assert np.array_equal(np.array(inds).transpose(),
np.array([(2, 1, 1), (1, 3, 3)]))
m4.write_input()
# make the package from the objects
mnw2fromobj = flopy.modflow.ModflowMnw2(model=m4, mnwmax=2,
mnw=mnw2_4.mnw,
itmp=[2, 2, -1], # reuse second per pumping for last stress period
)
mnw=mnw2_4.mnw,
itmp=[2, 2, -1],
# reuse second per pumping for last stress period
)
# verify that the two input methods produce the same results
assert np.array_equal(mnw2_4.stress_period_data[1], mnw2fromobj.stress_period_data[1])
assert np.array_equal(mnw2_4.stress_period_data[1],
mnw2fromobj.stress_period_data[1])
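The node_data and stress_period_data recarrays above are verbose to build by hand. An equivalent construction with np.rec.fromrecords is sketched below on the assumption that the same dtype is wanted; it is not part of the commit, just a compact alternative that keeps the records and the dtype side by side.

import numpy as np

spd_dtype = [('index', '<i8'), ('per', '<i8'), ('wellid', 'O'), ('qdes', '<i8')]
# period 0: both wells shut off; period 1: well1 pumps 100, well2 pumps 1000
stress_period_data = {
    0: np.rec.fromrecords([(0, 0, 'well1', 0), (1, 0, 'well2', 0)], dtype=spd_dtype),
    1: np.rec.fromrecords([(2, 1, 'well1', 100), (3, 1, 'well2', 1000)], dtype=spd_dtype)}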
def test_export():
"""test export of package."""
m = flopy.modflow.Modflow.load('MNW2-Fig28.nam', model_ws=mf2005pth,
load_only=['dis', 'bas6', 'mnwi', 'mnw2', 'wel'], verbose=True, check=False)
load_only=['dis', 'bas6', 'mnwi', 'mnw2',
'wel'], verbose=True,
check=False)
m.wel.export(os.path.join(cpth, 'MNW2-Fig28_well.nc'))
m.mnw2.export(os.path.join(cpth, 'MNW2-Fig28.nc'))
nc = netCDF4.Dataset('../autotest/temp/MNW2-Fig28.nc')
fpth = os.path.join(cpth, 'MNW2-Fig28.nc')
nc = netCDF4.Dataset(fpth)
assert np.array_equal(nc.variables['mnw2_qdes'][:, 0, 29, 40],
np.array([0., -10000., -10000.], dtype='float32'))
assert np.sum(nc.variables['mnw2_rw'][:, :, 29, 40]) - 5.1987 < 1e-4
# need to add shapefile test
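A minimal sketch of the shapefile test the comment above asks for, assuming the MNW2 package export accepts a .shp path the same way it accepts the .nc paths used above; this is a suggestion, not part of the commit.

def test_export_shapefile():
    m = flopy.modflow.Modflow.load('MNW2-Fig28.nam', model_ws=mf2005pth,
                                   load_only=['dis', 'bas6', 'mnw2'],
                                   verbose=True, check=False)
    fpth = os.path.join(cpth, 'MNW2-Fig28.shp')
    # hypothetical: export the package to a shapefile and confirm the file landed on disk
    m.mnw2.export(fpth)
    assert os.path.exists(fpth), 'MNW2 shapefile was not written'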
def test_checks():
m = flopy.modflow.Modflow.load('MNW2-Fig28.nam', model_ws=mf2005pth,
load_only=['dis', 'bas6', 'mnwi', 'wel'], verbose=True, check=False)
load_only=['dis', 'bas6', 'mnwi', 'wel'],
verbose=True, check=False)
chk = m.check()
assert 'MNWI package present without MNW2 packge.' in '.'.join(chk.summary_array.desc)
assert 'MNWI package present without MNW2 packge.' in '.'.join(
chk.summary_array.desc)
if __name__ == '__main__':
test_line_parse()
test_load()
test_make_package()
test_export()
test_checks()
test_checks()

View File

@ -4,7 +4,10 @@ import flopy
pthtest = os.path.join('..', 'examples', 'data', 'swtv4_test')
newpth = os.path.join('.', 'temp')
newpth = os.path.join('.', 'temp', 't028')
# make the directory if it does not exist
if not os.path.isdir(newpth):
os.makedirs(newpth)
swtv4_exe = 'swt_v4'
isswtv4 = flopy.which(swtv4_exe)
runmodel = False

View File

@ -1,10 +1,8 @@
import os
import flopy
import matplotlib.pyplot as plt
pthtest = os.path.join('..', 'examples', 'data', 'mfgrd_test')
newpth = os.path.join('.', 'temp')
def test_mfgrddis():
grbnam = 'nwtp3.dis.grb'
@ -26,7 +24,6 @@ def test_mfgrddis():
assert len(iverts) == 6400, errmsg
def test_mfgrddisv():
fn = os.path.join(pthtest, 'flow.disv.grb')
disv = flopy.utils.MfGrdFile(fn, verbose=True)
@ -37,6 +34,7 @@ def test_mfgrddisv():
errmsg = 'ncells of flow.disv {} not equal to 218.'.format(len(iverts))
assert len(iverts) == 218, errmsg
if __name__ == '__main__':
test_mfgrddis()
test_mfgrddisv()

View File

@ -1,8 +1,10 @@
import os
import flopy
newpth = os.path.join('.', 'temp')
newpth = os.path.join('.', 'temp', 't030')
# make the directory if it does not exist
if not os.path.isdir(newpth):
os.makedirs(newpth)
def test_vdf_vsc():
@ -43,7 +45,5 @@ def test_vdf_vsc():
return
if __name__ == '__main__':
test_vdf_vsc()

View File

@ -2,6 +2,7 @@
test modpath functionality
"""
import sys
sys.path.insert(0, '..')
import glob
import shutil
@ -13,41 +14,44 @@ from flopy.utils.modpathfile import EndpointFile, PathlineFile
from flopy.modpath.mpsim import StartingLocationsFile
mffiles = glob.glob('../examples/data/mp6/EXAMPLE*')
path = os.path.join('temp', 'mp6')
path = os.path.join('temp', 't031')
if not os.path.isdir(path):
os.makedirs(path)
for f in mffiles:
shutil.copy(f, os.path.join(path, os.path.split(f)[1]))
def test_mpsim():
def test_mpsim():
model_ws = path
m = flopy.modflow.Modflow.load('EXAMPLE.nam', model_ws=model_ws)
m.get_package_list()
mp = flopy.modpath.Modpath(modelname='ex6',
exe_name='mp6',
modflowmodel=m,
model_ws=path,
dis_file=m.name+'.dis',
head_file=m.name+'.hed',
budget_file=m.name+'.bud')
exe_name='mp6',
modflowmodel=m,
model_ws=path,
dis_file=m.name + '.dis',
head_file=m.name + '.hed',
budget_file=m.name + '.bud')
mpb = flopy.modpath.ModpathBas(mp, hdry=m.lpf.hdry, laytyp=m.lpf.laytyp, ibound=1, prsity=0.1)
mpb = flopy.modpath.ModpathBas(mp, hdry=m.lpf.hdry, laytyp=m.lpf.laytyp,
ibound=1, prsity=0.1)
sim = mp.create_mpsim(trackdir='forward', simtype='endpoint', packages='RCH')
sim = mp.create_mpsim(trackdir='forward', simtype='endpoint',
packages='RCH')
mp.write_input()
# replace the well with an mnw
node_data = np.array([(3, 12, 12, 'well1', 'skin', -1, 0, 0, 0, 1., 2., 5., 6.2),
(4, 12, 12, 'well1', 'skin', -1, 0, 0, 0, 0.5, 2., 5., 6.2)],
dtype=[('k', np.int), ('i', np.int), ('j', np.int),
('wellid', np.object), ('losstype', np.object),
('pumploc', np.int), ('qlimit', np.int),
('ppflag', np.int), ('pumpcap', np.int),
('rw', np.float), ('rskin', np.float),
('kskin', np.float), ('zpump', np.float)]).view(np.recarray)
node_data = np.array(
[(3, 12, 12, 'well1', 'skin', -1, 0, 0, 0, 1., 2., 5., 6.2),
(4, 12, 12, 'well1', 'skin', -1, 0, 0, 0, 0.5, 2., 5., 6.2)],
dtype=[('k', np.int), ('i', np.int), ('j', np.int),
('wellid', np.object), ('losstype', np.object),
('pumploc', np.int), ('qlimit', np.int),
('ppflag', np.int), ('pumpcap', np.int),
('rw', np.float), ('rskin', np.float),
('kskin', np.float), ('zpump', np.float)]).view(np.recarray)
stress_period_data = {0: np.array([(0, 'well1', -150000.0)],
dtype=[('per', np.int),
@ -60,7 +64,8 @@ def test_mpsim():
itmp=[1, -1, -1])
# test creation of modpath simulation file for MNW2
# (not a very robust test)
sim = mp.create_mpsim(trackdir='backward', simtype='pathline', packages='MNW2')
sim = mp.create_mpsim(trackdir='backward', simtype='pathline',
packages='MNW2')
mp.write_input()
sim = flopy.modpath.ModpathSim(model=mp)
@ -79,11 +84,14 @@ def test_mpsim():
assert int(stllines[4].strip()) == 2
assert stllines[6].strip().split()[-1] == 'p2'
def test_get_destination_data():
m = flopy.modflow.Modflow.load('EXAMPLE.nam', model_ws=path)
m.sr = SpatialReference(delr=m.dis.delr, delc=m.dis.delc, xul=0, yul=0, rotation=30)
sr = SpatialReference(delr=list(m.dis.delr), delc=list(m.dis.delc), xul=1000, yul=1000, rotation=30)
m.sr = SpatialReference(delr=m.dis.delr, delc=m.dis.delc, xul=0, yul=0,
rotation=30)
sr = SpatialReference(delr=list(m.dis.delr), delc=list(m.dis.delc),
xul=1000, yul=1000, rotation=30)
sr2 = SpatialReference(xll=sr.xll, yll=sr.yll, rotation=-30)
m.dis.export(path + '/dis.shp')
@ -94,12 +102,14 @@ def test_get_destination_data():
well_pthld = pthld.get_destination_pathline_data(dest_cells=[(4, 12, 12)])
# same particle IDs should be in both endpoint data and pathline data
assert len(set(well_epd.particleid).difference(set(well_pthld.particleid))) == 0
assert len(
set(well_epd.particleid).difference(set(well_pthld.particleid))) == 0
# check that all starting locations are included in the pathline data
# (pathline data slice not just endpoints)
starting_locs = well_epd[['k0', 'i0', 'j0']]
pathline_locs = np.array(well_pthld[['k', 'i', 'j']].tolist(), dtype=starting_locs.dtype)
pathline_locs = np.array(well_pthld[['k', 'i', 'j']].tolist(),
dtype=starting_locs.dtype)
assert np.all(np.in1d(starting_locs, pathline_locs))
# test writing a shapefile of endpoints
@ -108,23 +118,28 @@ def test_get_destination_data():
sr=m.sr)
# test writing shapefile of pathlines
fpth = os.path.join(path, 'pathlines_1per.shp')
pthld.write_shapefile(well_pthld, one_per_particle=True,
direction='starting', sr=m.sr,
shpname='temp/mp6/pathlines_1per.shp')
shpname=fpth)
fpth = os.path.join(path, 'pathlines_1per_end.shp')
pthld.write_shapefile(well_pthld, one_per_particle=True,
direction='ending', sr=m.sr,
shpname='temp/mp6/pathlines_1per_end.shp')
shpname=fpth)
# test writing shapefile of pathlines
fpth = os.path.join(path, 'pathlines_1per2.shp')
pthld.write_shapefile(well_pthld, one_per_particle=True,
direction='starting', sr=sr,
shpname='temp/mp6/pathlines_1per2.shp')
shpname=fpth)
# test writing shapefile of pathlines
fpth = os.path.join(path, 'pathlines_1per2_ll.shp')
pthld.write_shapefile(well_pthld, one_per_particle=True,
direction='starting', sr=sr2,
shpname='temp/mp6/pathlines_1per2_ll.shp')
shpname=fpth)
fpth = os.path.join(path, 'pathlines.shp')
pthld.write_shapefile(well_pthld, one_per_particle=False,
sr=m.sr,
shpname='temp/mp6/pathlines.shp')
shpname=fpth)
# test that endpoints were rotated and written correctly
from flopy.export.shapefile_utils import shp2recarray
@ -133,7 +148,8 @@ def test_get_destination_data():
xorig, yorig = m.sr.transform(well_epd.x0[0], well_epd.y0[0])
assert p3.x - xorig + p3.y - yorig < 1e-4
xorig, yorig = m.sr.xcentergrid[3, 4], m.sr.ycentergrid[3, 4]
assert np.abs(p3.x - xorig + p3.y - yorig) < 1e-4 # this also checks for 1-based
assert np.abs(
p3.x - xorig + p3.y - yorig) < 1e-4 # this also checks for 1-based
# test that particle attribute information is consistent with pathline file
ra = shp2recarray(os.path.join(path, 'pathlines.shp'))
@ -150,39 +166,51 @@ def test_get_destination_data():
# test use of arbitrary spatial reference and offset
ra = shp2recarray(os.path.join(path, 'pathlines_1per2.shp'))
p3_2 = ra.geometry[ra.particleid == 4][0]
assert np.abs(p3_2.x[0] - sr.xcentergrid[3, 4] + p3_2.y[0] - sr.ycentergrid[3, 4]) < 1e-4
assert np.abs(
p3_2.x[0] - sr.xcentergrid[3, 4] + p3_2.y[0] - sr.ycentergrid[
3, 4]) < 1e-4
# arbitrary spatial reference with ll specified instead of ul
ra = shp2recarray(os.path.join(path, 'pathlines_1per2_ll.shp'))
p3_2 = ra.geometry[ra.particleid == 4][0]
sr3 = SpatialReference(xll=sr.xll, yll=sr.yll, rotation=-30, delr=list(m.dis.delr), delc=list(m.dis.delc))
assert np.abs(p3_2.x[0] - sr3.xcentergrid[3, 4] + p3_2.y[0] - sr3.ycentergrid[3, 4]) < 1e-4
sr3 = SpatialReference(xll=sr.xll, yll=sr.yll, rotation=-30,
delr=list(m.dis.delr), delc=list(m.dis.delc))
assert np.abs(
p3_2.x[0] - sr3.xcentergrid[3, 4] + p3_2.y[0] - sr3.ycentergrid[
3, 4]) < 1e-4
xul = 3628793
yul = 21940389
m = flopy.modflow.Modflow.load('EXAMPLE.nam', model_ws=path)
m.sr = flopy.utils.reference.SpatialReference(delr=m.dis.delr, delc=m.dis.delc, lenuni=1,
xul=xul, yul=yul, rotation=0.0)
m.dis.export(path + '/dis2.shp')
m.sr = flopy.utils.reference.SpatialReference(delr=m.dis.delr,
delc=m.dis.delc, lenuni=1,
xul=xul, yul=yul,
rotation=0.0)
fpth = os.path.join(path, 'dis2.shp')
m.dis.export(fpth)
pthobj = flopy.utils.PathlineFile(os.path.join(path, 'EXAMPLE-3.pathline'))
pthobj.write_shapefile(shpname='temp/mp6/pathlines_1per3.shp',
fpth = os.path.join(path, 'pathlines_1per3.shp')
pthobj.write_shapefile(shpname=fpth,
direction='ending',
sr=m.sr)
def test_loadtxt():
from flopy.utils.flopy_io import loadtxt
pthfile = os.path.join(path, 'EXAMPLE-3.pathline')
pthld = PathlineFile(pthfile)
ra = loadtxt(pthfile, delimiter=' ', skiprows=3, dtype=pthld.dtype)
ra2 = loadtxt(pthfile, delimiter=' ', skiprows=3, dtype=pthld.dtype, use_pandas=False)
ra2 = loadtxt(pthfile, delimiter=' ', skiprows=3, dtype=pthld.dtype,
use_pandas=False)
assert np.array_equal(ra, ra2)
#epfilewithnans = os.path.join('../examples/data/mp6/', 'freybergmp.mpend')
#epd = EndpointFile(epfilewithnans)
# epfilewithnans = os.path.join('../examples/data/mp6/', 'freybergmp.mpend')
# epd = EndpointFile(epfilewithnans)
if __name__ == '__main__':
#test_mpsim()
# test_mpsim()
test_get_destination_data()
#test_loadtxt()
# test_loadtxt()

View File

@ -2,9 +2,11 @@
Test shapefile stuff
"""
import sys
import os
if sys.version_info[0] == 3:
if sys.version_info[1] < 4:
from imp import reload # python < 3.4
from imp import reload # python < 3.4
else:
from importlib import reload
# python2 (reload in default namespace)
@ -17,19 +19,25 @@ from flopy.utils.reference import SpatialReference
from flopy.export.shapefile_utils import recarray2shp, shp2recarray
from flopy.utils.reference import getprj, epsgRef
mpth = os.path.join('temp', 't032')
# make the directory if it does not exist
if not os.path.isdir(mpth):
os.makedirs(mpth)
def test_polygon_from_ij():
"""test creation of a polygon from an i, j location using get_vertices()."""
m = flopy.modflow.Modflow('toy_model', model_ws='temp')
m = flopy.modflow.Modflow('toy_model', model_ws=mpth)
botm = np.zeros((2, 10, 10))
botm[0, :, :] = 1.5
botm[1, 5, 5] = 4 # negative layer thickness!
botm[1, 5, 5] = 4 # negative layer thickness!
botm[1, 6, 6] = 4
dis = flopy.modflow.ModflowDis(nrow=10, ncol=10,
nlay=2, delr=100, delc=100,
top=3, botm=botm, model=m)
m.sr = SpatialReference(delr=m.dis.delr * .3048, delc=m.dis.delc * .3048, xul=600000, yul=5170000,
m.sr = SpatialReference(delr=m.dis.delr * .3048, delc=m.dis.delc * .3048,
xul=600000, yul=5170000,
proj4_str='EPSG:26715', rotation=-45)
recarray = np.array([(0, 5, 5, .1, True, 's0'),
@ -39,21 +47,28 @@ def test_polygon_from_ij():
('stuff', '<f4'), ('stuf', '|b1'),
('stf', np.object)]).view(np.recarray)
get_vertices = m.sr.get_vertices # function to get the referenced vertices for a model cell
geoms = [Polygon(get_vertices(i, j)) for i, j in zip(recarray.i, recarray.j)]
get_vertices = m.sr.get_vertices # function to get the referenced vertices for a model cell
geoms = [Polygon(get_vertices(i, j)) for i, j in
zip(recarray.i, recarray.j)]
assert geoms[0].type == 'Polygon'
assert geoms[0].bounds[-1] - 5169784.473861726 < 1e-6
recarray2shp(recarray, geoms, 'temp/test.shp', epsg=26715)
fpth = os.path.join(mpth, 'test.shp')
recarray2shp(recarray, geoms, fpth, epsg=26715)
import epsgref
reload(epsgref)
from epsgref import prj
assert 26715 in prj
shutil.copy('temp/test.prj', 'temp/26715.prj')
recarray2shp(recarray, geoms, 'temp/test.shp', prj='temp/26715.prj')
fpth = os.path.join(mpth, 'test.prj')
fpth2 = os.path.join(mpth, '26715.prj')
shutil.copy(fpth, fpth2)
fpth = os.path.join(mpth, 'test.shp')
recarray2shp(recarray, geoms, fpth, prj=fpth2)
def test_dtypes():
ra = shp2recarray('temp/test.shp')
fpth = os.path.join(mpth, 'test.shp')
ra = shp2recarray(fpth)
assert "int" in ra.dtype['k'].name
assert "float" in ra.dtype['stuff'].name
assert "bool" in ra.dtype['stuf'].name
@ -62,7 +77,6 @@ def test_dtypes():
def test_epsgref():
ep = epsgRef()
ep.reset()
@ -73,7 +87,7 @@ def test_epsgref():
assert 4326 in prj
ep.add(9999, 'junk')
ep._remove_pyc() # have to do this in python 2, otherwise won't refresh
ep._remove_pyc() # have to do this in python 2, otherwise won't refresh
reload(epsgref)
from epsgref import prj
assert 9999 in prj
@ -90,7 +104,8 @@ def test_epsgref():
from epsgref import prj
assert len(prj) == 0
if __name__ == '__main__':
test_polygon_from_ij()
test_dtypes()
test_epsgref()
test_epsgref()

View File

@ -3,9 +3,13 @@ import numpy as np
import flopy
from flopy.utils.util_array import Util2d
newpth = os.path.join('.', 'temp')
newpth = os.path.join('.', 'temp', 't033')
# make the directory if it does not exist
if not os.path.isdir(newpth):
os.makedirs(newpth)
startpth = os.getcwd()
def test_rchload():
nlay = 2
nrow = 3
@ -15,7 +19,7 @@ def test_rchload():
# create model 1
m1 = flopy.modflow.Modflow('rchload1', model_ws=newpth)
dis1 = flopy.modflow.ModflowDis(m1, nlay=nlay, nrow=nrow, ncol=ncol,
nper=nper)
nper=nper)
a = np.random.random((nrow, ncol))
rech1 = Util2d(m1, (nrow, ncol), np.float32, a, 'rech', cnstnt=1.0,
how='openclose')
@ -51,5 +55,6 @@ def test_rchload():
assert np.allclose(a1, a2)
os.chdir(startpth)
if __name__ == '__main__':
test_rchload()

View File

@ -2,6 +2,7 @@
test UZF package
"""
import sys
sys.path.insert(0, '..')
import os
import shutil
@ -11,34 +12,37 @@ import flopy
from flopy.utils.util_array import Util2d
import numpy as np
cpth = os.path.join('temp/uzf')
cpth = os.path.join('temp', 't034')
if not os.path.isdir(cpth):
os.makedirs(cpth)
def test_load_and_write():
def test_load_and_write():
# load in the test problem
m = flopy.modflow.Modflow('UZFtest2', model_ws=cpth, verbose=True)
m.model_ws = 'temp'
m.model_ws = cpth
path = os.path.join('..', 'examples', 'data', 'mf2005_test')
dis = flopy.modflow.ModflowDis.load(path + '/UZFtest2.dis', m)
uzf = flopy.modflow.ModflowUzf1.load(path + '/UZFtest2.uzf', m)
dis = flopy.modflow.ModflowDis.load(os.path.join(path, 'UZFtest2.dis'), m)
uzf = flopy.modflow.ModflowUzf1.load(os.path.join(path, 'UZFtest2.uzf'), m)
assert np.sum(uzf.iuzfbnd.array) == 116
assert np.array_equal(np.unique(uzf.irunbnd.array), np.arange(9))
assert np.abs(np.sum(uzf.vks.array)/uzf.vks.cnstnt - 116.) < 1e-5
assert np.abs(np.sum(uzf.vks.array) / uzf.vks.cnstnt - 116.) < 1e-5
assert uzf.eps._Util2d__value == 3.5
assert np.abs(uzf.thts._Util2d__value - .30) < 1e-5
assert np.abs(np.sum(uzf.extwc[0].array) / uzf.extwc[0].cnstnt - 176.0) < 1e4
assert np.abs(
np.sum(uzf.extwc[0].array) / uzf.extwc[0].cnstnt - 176.0) < 1e4
for per in [0, 1]:
assert np.abs(uzf.pet[per]._Util2d__value - 5e-8) < 1e-10
for per in range(m.nper):
assert np.abs(np.sum(uzf.finf[per].array) / uzf.finf[per].cnstnt - 339.0) < 1e4
assert np.abs(
np.sum(uzf.finf[per].array) / uzf.finf[per].cnstnt - 339.0) < 1e4
assert True
m.model_ws = cpth
uzf.write_file()
m2 = flopy.modflow.Modflow('UZFtest2_2', model_ws=cpth)
dis = flopy.modflow.ModflowDis(nrow=m.nrow, ncol=m.ncol, nper=12, model=m2)
uzf2 = flopy.modflow.ModflowUzf1.load(cpth + '/UZFtest2.uzf', m2)
uzf2 = flopy.modflow.ModflowUzf1.load(os.path.join(cpth, 'UZFtest2.uzf'),
m2)
attrs = dir(uzf)
for attr in attrs:
a1 = uzf.__getattribute__(attr)
@ -52,19 +56,25 @@ def test_load_and_write():
for i, a in enumerate(a1):
assert a == l2[i]
def test_create():
gpth = os.path.join('..', 'examples', 'data', 'mf2005_test', 'UZFtest2.*')
for f in glob.glob(gpth):
shutil.copy(f, cpth)
m = flopy.modflow.Modflow.load('UZFtest2.nam', version='mf2005', exe_name='mf2005',
model_ws=cpth, load_only=['ghb', 'dis', 'bas6', 'oc', 'sip', 'lpf', 'sfr'])
m = flopy.modflow.Modflow.load('UZFtest2.nam', version='mf2005',
exe_name='mf2005',
model_ws=cpth,
load_only=['ghb', 'dis', 'bas6', 'oc',
'sip', 'lpf', 'sfr'])
rm = [True if '.uz' in f else False for f in m.external_fnames]
m.external_fnames = [f for i, f in enumerate(m.external_fnames) if not rm[i]]
m.external_binflag = [f for i, f in enumerate(m.external_binflag) if not rm[i]]
m.external_output = [f for i, f in enumerate(m.external_output) if not rm[i]]
m.external_units = [f for i, f in enumerate(m.external_output) if not rm[i]]
m.external_fnames = [f for i, f in enumerate(m.external_fnames) if
not rm[i]]
m.external_binflag = [f for i, f in enumerate(m.external_binflag) if
not rm[i]]
m.external_output = [f for i, f in enumerate(m.external_output) if
not rm[i]]
m.external_units = [f for i, f in enumerate(m.external_output) if
not rm[i]]
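The four parallel comprehensions above filter the external-file lists by index. A hedged one-pass alternative is sketched below; it assumes the last list was meant to be built from m.external_units rather than m.external_output, and none of it is in the commit.

keep = ['.uz' not in f for f in m.external_fnames]  # True where the entry is not a UZF external file
m.external_fnames = [f for f, k in zip(m.external_fnames, keep) if k]
m.external_binflag = [b for b, k in zip(m.external_binflag, keep) if k]
m.external_output = [o for o, k in zip(m.external_output, keep) if k]
m.external_units = [u for u, k in zip(m.external_units, keep) if k]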
datpth = os.path.join('..', 'examples', 'data', 'uzf_examples')
irnbndpth = os.path.join(datpth, 'irunbnd.dat')
@ -86,7 +96,8 @@ def test_create():
uzf = flopy.modflow.ModflowUzf1(m,
nuztop=1, iuzfopt=1, irunflg=1, ietflg=1,
iuzfcb1=0,
iuzfcb2=61, # binary output of recharge and groundwater discharge
iuzfcb2=61,
# binary output of recharge and groundwater discharge
ntrail2=25, nsets=20, nuzgag=4,
surfdep=1.0, uzgag=uzgag,
iuzfbnd=m.bas6.ibound.array,
@ -101,9 +112,10 @@ def test_create():
)
m.write_input()
m2 = flopy.modflow.Modflow.load('UZFtest2.nam', version='mf2005', exe_name='mf2005',
verbose=True,
model_ws=os.path.split(gpth)[0])
m2 = flopy.modflow.Modflow.load('UZFtest2.nam', version='mf2005',
exe_name='mf2005',
verbose=True,
model_ws=os.path.split(gpth)[0])
# verify that all of the arrays in the created UZF package are the same as those in the loaded example
attrs = dir(uzf)
for attr in attrs:
@ -122,4 +134,4 @@ def test_create():
if __name__ == '__main__':
test_load_and_write()
test_create()
test_create()

View File

@ -4,13 +4,15 @@ Test the lgr model
import os
import flopy
cpth = os.path.join('temp', 'mflgr')
cpth = os.path.join('temp', 't035')
if not os.path.isdir(cpth):
os.makedirs(cpth)
def test_load_and_write():
pth = os.path.join('..', 'examples', 'data', 'mflgr_v2', 'ex3')
lgr = flopy.modflowlgr.ModflowLgr.load('ex3.lgr', verbose=True, model_ws=pth)
lgr = flopy.modflowlgr.ModflowLgr.load('ex3.lgr', verbose=True,
model_ws=pth)
msg = 'modflow-lgr ex3 does not have 2 grids'
assert lgr.ngrids == 2, msg

View File

@ -11,6 +11,7 @@ cpth = os.path.join('temp', 't036')
if not os.path.isdir(cpth):
os.makedirs(cpth)
def test_unitnums_load_and_write():
mfnam = 'testsfr2_tab.nam'
@ -37,7 +38,8 @@ def test_unitnums_load_and_write():
exe_name=exe_name)
assert m.load_fail is False, 'failed to load all packages'
msg = 'modflow-2005 testsfr2_tab does not have 1 layer, 7 rows, and 100 columns'
msg = 'modflow-2005 testsfr2_tab does not have ' + \
      '1 layer, 7 rows, and 100 columns'
v = (m.nlay, m.nrow, m.ncol, m.nper)
assert v == (1, 7, 100, 50), msg
@ -75,5 +77,6 @@ def test_unitnums_load_and_write():
return
if __name__ == '__main__':
test_unitnums_load_and_write()

View File

@ -4,10 +4,12 @@ Some basic tests for SWR2 load.
import os
import flopy
import numpy as np
path = os.path.join('..', 'examples', 'data', 'mf2005_test')
cpth = os.path.join('temp')
cpth = os.path.join('temp', 't037')
# make the directory if it does not exist
if not os.path.isdir(cpth):
os.makedirs(cpth)
mf_items = ['swiex1.nam', 'swiex2_strat.nam', 'swiex3.nam']
pths = []
@ -46,11 +48,13 @@ def load_swi(mfnam, pth):
assert success, 'base model run did not terminate successfully'
fn0 = os.path.join(lpth, mfnam)
# write free format files - wont run without resetting to free format - evt externa file issue
# write free format files -
# won't run without resetting to free format - evt external file issue
m.free_format_input = True
# rewrite files
m.change_model_ws(apth, reset_external=True) # l1b2k_bath wont run without this
m.change_model_ws(apth,
reset_external=True) # l1b2k_bath wont run without this
m.write_input()
if run:
try:
@ -68,7 +72,7 @@ def load_swi(mfnam, pth):
max_incpd=0.1, max_cumpd=0.1,
outfile=fsum)
except:
print('could not performbudget comparison')
print('could not perform budget comparison')
assert success, 'budget comparison failure'

View File

@ -1,4 +1,4 @@
from __future__ import print_function,division
from __future__ import print_function, division
import os
import platform
import socket
@ -14,7 +14,9 @@ ITMUNI = {0: "undefined", 1: "seconds", 2: "minutes", 3: "hours", 4: "days",
LENUNI = {0: "undefined", 1: "feet", 2: "meters", 3: "centimeters"}
PRECISION_STRS = ["f4", "f8", "i4"]
STANDARD_VARS = ["longitude","latitude","layer","elevation","delr","delc","time"]
STANDARD_VARS = ["longitude", "latitude", "layer", "elevation", "delr", "delc",
"time"]
class Logger(object):
"""
@ -67,14 +69,14 @@ class Logger(object):
s = str(t) + ' finished: ' + str(phrase) + ", took: " + \
str(t - self.items[phrase]) + '\n'
if self.echo:
print(s,)
print(s, )
if self.filename:
self.f.write(s)
self.items.pop(phrase)
else:
s = str(t) + ' starting: ' + str(phrase) + '\n'
if self.echo:
print(s,)
print(s, )
if self.filename:
self.f.write(s)
self.items[phrase] = copy.deepcopy(t)
@ -91,7 +93,7 @@ class Logger(object):
"""
s = str(datetime.now()) + " WARNING: " + message + '\n'
if self.echo:
print(s,)
print(s, )
if self.filename:
self.f.write(s)
return
@ -169,76 +171,76 @@ class NetCdf(object):
self.initialize_file(time_values=self.time_values_arg)
self.log("initializing file")
def __add__(self,other):
def __add__(self, other):
new_net = NetCdf.zeros_like(self)
if np.isscalar(other) or isinstance(other,np.ndarray):
if np.isscalar(other) or isinstance(other, np.ndarray):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] +\
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] + \
other
elif isinstance(other,NetCdf):
elif isinstance(other, NetCdf):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] +\
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] + \
other.nc.variables[vname][:]
else:
raise Exception("NetCdf.__add__(): unrecognized other:{0}".\
raise Exception("NetCdf.__add__(): unrecognized other:{0}". \
format(str(type(other))))
return new_net
def __sub__(self,other):
def __sub__(self, other):
new_net = NetCdf.zeros_like(self)
if np.isscalar(other) or isinstance(other,np.ndarray):
if np.isscalar(other) or isinstance(other, np.ndarray):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] -\
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] - \
other
elif isinstance(other,NetCdf):
elif isinstance(other, NetCdf):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] -\
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] - \
other.nc.variables[vname][:]
else:
raise Exception("NetCdf.__sub__(): unrecognized other:{0}".\
raise Exception("NetCdf.__sub__(): unrecognized other:{0}". \
format(str(type(other))))
return new_net
def __mul__(self,other):
def __mul__(self, other):
new_net = NetCdf.zeros_like(self)
if np.isscalar(other) or isinstance(other,np.ndarray):
if np.isscalar(other) or isinstance(other, np.ndarray):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] *\
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] * \
other
elif isinstance(other,NetCdf):
elif isinstance(other, NetCdf):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] *\
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] * \
other.nc.variables[vname][:]
else:
raise Exception("NetCdf.__mul__(): unrecognized other:{0}".\
raise Exception("NetCdf.__mul__(): unrecognized other:{0}". \
format(str(type(other))))
return new_net
def __div__(self,other):
def __div__(self, other):
return self.__truediv__(other)
def __truediv__(self,other):
def __truediv__(self, other):
new_net = NetCdf.zeros_like(self)
with np.errstate(invalid="ignore"):
if np.isscalar(other) or isinstance(other,np.ndarray):
if np.isscalar(other) or isinstance(other, np.ndarray):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] /\
new_net.nc.variables[vname][:] = self.nc.variables[vname][
:] / \
other
elif isinstance(other,NetCdf):
elif isinstance(other, NetCdf):
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:] /\
other.nc.variables[vname][:]
new_net.nc.variables[vname][:] = self.nc.variables[vname][
:] / \
other.nc.variables[vname][
:]
else:
raise Exception("NetCdf.__sub__(): unrecognized other:{0}".\
raise Exception("NetCdf.__sub__(): unrecognized other:{0}". \
format(str(type(other))))
return new_net
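These operator overloads let two exported NetCdf instances, or an instance and a scalar, be combined variable by variable. A hedged usage sketch follows; ml_a and ml_b stand in for two models that share a grid and are assumptions, not names from this changeset.

fnc_a = ml_a.export(os.path.join('temp', 'a.nc'))
fnc_b = ml_b.export(os.path.join('temp', 'b.nc'))

diff = fnc_a - fnc_b     # elementwise difference of every exported variable
scaled = fnc_a * 3.281   # scalar broadcast across every variable
ratio = fnc_a / fnc_b    # __truediv__ evaluates under np.errstate(invalid="ignore")
scaled.write()           # each result is a new NetCdf built with zeros_like()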
def append(self,other,suffix="_1"):
assert isinstance(other,NetCdf) or isinstance(other,dict)
if isinstance(other,NetCdf):
def append(self, other, suffix="_1"):
assert isinstance(other, NetCdf) or isinstance(other, dict)
if isinstance(other, NetCdf):
for vname in other.var_attr_dict.keys():
attrs = other.var_attr_dict[vname].copy()
var = other.nc.variables[vname]
@ -251,23 +253,24 @@ class NetCdf(object):
attrs["long_name"] += " " + suffix
else:
continue
assert new_vname not in self.nc.variables.keys(),\
"var already exists:{0} in {1}".\
format(new_vname,",".join(self.nc.variables.keys()))
assert new_vname not in self.nc.variables.keys(), \
"var already exists:{0} in {1}". \
format(new_vname, ",".join(self.nc.variables.keys()))
attrs["max"] = var[:].max()
attrs["min"] = var[:].min()
new_var = self.create_variable(new_vname,attrs,
var.dtype,
dimensions=var.dimensions)
new_var = self.create_variable(new_vname, attrs,
var.dtype,
dimensions=var.dimensions)
new_var[:] = var[:]
else:
for vname,array in other.items():
for vname, array in other.items():
vname_norm = self.normalize_name(vname)
assert vname_norm in self.nc.variables.keys(),"dict var not in " \
"self.vars:{0}-->".\
format(vname) +\
",".join(self.nc.variables.keys())
assert vname_norm in self.nc.variables.keys(), "dict var not in " \
"self.vars:{0}-->". \
format(
vname) + \
",".join(
self.nc.variables.keys())
new_vname = vname_norm + suffix
assert new_vname not in self.nc.variables.keys()
@ -277,34 +280,34 @@ class NetCdf(object):
attrs["name"] = new_vname
attrs["long_name"] = attrs["long_name"] + ' ' + suffix
var = self.nc.variables[vname_norm]
#assert var.shape == array.shape,\
# assert var.shape == array.shape,\
# "{0} shape ({1}) doesn't make array shape ({2})".\
# format(new_vname,str(var.shape),str(array.shape))
new_var = self.create_variable(new_vname,attrs,
var.dtype,
dimensions=var.dimensions)
new_var = self.create_variable(new_vname, attrs,
var.dtype,
dimensions=var.dimensions)
try:
new_var[:] = array
except:
new_var[:,0] = array
new_var[:, 0] = array
return
def copy(self,output_filename):
new_net = NetCdf.zeros_like(self,output_filename=output_filename)
def copy(self, output_filename):
new_net = NetCdf.zeros_like(self, output_filename=output_filename)
for vname in self.var_attr_dict.keys():
new_net.nc.variables[vname][:] = self.nc.variables[vname][:]
return new_net
@classmethod
def zeros_like(cls,other,output_filename=None,
verbose=None,logger=None):
new_net = NetCdf.empty_like(other,output_filename,verbose=verbose,
def zeros_like(cls, other, output_filename=None,
verbose=None, logger=None):
new_net = NetCdf.empty_like(other, output_filename, verbose=verbose,
logger=logger)
# add the vars to the instance
for vname in other.var_attr_dict.keys():
if new_net.nc.variables.get(vname) is not None:
new_net.logger.warn("variable {0} already defined, skipping".\
new_net.logger.warn("variable {0} already defined, skipping". \
format(vname))
continue
new_net.log("adding variable {0}".format(vname))
@ -317,9 +320,10 @@ class NetCdf(object):
mask = None
new_data = np.zeros_like(data)
new_data[mask] = FILLVALUE
new_var = new_net.create_variable(vname,other.var_attr_dict[vname],
var.dtype,
dimensions=var.dimensions)
new_var = new_net.create_variable(vname,
other.var_attr_dict[vname],
var.dtype,
dimensions=var.dimensions)
new_var[:] = new_data
new_net.log("adding variable {0}".format(vname))
global_attrs = {}
@ -330,19 +334,22 @@ class NetCdf(object):
return new_net
@classmethod
def empty_like(cls,other,output_filename=None,
verbose=None,logger=None):
def empty_like(cls, other, output_filename=None,
verbose=None, logger=None):
if output_filename is None:
output_filename = str(time.mktime(datetime.now().timetuple()))+".nc"
output_filename = str(
time.mktime(datetime.now().timetuple())) + ".nc"
while os.path.exists(output_filename):
output_filename = str(time.mktime(datetime.now().timetuple()))+".nc"
new_net = cls(output_filename,other.model,
time_values=other.time_values_arg,verbose=verbose,
output_filename = str(
time.mktime(datetime.now().timetuple())) + ".nc"
new_net = cls(output_filename, other.model,
time_values=other.time_values_arg, verbose=verbose,
logger=logger)
return new_net
def difference(self, other, minuend="self", mask_zero_diff=True,onlydiff=True):
def difference(self, other, minuend="self", mask_zero_diff=True,
onlydiff=True):
"""make a new NetCDF instance that is the difference with another
netcdf file
@ -374,8 +381,8 @@ class NetCdf(object):
"""
assert self.nc is not None,"can't call difference() if nc " +\
"hasn't been populated"
assert self.nc is not None, "can't call difference() if nc " + \
"hasn't been populated"
try:
import netCDF4
except Exception as e:
@ -383,19 +390,19 @@ class NetCdf(object):
self.logger.warn(mess)
raise Exception(mess)
if isinstance(other,str):
assert os.path.exists(other),"filename 'other' not found:" + \
"{0}".format(other)
other = netCDF4.Dataset(other,'r')
if isinstance(other, str):
assert os.path.exists(other), "filename 'other' not found:" + \
"{0}".format(other)
other = netCDF4.Dataset(other, 'r')
assert isinstance(other,netCDF4.Dataset)
assert isinstance(other, netCDF4.Dataset)
# check for similar variables
self_vars = set(self.nc.variables.keys())
other_vars = set(other.variables)
diff = self_vars.symmetric_difference(other_vars)
if len(diff) > 0:
self.logger.warn("variables are not the same between the two " +\
self.logger.warn("variables are not the same between the two " + \
"nc files: " + ','.join(diff))
return
@ -407,14 +414,14 @@ class NetCdf(object):
self.logger.warn("missing dimension in other:{0}".format(d))
return
if len(self_dimens[d]) != len(other_dimens[d]):
self.logger.warn("dimension not consistent: "+\
self.logger.warn("dimension not consistent: " + \
"{0}:{1}".format(self_dimens[d],
other_dimens[d]))
return
# should be good to go
time_values = self.nc.variables.get("time")[:]
new_net = NetCdf(self.output_filename.replace(".nc",".diff.nc"),
self.model,time_values=time_values)
new_net = NetCdf(self.output_filename.replace(".nc", ".diff.nc"),
self.model, time_values=time_values)
# add the vars to the instance
for vname in self_vars:
if vname not in self.var_attr_dict or \
@ -429,7 +436,7 @@ class NetCdf(object):
o_mask, s_mask = None, None
# keep the masks to apply later
if isinstance(s_data,np.ma.MaskedArray):
if isinstance(s_data, np.ma.MaskedArray):
self.logger.warn("masked array for {0}".format(vname))
s_mask = s_data.mask
s_data = np.array(s_data)
@ -437,14 +444,13 @@ class NetCdf(object):
else:
np.nan_to_num(s_data)
if isinstance(o_data,np.ma.MaskedArray):
if isinstance(o_data, np.ma.MaskedArray):
o_mask = o_data.mask
o_data = np.array(o_data)
o_data[o_mask] = 0.0
else:
np.nan_to_num(o_data)
# difference with self
if minuend.lower() == "self":
d_data = s_data - o_data
@ -457,10 +463,13 @@ class NetCdf(object):
# check for non-zero diffs
if onlydiff and d_data.sum() == 0.0:
self.logger.warn("var {0} has zero differences, skipping...".format(vname))
self.logger.warn(
"var {0} has zero differences, skipping...".format(vname))
continue
self.logger.warn("resetting diff attrs max,min:{0},{1}".format(d_data.min(),d_data.max()))
self.logger.warn(
"resetting diff attrs max,min:{0},{1}".format(d_data.min(),
d_data.max()))
attrs = self.var_attr_dict[vname].copy()
attrs["max"] = np.nanmax(d_data)
attrs["min"] = np.nanmin(d_data)
@ -478,20 +487,20 @@ class NetCdf(object):
d_data[np.isnan(d_data)] = FILLVALUE
if mask_zero_diff:
d_data[np.where(d_data==0.0)] = FILLVALUE
d_data[np.where(d_data == 0.0)] = FILLVALUE
var = new_net.create_variable(vname,attrs,
var = new_net.create_variable(vname, attrs,
s_var.dtype,
dimensions=s_var.dimensions)
var[:] = d_data
self.log("processing variable {0}".format(vname))
def _dt_str(self,dt):
def _dt_str(self, dt):
""" for datetime to string for year < 1900
"""
dt_str = '{0:04d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02}Z'.format(
dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
return dt_str
def write(self):
@ -536,16 +545,16 @@ class NetCdf(object):
self.global_attributes["create_platform"] = platform.system()
self.global_attributes["create_directory"] = os.getcwd()
htol,rtol = -999,-999
htol, rtol = -999, -999
try:
htol,rtol = NetCdf.get_solver_H_R_tols(self.model)
htol, rtol = NetCdf.get_solver_H_R_tols(self.model)
except Exception as e:
self.logger.warn("unable to get solver tolerances:" +\
self.logger.warn("unable to get solver tolerances:" + \
"{0}".format(str(e)))
self.global_attributes["solver_head_tolerance"] = htol
self.global_attributes["solver_flux_tolerance"] = rtol
for n,v in self.model.sr.attribute_dict.items():
self.global_attributes["flopy_sr_"+n] = v
for n, v in self.model.sr.attribute_dict.items():
self.global_attributes["flopy_sr_" + n] = v
self.global_attributes["start_datetime"] = self.model.start_datetime
self.fillvalue = FILLVALUE
@ -581,7 +590,7 @@ class NetCdf(object):
proj4_str = "+init=" + proj4_str
self.log("building grid crs using proj4 string: {0}".format(proj4_str))
try:
self.grid_crs = Proj(proj4_str,preseve_units=True,errcheck=True)
self.grid_crs = Proj(proj4_str, preseve_units=True, errcheck=True)
except Exception as e:
self.log("error building grid crs:\n{0}".format(str(e)))
@ -591,8 +600,8 @@ class NetCdf(object):
# self.zs = -1.0 * self.model.dis.zcentroids[:,:,::-1]
self.zs = -1.0 * self.model.dis.zcentroids
if self.grid_units.lower().startswith('f'): #and \
#not self.model.sr.units.startswith("f"):
if self.grid_units.lower().startswith('f'): # and \
# not self.model.sr.units.startswith("f"):
self.log("converting feet to meters")
sr = copy.deepcopy(self.model.sr)
sr.delr /= 3.281
@ -609,7 +618,6 @@ class NetCdf(object):
ys = self.model.sr.ycentergrid.copy()
xs = self.model.sr.xcentergrid.copy()
# Transform to a known CRS
nc_crs = Proj(init=self.nc_epsg_str)
self.log("projecting grid cell center arrays " + \
@ -831,13 +839,14 @@ class NetCdf(object):
# Normalize variable name
name = self.normalize_name(name)
# if this is a core var like a dimension...
#long_name = attributes.pop("long_name",name)
# long_name = attributes.pop("long_name",name)
if name in STANDARD_VARS and name in self.nc.variables.keys():
return
if name not in self.var_attr_dict.keys() and\
name in self.nc.variables.keys():
if name not in self.var_attr_dict.keys() and \
name in self.nc.variables.keys():
if self.forgive:
self.logger.warn("skipping duplicate variable: {0}".format(name))
self.logger.warn(
"skipping duplicate variable: {0}".format(name))
return
else:
raise Exception("duplicate variable name: {0}".format(name))
@ -873,8 +882,8 @@ class NetCdf(object):
self.var_attr_dict[name] = attributes
var = self.nc.createVariable(name, precision_str, dimensions,
fill_value=self.fillvalue, zlib=True)#,
#chunksizes=tuple(chunks))
fill_value=self.fillvalue, zlib=True) # ,
# chunksizes=tuple(chunks))
for k, v in attributes.items():
try:
var.setncattr(k, v)
@ -884,8 +893,7 @@ class NetCdf(object):
self.log("creating variable: " + str(name))
return var
def add_global_attributes(self,attr_dict):
def add_global_attributes(self, attr_dict):
""" add global attribute to an initialized file
Parameters
@ -903,8 +911,8 @@ class NetCdf(object):
"""
if self.nc is None:
#self.initialize_file()
mess = "NetCDF.add_global_attributes() should only "+\
# self.initialize_file()
mess = "NetCDF.add_global_attributes() should only " + \
"be called after the file has been initialized"
self.logger.warn(mess)
raise Exception(mess)
@ -913,14 +921,13 @@ class NetCdf(object):
self.nc.setncatts(attr_dict)
self.log("setting global attributes")
@staticmethod
def get_solver_H_R_tols(model):
if model.pcg is not None:
return model.pcg.hclose,model.pcg.rclose
return model.pcg.hclose, model.pcg.rclose
elif model.nwt is not None:
return model.nwt.headtol,model.nwt.fluxtol
return model.nwt.headtol, model.nwt.fluxtol
elif model.sip is not None:
return model.sip.hclose,-999
return model.sip.hclose, -999
elif model.gmg is not None:
return model.gmg.hclose,model.gmg.rclose
return model.gmg.hclose, model.gmg.rclose
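The static helper above picks the convergence tolerances off whichever solver package happens to be attached; a hedged one-line usage, where ml is an assumed model carrying a PCG package:

htol, rtol = NetCdf.get_solver_H_R_tols(ml)  # PCG -> (hclose, rclose); SIP -> (hclose, -999)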

View File

@ -468,16 +468,15 @@ class BaseModel(object):
'\ncreating model workspace...\n {}\n'.format(new_pth))
os.makedirs(new_pth)
except:
# print '\n%s not valid, workspace-folder was changed to %s\n' % (new_pth, os.getcwd())
print(
'\n{0:s} not valid, workspace-folder was changed to {1:s}\n'.format(
new_pth, os.getcwd()))
line = '\n{} not valid, workspace-folder '.format(new_pth) + \
'was changed to {}\n'.format(os.getcwd())
print(line)
new_pth = os.getcwd()
# --reset the model workspace
old_pth = self._model_ws
self._model_ws = new_pth
sys.stdout.write(
'\nchanging model workspace...\n {}\n'.format(new_pth))
line = '\nchanging model workspace...\n {}\n'.format(new_pth)
sys.stdout.write(line)
# reset the paths for each package
for pp in (self.packagelist):
pp.fn_path = os.path.join(self.model_ws, pp.file_name[0])
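A short, hedged usage sketch of the workspace switch handled above (the model name and folder names are assumptions): after change_model_ws() every attached package's fn_path is rebuilt against the new folder, so a subsequent write_input() lands there.

import os
import flopy

m = flopy.modflow.Modflow('demo', model_ws=os.path.join('temp', 'old_ws'))
dis = flopy.modflow.ModflowDis(m)
m.change_model_ws(os.path.join('temp', 'new_ws'))  # the folder is created if it is missing
m.write_input()  # the name file and dis file are written under temp/new_ws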