fix(data check): Minimum number of data columns check fixed (#1062) (#1067)

* fix(data check): Minimum number of data columns check fixed (#1062)

* fix(data check): Test case for nseg=1 (#1062)

* fix(data check): Minimum number of data columns check fixed (#1062)

* fix(data check)

* fix(data length checking): transport test added to flopy autotest

* fix(data length checking): transport test added to flopy autotest
spaulins-usgs 2021-03-17 12:28:19 -07:00 committed by GitHub
parent 605366bba7
commit 179fc4e06b
7 changed files with 320 additions and 33 deletions
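
Note on the change: the minimum-entries check for list data is now derived from the data actually supplied (via a new _get_min_record_entries helper that builds the type list with min_size=True) rather than from the static record definition in the package structure, so records that legitimately omit optional trailing columns pass validation. Below is a minimal sketch of the case this enables, adapted from the autotest added in this commit; it is illustrative only, `model` is assumed to be an existing ModflowGwf instance, and the surrounding simulation setup is omitted.

import flopy

# Empty EVT recarray for a single-segment configuration; with nseg=1 the
# per-segment columns are absent, so each record is shorter than the
# maximum EVT record.
evt_period = flopy.mf6.ModflowGwfevt.stress_period_data.empty(
    model, 150, nseg=1
)
for col in range(10):
    for row in range(15):
        evt_period[0][col * 15 + row] = (
            (0, row, col),
            50.0,
            0.0004,
            10.0,
            None,
        )

# With the corrected check, these shorter records no longer trip the
# "minimum number of entries" error when the package is built.
evt = flopy.mf6.ModflowGwfevt(
    model,
    maxbound=150,
    nseg=1,
    stress_period_data=evt_period,
)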


@@ -37,7 +37,8 @@ from flopy.mf6.mfbase import MFDataException
try:
import shapefile
if int(shapefile.__version__.split('.')[0]) < 2:
if int(shapefile.__version__.split(".")[0]) < 2:
shapefile = None
except ImportError:
shapefile = None
@@ -1134,6 +1135,28 @@ def test005_advgw_tidal():
timeseries=ts_dict,
)
# test nseg = 1
evt_period = ModflowGwfevt.stress_period_data.empty(model, 150, nseg=1)
for col in range(0, 10):
for row in range(0, 15):
evt_period[0][col * 15 + row] = (
(0, row, col),
50.0,
0.0004,
10.0,
None,
)
evt_package_test = ModflowGwfevt(
model,
print_input=True,
print_flows=True,
save_flows=True,
maxbound=150,
nseg=1,
stress_period_data=evt_period,
)
evt_package_test.remove()
# test empty
evt_period = ModflowGwfevt.stress_period_data.empty(model, 150, nseg=3)
for col in range(0, 10):
@@ -1264,7 +1287,15 @@ def test005_advgw_tidal():
("rv2-upper", "RIV", "riv2_upper"),
("rv-2-7-4", "RIV", (0, 6, 3)),
("rv2-8-5", "RIV", (0, 6, 4)),
("rv-2-9-6", "RIV", (0, 5, 5,)),
(
"rv-2-9-6",
"RIV",
(
0,
5,
5,
),
),
],
"riv_flowsA.csv": [
("riv1-3-1", "RIV", (0, 2, 0)),
@@ -2816,6 +2847,196 @@ def test028_sfr():
return
def test_transport():
# init paths
test_ex_name = "test_transport"
name = "mst03"
pth = os.path.join(
"..", "examples", "data", "mf6", "create_tests", test_ex_name
)
run_folder = os.path.join(cpth, test_ex_name)
if not os.path.isdir(run_folder):
os.makedirs(run_folder)
expected_output_folder = os.path.join(pth, "expected_output")
expected_head_file = os.path.join(expected_output_folder, "gwf_mst03.hds")
expected_conc_file = os.path.join(expected_output_folder, "gwt_mst03.unc")
ws = os.path.join('temp', 't505', test_ex_name)
laytyp = [1]
ss = [1.e-10]
sy = [0.1]
nlay, nrow, ncol = 1, 1, 1
nper = 2
perlen = [2., 2.]
nstp = [14, 14]
tsmult = [1., 1.]
delr = 10.
delc = 10.
top = 10.
botm = [0.]
strt = top
hk = 1.0
nouter, ninner = 100, 300
hclose, rclose, relax = 1e-6, 1e-6, 0.97
tdis_rc = []
for idx in range(nper):
tdis_rc.append((perlen[idx], nstp[idx], tsmult[idx]))
idx = 0
# build MODFLOW 6 files
sim = flopy.mf6.MFSimulation(sim_name=name, version='mf6',
exe_name='mf6',
sim_ws=ws)
# create tdis package
tdis = flopy.mf6.ModflowTdis(sim, time_units='DAYS',
nper=nper, perioddata=tdis_rc)
# create gwf model
gwfname = 'gwf_' + name
newtonoptions = ['NEWTON', 'UNDER_RELAXATION']
gwf = flopy.mf6.ModflowGwf(sim, modelname=gwfname,
newtonoptions=newtonoptions,)
# create iterative model solution and register the gwf model with it
imsgwf = flopy.mf6.ModflowIms(sim, print_option='SUMMARY',
outer_dvclose=hclose,
outer_maximum=nouter,
under_relaxation='DBD',
under_relaxation_theta=0.7,
inner_maximum=ninner,
inner_dvclose=hclose, rcloserecord=rclose,
linear_acceleration='BICGSTAB',
scaling_method='NONE',
reordering_method='NONE',
relaxation_factor=relax,
filename='{}.ims'.format(gwfname))
sim.register_ims_package(imsgwf, [gwf.name])
dis = flopy.mf6.ModflowGwfdis(gwf, nlay=nlay, nrow=nrow, ncol=ncol,
delr=delr, delc=delc,
top=top, botm=botm,
idomain=np.ones((nlay, nrow, ncol),
dtype=int))
# initial conditions
ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
# node property flow
npf = flopy.mf6.ModflowGwfnpf(gwf, save_flows=False,
icelltype=laytyp[idx],
k=hk, k33=hk)
# storage
sto = flopy.mf6.ModflowGwfsto(gwf, save_flows=False,
iconvert=laytyp[idx],
ss=ss[idx], sy=sy[idx],
steady_state={0: False},
transient={0: True})
# wel files
welspdict = {0: [[(0, 0, 0), -25., 0.]],
1: [[(0, 0, 0), 25., 0.]]}
wel = flopy.mf6.ModflowGwfwel(gwf, print_input=True, print_flows=True,
stress_period_data=welspdict,
save_flows=False,
auxiliary='CONCENTRATION', pname='WEL-1')
# output control
oc = flopy.mf6.ModflowGwfoc(gwf,
budget_filerecord='{}.cbc'.format(gwfname),
head_filerecord='{}.hds'.format(gwfname),
headprintrecord=[
('COLUMNS', 10, 'WIDTH', 15,
'DIGITS', 6, 'GENERAL')],
saverecord=[('HEAD', 'ALL')],
printrecord=[('HEAD', 'ALL'),
('BUDGET', 'ALL')])
# create gwt model
gwtname = 'gwt_' + name
gwt = flopy.mf6.MFModel(sim, model_type='gwt6', modelname=gwtname,
model_nam_file='{}.nam'.format(gwtname))
# create iterative model solution and register the gwt model with it
imsgwt = flopy.mf6.ModflowIms(sim, print_option='SUMMARY',
outer_dvclose=hclose,
outer_maximum=nouter,
under_relaxation='NONE',
inner_maximum=ninner,
inner_dvclose=hclose, rcloserecord=rclose,
linear_acceleration='BICGSTAB',
scaling_method='NONE',
reordering_method='NONE',
relaxation_factor=relax,
filename='{}.ims'.format(gwtname))
sim.register_ims_package(imsgwt, [gwt.name])
dis = flopy.mf6.ModflowGwtdis(gwt, nlay=nlay, nrow=nrow, ncol=ncol,
delr=delr, delc=delc,
top=top, botm=botm,
idomain=1,
filename='{}.dis'.format(gwtname))
# initial conditions
ic = flopy.mf6.ModflowGwtic(gwt, strt=100.)
# advection
adv = flopy.mf6.ModflowGwtadv(gwt, scheme='UPSTREAM',
filename='{}.adv'.format(gwtname))
# mass storage and transfer
mst = flopy.mf6.ModflowGwtmst(gwt, porosity=sy[idx],
filename='{}.mst'.format(gwtname))
# sources
sourcerecarray = [('WEL-1', 'AUX', 'CONCENTRATION')]
ssm = flopy.mf6.ModflowGwtssm(gwt, sources=sourcerecarray,
filename='{}.ssm'.format(gwtname))
# output control
oc = flopy.mf6.ModflowGwtoc(gwt,
budget_filerecord='{}.cbc'.format(gwtname),
concentration_filerecord='{}.ucn'.format(gwtname),
concentrationprintrecord=[
('COLUMNS', 10, 'WIDTH', 15,
'DIGITS', 6, 'GENERAL')],
saverecord=[('CONCENTRATION', 'ALL')],
printrecord=[('CONCENTRATION', 'ALL'),
('BUDGET', 'ALL')])
# GWF GWT exchange
gwfgwt = flopy.mf6.ModflowGwfgwt(sim, exgtype='GWF6-GWT6',
exgmnamea=gwfname, exgmnameb=gwtname,
filename='{}.gwfgwt'.format(name))
# write MODFLOW 6 files
sim.write_simulation()
# run simulation
if run:
sim.run_simulation()
# compare output to expected results
head_file = os.path.join(os.getcwd(), expected_head_file)
head_new = os.path.join(run_folder, "gwf_mst03.hds")
outfile = os.path.join(run_folder, "head_compare.dat")
assert pymake.compare_heads(
None, None, files1=head_file, files2=head_new, outfile=outfile
)
conc_file = os.path.join(os.getcwd(), expected_conc_file)
conc_new = os.path.join(run_folder, "gwt_mst03.ucn")
assert pymake.compare_concs(
None, None, files1=conc_file, files2=conc_new, outfile=outfile
)
# clean up
sim.delete_output_files()
if __name__ == "__main__":
np001()
np002()
@@ -2827,3 +3048,4 @@ if __name__ == "__main__":
test028_sfr()
test035_fhb()
test050_circle_island()
test_transport()


@@ -4,6 +4,7 @@ modeldimensions module. Contains the model dimension information
"""
import sys
from .simulationtime import SimulationTime
from .modelgrid import UnstructuredModelGrid, ModelGrid
from ..mfbase import StructException, FlopyException, VerbosityLevel
@@ -81,6 +82,7 @@ class DataDimensions(object):
data=None,
data_item_num=None,
repeating_key=None,
min_size=False,
):
return self.get_model_dim(data_item_num).get_data_shape(
self.structure,
@@ -89,6 +91,7 @@ class DataDimensions(object):
data,
self.package_dim.package_path,
repeating_key=repeating_key,
min_size=min_size,
)
def model_subspace_size(self, subspace_string="", data_item_num=None):
@@ -403,6 +406,7 @@ class ModelDimensions(object):
path=None,
deconstruct_axis=True,
repeating_key=None,
min_size=False,
):
if structure is None:
raise FlopyException(
@@ -492,6 +496,7 @@ class ModelDimensions(object):
path,
deconstruct_axis,
repeating_key=repeating_key,
min_size=min_size,
)
if self.locked and consistent_shape:
self.stored_shapes[data_item.path] = (
@@ -509,6 +514,7 @@ class ModelDimensions(object):
path=None,
deconstruct_axis=True,
repeating_key=None,
min_size=False,
):
if isinstance(data, tuple):
data = [data]
@@ -560,7 +566,7 @@ class ModelDimensions(object):
result = self.resolve_exp(
item,
self._find_in_dataset(
data_set_struct, item[0], data
data_set_struct, item[0], data, min_size
),
)
if result:
@@ -584,7 +590,8 @@ class ModelDimensions(object):
elif DatumUtil.is_int(item[0]):
shape_dimensions.append(int(item[0]))
else:
# try to resolve dimension within the existing block
# try to resolve dimension within the
# existing block
result = self.simulation_data.mfdata.find_in_path(
parent_path, item[0]
)
@@ -684,7 +691,7 @@ class ModelDimensions(object):
return value
@staticmethod
def _find_in_dataset(data_set_struct, item, data):
def _find_in_dataset(data_set_struct, item, data, min_size=False):
if data is not None:
# find the current data item in data_set_struct
for index, data_item in zip(
@@ -695,12 +702,22 @@ class ModelDimensions(object):
data_item.name.lower() == item.lower()
and len(data[0]) > index
):
# always use the maximum value
max_val = 0
for data_line in data:
if data_line[index] > max_val:
max_val = data_line[index]
return max_val
if min_size:
# use the minimum value
min_val = sys.maxsize
for data_line in data:
if data_line[index] < min_val:
min_val = data_line[index]
if min_val == sys.maxsize:
return 0
return min_val
else:
# use the maximum value
max_val = 0
for data_line in data:
if data_line[index] > max_val:
max_val = data_line[index]
return max_val
return None
@staticmethod


@@ -356,6 +356,31 @@ class MFList(mfdata.MFMultiDimVar, DataListInterface):
def get_data(self, apply_mult=False, **kwargs):
return self._get_data(apply_mult, **kwargs)
def _get_min_record_entries(self, data=None):
try:
if isinstance(data, dict) and "data" in data:
data = data["data"]
type_list = self._get_storage_obj().build_type_list(
data=data, min_size=True
)
except Exception as ex:
type_, value_, traceback_ = sys.exc_info()
raise MFDataException(
self.structure.get_model(),
self.structure.get_package(),
self._path,
"getting min record entries",
self.structure.name,
inspect.stack()[0][3],
type_,
value_,
traceback_,
None,
self._simulation_data.debug,
ex,
)
return len(type_list)
def _set_data(self, data, autofill=False, check_data=True):
if isinstance(data, dict):
if "data" in data:
@@ -366,7 +391,7 @@ class MFList(mfdata.MFMultiDimVar, DataListInterface):
data_check = data
if iterable(data_check) and check_data:
# verify data length
min_line_size = self.structure.get_min_record_entries()
min_line_size = self._get_min_record_entries(data)
if isinstance(data_check[0], np.record) or (
iterable(data_check[0]) and not isinstance(data_check[0], str)
):
@@ -481,7 +506,6 @@ class MFList(mfdata.MFMultiDimVar, DataListInterface):
def _check_line_size(self, data_line, min_line_size):
if 0 < len(data_line) < min_line_size:
min_line_size = self.structure.get_min_record_entries()
message = (
"Data line {} only has {} entries, "
"minimum number of entries is "


@@ -2388,6 +2388,7 @@ class DataStorage(object):
key=None,
nseg=None,
cellid_expanded=False,
min_size=False,
):
if data_set is None:
self.jagged_record = False
@@ -2438,17 +2439,27 @@ class DataStorage(object):
ks_data_item.type = DatumType.string
ks_data_item.name = "{}_data".format(ks_data_item.name)
ks_rec_type = ks_data_item.get_rec_type()
self._append_type_lists(
ks_data_item.name, ks_rec_type, ks_data_item.is_cellid
)
if not min_size:
self._append_type_lists(
ks_data_item.name,
ks_rec_type,
ks_data_item.is_cellid,
)
if (
index == len(data_set.data_item_structures) - 1
and data is not None
):
idx = 1
line_max_size = self._get_max_data_line_size(data)
(
line_max_size,
line_min_size,
) = self._get_max_min_data_line_size(data)
if min_size:
line_size = line_min_size
else:
line_size = line_max_size
type_list = self.resolve_typelist(data)
while len(type_list) < line_max_size:
while len(type_list) < line_size:
# keystrings at the end of a line can contain
# items of variable length. assume everything at
# the end of the data line is related to the last
@@ -2515,6 +2526,7 @@ class DataStorage(object):
data_set,
data,
repeating_key=key,
min_size=min_size,
)
else:
resolved_shape = [1]
@@ -2529,7 +2541,7 @@ class DataStorage(object):
resolved_shape[0] == -9999
or shape_rule is not None
):
if data is not None:
if data is not None and not min_size:
# shape is an indeterminate 1-d array and
# should consume the remainder of the data
max_s = PyListUtil.max_multi_dim_list_size(
@ -2539,10 +2551,12 @@ class DataStorage(object):
self._recarray_type_list
)
else:
# shape is indeterminate 1-d array and no data
# provided to resolve
# shape is indeterminate 1-d array and either
# no data provided to resolve or request is
# for minimum data size
resolved_shape[0] = 1
self.jagged_record = True
if not min_size:
self.jagged_record = True
if data_item.is_cellid:
if (
data_item.shape is not None
@@ -2555,14 +2569,17 @@ class DataStorage(object):
grid = data_dim.get_model_grid()
size = grid.get_num_spatial_coordinates()
data_item.remove_cellid(resolved_shape, size)
for index in range(0, resolved_shape[0]):
if resolved_shape[0] > 1:
name = "{}_{}".format(data_item.name, index)
else:
name = data_item.name
self._append_type_lists(
name, data_type, data_item.is_cellid
)
if not data_item.optional or not min_size:
for index in range(0, resolved_shape[0]):
if resolved_shape[0] > 1:
name = "{}_{}".format(
data_item.name, index
)
else:
name = data_item.name
self._append_type_lists(
name, data_type, data_item.is_cellid
)
if cellid_expanded:
return self._recarray_type_list_ex
else:
@@ -2625,13 +2642,18 @@ class DataStorage(object):
return current_length[0]
@staticmethod
def _get_max_data_line_size(data):
def _get_max_min_data_line_size(data):
max_size = 0
min_size = sys.maxsize
if data is not None:
for value in data:
if len(value) > max_size:
max_size = len(value)
return max_size
if len(value) < min_size:
min_size = len(value)
if min_size == sys.maxsize:
min_size = 0
return max_size, min_size
def get_data_dimensions(self, layer):
data_dimensions = self.data_dimensions.get_data_shape()[0]


@@ -173,7 +173,9 @@ class PyListUtil(object):
def max_multi_dim_list_size(current_list):
max_length = -1
for item in current_list:
if len(item) > max_length:
if isinstance(item, str):
return len(current_list)
elif len(item) > max_length:
max_length = len(item)
return max_length