[DONE] fix lookup problems with TimeSeriesSubsetArrayField (#210)
* fix lookup problems with TimeSeriesSubsetArrayField

* added test for model with boundaries

* Added note about broken breaches object
jpprins1 authored Dec 21, 2023
1 parent 079728d commit bd0dd4b
Showing 7 changed files with 66 additions and 5 deletions.
7 changes: 6 additions & 1 deletion HISTORY.rst
@@ -4,7 +4,12 @@ History
2.2.2 (unreleased)
------------------

- Nothing changed yet.
- Bugfix (**only** in case of **boundaries**): corrected the sorting of timeseries in all `TimeSeriesSubsetArrayField` fields.
  This affected Nodes: [infiltration_rate_simple, ucx, ucy, leak, intercepted_volume, q_sss]
  and Lines: [qp, up1, breach_depth, breach_width]

- Note: the `depth` and `width` fields are broken on the `breaches` object.
  Please use `breach_depth` and `breach_width` on the `lines` object instead.


2.2.1 (2023-12-05)
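The note above about the breaches object implies a simple workaround. A minimal sketch (the file paths are placeholders for a result set of your own; the field access mirrors the test added in this commit):

    from threedigrid.admin.gridresultadmin import GridH5ResultAdmin

    # Placeholder paths: point these at your own gridadmin/results pair.
    gr = GridH5ResultAdmin("gridadmin.h5", "results_3di.nc")

    # gr.breaches.depth and gr.breaches.width are currently broken (see note above),
    # so read the equivalent fields from the lines object instead:
    one_d_lines = gr.lines.subset("1D_ALL")
    breach_depth = one_d_lines.breach_depth
    breach_width = one_d_lines.breach_width

    gr.close()
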
14 changes: 14 additions & 0 deletions tests/conftest.py
@@ -35,6 +35,20 @@ def ga(request):
    yield ga


@pytest.fixture
def gr_bergermeer_with_boundaries():
    """
    Bergermeer model including boundaries, which results in the node ids in
    `Mesh2D_node_id` not being contiguous (step size 1).
    """
    gr = GridH5ResultAdmin(
        os.path.join(test_file_dir, "bergermeer_with_boundaries/", "gridadmin.h5"),
        os.path.join(test_file_dir, "bergermeer_with_boundaries/", "results_3di.nc"),
    )
    yield gr
    gr.close()


@pytest.fixture(params=["gridadmin.h5", "gridadmin_v2.h5"])
def ga_export(request, tmp_path):
    exporter = GridAdminH5Export(
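The docstring above is the reason this fixture exists: with boundaries, the node ids in `Mesh2D_node_id` no longer increase with a step of 1, so an id cannot be used directly as a positional index into the NetCDF arrays. An illustrative numpy sketch with synthetic values (not taken from the test model):

    import numpy as np

    node_ids = np.array([1, 2, 3, 7, 8, 9])             # gap where boundary nodes sit
    values = np.array([0.1, 0.2, 0.3, 0.7, 0.8, 0.9])   # one value per node, NetCDF order

    # Wrong: using the id itself as an index (values[7]) is out of range here,
    # and would silently pick the wrong element for smaller gaps.
    # Correct: map the id to its array position first.
    position = int(np.searchsorted(node_ids, 7))
    assert values[position] == 0.7
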
3 changes: 3 additions & 0 deletions tests/test_files/bergermeer_with_boundaries/gridadmin.h5
Git LFS file not shown
3 changes: 3 additions & 0 deletions tests/test_files/bergermeer_with_boundaries/readme.txt
@@ -0,0 +1,3 @@
Note about results_3di.nc:
'Mesh1D_breach_width' and 'Mesh1D_breach_depth' have been manually altered to
include non -9999 values for 1 or 2 lines.
3 changes: 3 additions & 0 deletions tests/test_files/bergermeer_with_boundaries/results_3di.nc
Git LFS file not shown
35 changes: 35 additions & 0 deletions tests/test_gridresultadmin.py
@@ -1,9 +1,44 @@
import numpy as np
import pytest

from threedigrid.admin.gridresultadmin import GridH5ResultAdmin
from threedigrid.admin.nodes.models import Nodes


def test_gr_with_boundaries(gr_bergermeer_with_boundaries: GridH5ResultAdmin):
    gr_one_d_line_subset = gr_bergermeer_with_boundaries.lines.subset("1D_ALL")
    gr_two_d_node_subset = gr_bergermeer_with_boundaries.nodes.subset("2D_ALL")

    assert np.all(
        gr_one_d_line_subset.id
        == gr_bergermeer_with_boundaries.netcdf_file["Mesh1DLine_id"][:]
    )
    assert np.all(
        gr_two_d_node_subset.id
        == gr_bergermeer_with_boundaries.netcdf_file["Mesh2DNode_id"][:]
    )

    # Check ordering/correctness of breach_depth and breach_width
    assert np.all(
        gr_one_d_line_subset.breach_depth
        == gr_bergermeer_with_boundaries.netcdf_file["Mesh1D_breach_depth"][:]
    )
    assert np.all(
        gr_one_d_line_subset.breach_width
        == gr_bergermeer_with_boundaries.netcdf_file["Mesh1D_breach_width"][:]
    )

    # Check ucx and ucy on nodes
    assert np.all(
        gr_two_d_node_subset.ucx
        == gr_bergermeer_with_boundaries.netcdf_file["Mesh2D_ucx"][:]
    )
    assert np.all(
        gr_two_d_node_subset.ucy
        == gr_bergermeer_with_boundaries.netcdf_file["Mesh2D_ucy"][:]
    )


def test_nodes_timeseries_start_end_time_kwargs(gr):
    ts = gr.nodes.timestamps
    qs_s1 = gr.nodes.timeseries(start_time=ts[0], end_time=ts[6]).s1
6 changes: 2 additions & 4 deletions threedigrid/orm/base/fields.py
@@ -238,7 +238,6 @@ def get_value(self, datasource, name, **kwargs):
        # to support h5py >= 3.1.0
        timeseries_filter_to_use = np.argwhere(timeseries_filter).flatten()

-       lookup_index = kwargs.get("lookup_index")
        if self._source_name not in list(datasource.keys()):
            return np.array([])
        source_data = datasource[self._source_name][timeseries_filter_to_use, :]
@@ -249,11 +248,10 @@ def get_value(self, datasource, name, **kwargs):
        if source_data.shape[1] == subset_index.shape[0] - 1:
            subset_index = subset_index[1:]

+       # Note: subset_index already contains correct sorting
+       # that matches with the NetCDF timeseries.
        templ[:, subset_index] = source_data

-       # sort the stacked array by lookup
-       if lookup_index is not None:
-           return templ[:, lookup_index]
        return templ

    def __repr__(self):
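The gist of the fields.py change, as a synthetic numpy sketch (stand-in values and a hypothetical `lookup_index`, not the library's internal code path): `subset_index` already places the NetCDF columns at their correct target positions, so re-indexing the stacked result with a separate `lookup_index` afterwards reorders data that was already in the right order; this is consistent with the boundaries-only bugfix noted in HISTORY.rst.

    import numpy as np

    source_data = np.array([[10, 20, 30, 40],
                            [11, 21, 31, 41]])   # 2 timesteps, 4 elements

    subset_index = np.array([2, 0, 3, 1])        # NetCDF column -> target position
    templ = np.full(source_data.shape, -9999)
    templ[:, subset_index] = source_data         # already correctly ordered

    lookup_index = np.argsort(subset_index)      # hypothetical extra reordering
    scrambled = templ[:, lookup_index]           # what the removed code applied on top

    assert not np.array_equal(scrambled, templ)
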
