Merge pull request #67 from fema-ffrd/bugfix/volume-accounting
Fix path to Unsteady/Summary/Volume Accounting
thwllms authored Feb 4, 2025
2 parents 3bce1e5 + 81331ec commit 2607d01
Showing 2 changed files with 5 additions and 5 deletions.
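
For reference, the constant corrected by this commit resolves to the HDF5 group "Results/Unsteady/Summary/Volume Accounting" inside a HEC-RAS plan HDF file. A minimal sketch of checking for that group directly with h5py (the file name below is a placeholder, not part of this commit):

import h5py

# Hypothetical plan HDF file; the group path is the one fixed by this commit.
with h5py.File("Example.p01.hdf", "r") as f:
    path = "Results/Unsteady/Summary/Volume Accounting"
    if path in f:
        print(f[path])  # volume accounting results for the unsteady run
    else:
        print("No volume accounting output in this plan file")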
pyproject.toml (4 changes: 2 additions & 2 deletions)
@@ -12,11 +12,11 @@ classifiers = [
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
]
version = "0.7.1"
version = "0.7.2"
dependencies = ["h5py", "geopandas>=1.0,<2.0", "pyarrow", "xarray"]

[project.optional-dependencies]
dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "kerchunk", "zarr", "dask", "fsspec", "s3fs", "fiona==1.9.6"]
dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "kerchunk", "zarr==2.18.2", "dask", "fsspec", "s3fs", "fiona==1.9.6"]
docs = ["sphinx", "numpydoc", "sphinx_rtd_theme"]

[project.urls]
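
With the version bump above, the released package reports 0.7.2. A quick way to confirm which release is installed (assuming rashdf was installed from PyPI):

import importlib.metadata

# Prints the installed rashdf version; expected "0.7.2" after this release.
print(importlib.metadata.version("rashdf"))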
src/rashdf/plan.py (6 changes: 3 additions & 3 deletions)
@@ -160,7 +160,7 @@ class RasPlanHdf(RasGeomHdf):
OBS_DATA_PATH = "Event Conditions/Observed Data"
RESULTS_UNSTEADY_PATH = "Results/Unsteady"
RESULTS_UNSTEADY_SUMMARY_PATH = f"{RESULTS_UNSTEADY_PATH}/Summary"
VOLUME_ACCOUNTING_PATH = f"{RESULTS_UNSTEADY_PATH}/Volume Accounting"
VOLUME_ACCOUNTING_PATH = f"{RESULTS_UNSTEADY_SUMMARY_PATH}/Volume Accounting"
BASE_OUTPUT_PATH = f"{RESULTS_UNSTEADY_PATH}/Output/Output Blocks/Base Output"
SUMMARY_OUTPUT_2D_FLOW_AREAS_PATH = (
f"{BASE_OUTPUT_PATH}/Summary Output/2D Flow Areas"
@@ -1511,7 +1511,7 @@ def cross_sections_additional_velocity_total(self) -> DataFrame:
def _zmeta(self, ds: xr.Dataset) -> Dict:
"""Given a xarray Dataset, return kerchunk-style zarr reference metadata."""
from kerchunk.hdf import SingleHdf5ToZarr
-import zarr
+from zarr.storage import MemoryStore
import base64

encoding = {}
@@ -1546,7 +1546,7 @@ def _zmeta(self, ds: xr.Dataset) -> Dict:
chunk_meta[chunk_key] = [str(self._loc), value["offset"], value["size"]]
# "Write" the Dataset to a temporary in-memory zarr store (which
# is the same a Python dictionary)
-zarr_tmp = zarr.MemoryStore()
+zarr_tmp = MemoryStore()
# Use compute=False here because we don't _actually_ want to write
# the data to the zarr store, we just want to generate the metadata.
ds.to_zarr(zarr_tmp, mode="w", compute=False, encoding=encoding)
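
The surrounding _zmeta code writes only zarr metadata, not chunk data, by targeting an in-memory store with compute=False. A standalone sketch of that pattern under the zarr 2.x pin above (the toy dataset is an assumption for illustration, not taken from rashdf; dask is needed for the lazy write):

import numpy as np
import xarray as xr
from zarr.storage import MemoryStore

# Toy dataset, chunked so its variables are dask-backed and can be written lazily.
ds = xr.Dataset({"depth": ("time", np.zeros(10))}).chunk({"time": 5})

# MemoryStore behaves like a dict of key -> bytes.
store = MemoryStore()

# compute=False writes group/array metadata but skips the actual chunk data.
ds.to_zarr(store, mode="w", compute=False)

print(sorted(store))  # metadata keys such as ".zgroup", "depth/.zarray", "depth/.zattrs"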
