Skip to content

Commit

Permalink
subset from interface and reformat (#57)
Browse files Browse the repository at this point in the history
* Update environment

* Update docs env

* Update env

* update build

* update build

* update build

* update build

* Update docs

* Update changelog

* Add option to create subset from interface

* Reformat and update docs

* Update changelog

* fix np.int
  • Loading branch information
wpreimes authored Jan 10, 2023
1 parent eaca29e commit 33a8fea
Show file tree
Hide file tree
Showing 16 changed files with 615 additions and 727 deletions.
5 changes: 5 additions & 0 deletions .readthedocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,11 @@ build:
tools:
python: mambaforge-4.10

python:
install:
- method: pip
path: .

sphinx:
configuration: docs/conf.py

Expand Down
1 change: 1 addition & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ Version 1.3.0
- Add module to assign custom metadata readers to ISMN_Interface
- Notebook added that describes using a custom metadata reader
- RTD build uses a separate, smaller environment.yml now (and mamba)
- ISMN_Interface now has a method to create an instance of itself for a selection of sensor ids (`ISMN_Interface.subset_from_ids`)


Version 1.2.0
Expand Down
12 changes: 10 additions & 2 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@ https://ismn.earth

The following tutorials are available in ``docs/examples``:

`1) ISMN reader basic functionality <docs/examples/interface.ipynb>`_
`1) ISMN reader basic functionality <https://ismn.readthedocs.io/en/latest/examples/interface.html>`_

`2) Adding custom metadata readers <docs/examples/custom_meta.ipynb>`_
`2) Adding custom metadata readers <https://ismn.readthedocs.io/en/latest/examples/custom_meta.html>`_

Citation
========
Expand Down Expand Up @@ -238,6 +238,14 @@ If you want to contribute please follow these steps:
We use pytest so a simple function called test_my_feature is enough
- submit a pull request to our master branch

Code Formatting
---------------
To apply PEP8-conformant styling to any changed files we use `yapf <https://github.com/google/yapf>`_. The correct
settings are already set in ``setup.cfg``. Therefore the following command
should be enough:

yapf file.py --in-place

Release new version
-------------------

Expand Down
393 changes: 86 additions & 307 deletions docs/examples/interface.ipynb

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,6 @@ dependencies:
- sphinx
- nbsphinx
- sphinx_rtd_theme
- yapf
- pytest
- pytest-cov
5 changes: 5 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -121,3 +121,8 @@ version = 4.0.2
package = ismn
extensions =
no_skeleton

[yapf]
based_on_style = yapf
indent_width = 4
column_limit = 79
26 changes: 16 additions & 10 deletions src/ismn/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@


def zip(func):

def wrapper(cls, *args, **kwargs):
if not cls.zip:
raise IOError("Zip archive expected, use @dir functions instead.")
Expand All @@ -42,9 +43,11 @@ def wrapper(cls, *args, **kwargs):


def dir(func):

def wrapper(cls, *args, **kwargs):
if cls.zip:
raise IOError("Unzipped archive expected, use @zip functions instead.")
raise IOError(
"Unzipped archive expected, use @zip functions instead.")
return func(cls, *args, **kwargs)

return wrapper
Expand Down Expand Up @@ -124,8 +127,8 @@ def clean_subpath(self, subpath) -> Union[Path, PurePosixPath]:
subpath = PurePosixPath(subpath)
else:
assert (
self.path / Path(subpath)
).exists(), "Subpath does not exist in archive"
self.path /
Path(subpath)).exists(), "Subpath does not exist in archive"

return subpath

Expand Down Expand Up @@ -185,7 +188,8 @@ def __scan_dir(self, station_subdirs: bool = True) -> OrderedDict:
if net not in cont.keys():
cont[net] = np.array([])
if station_subdirs:
cont[net] = np.append(cont[net], Path(net, stat.name))
cont[net] = np.append(cont[net],
Path(net, stat.name))
else:
cont[net] = np.append(cont[net], stat.name)

Expand All @@ -194,7 +198,9 @@ def __scan_dir(self, station_subdirs: bool = True) -> OrderedDict:
return self.cont

@dir
def __find_files_dir(self, subpath: str = None, fn_templ: str = "*.csv") -> list:
def __find_files_dir(self,
subpath: str = None,
fn_templ: str = "*.csv") -> list:
"""
Find files in the archive or a subdirectory of the archive
that match to the passed filename template.
Expand All @@ -209,7 +215,9 @@ def __find_files_dir(self, subpath: str = None, fn_templ: str = "*.csv") -> list
return filenames

@zip
def __find_files_zip(self, subpath: str = None, fn_templ: str = "*.csv") -> list:
def __find_files_zip(self,
subpath: str = None,
fn_templ: str = "*.csv") -> list:
"""
Find files in zip archive that match the passed template and subdir.
"""
Expand All @@ -224,8 +232,7 @@ def __find_files_zip(self, subpath: str = None, fn_templ: str = "*.csv") -> list
filter(
lambda f: fnmatch.fnmatch(f, f"{subpath}/{fn_templ}"),
all_files,
)
).copy()
)).copy()

return filterlist

Expand Down Expand Up @@ -330,8 +337,7 @@ def extract_dir(self, subdir_in_archive, out_path):
ls = np.array(self.zip.namelist())

filterlist = list(
filter(lambda x: x.startswith(str(subdir_in_archive)), ls)
).copy()
filter(lambda x: x.startswith(str(subdir_in_archive)), ls)).copy()

self.zip.extractall(members=filterlist, path=out_path)

Expand Down
3 changes: 2 additions & 1 deletion src/ismn/citations.txt
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,8 @@ TAHMO;We acknowledge the work of Nicolaas Cornelis van de Giesen and Frank Annor
TERENO;Zacharias, S., H.R. Bogena, L. Samaniego, M. Mauder, R. Fuß, T. Puetz, M. Frenzel, M. Schwank, C. Baessler, K. Butterbach-Bahl, O. Bens, E. Borg, A. Brauer, P. Dietrich, I. Hajnsek, G. Helle, R. Kiese, H. Kunstmann, S. Klotz, J.C. Munch, H. Papen, E. Priesack, H. P. Schmid, R. Steinbrecher, U. Rosenbaum, G. Teutsch, H. Vereecken. 2011. A Network of Terrestrial Environmental Observatories in Germany. Vadose Zone J. 10. 955–973. doi:10.2136/vzj2010.0139
TERENO;Bogena, H., Kunkel, R., Puetz, T., Vereecken, H., Kruger, E., Zacharias, S., Dietrich, P., Wollschlaeger, U., Kunstmann, H., Papen, H., Schmid, H., Munch, J., Priesack, E., Schwank, M., Bens, O., Brauer, A., Borg, E. & Hajnsek, I. (2012), ‘Tereno - long-term monitoring network for terrestrial environmental research’, Hydrologie und Wasserbewirtschaftung 56, 138–143.
TERENO;Bogena, H. R. (2016), ‘Tereno: German network of terrestrial environmental observatories’, Journal of large-scale research facilities JLSRF 2, 52.
UDC_SMOS;Schlenz, F., Dall'Amico, J., Loew, A., Mauser, W. (2012): Uncertainty Assessment of the SMOS Validation in the Upper Danube Catchment. IEEE Transactions on Geoscience and Remote Sensing, 50(5), pp.1517–1529. doi: 10.1109/TGRS.2011.2171694.
TxSON;Caldwell, T. G., T. Bongiovanni, M. H. Cosh, T. J. Jackson, A. Colliander, C. J. Abolt, R. Casteel, T. Larson, B. R. Scanlon, and M. H. Young (2019), The Texas Soil Observation Network: A comprehensive soil moisture dataset for remote sensing and land surface model validation, Vadose Zone Journal, 18:100034, doi:10.2136/vzj2019.04.0034
UDC_SMOS;Schlenz, F., Dall'Amico, J., Loew, A., Mauser, W. (2012): Uncertainty Assessment of the SMOS Validation in the Upper Danube Catchment. IEEE Transactions on Geoscience and Remote Sensing, 50(5), pp.1517–1529. doi: 10.1109/TGRS.2011.2171694.
UDC_SMOS;A. Loew, J. T. Dall'Amico, F. Schlenz, W. Mauser (2009): The Upper Danube soil moisture validation site: measurements and activities, paper presented at Earth Observation and Water Cycle conference, Frascati (Rome), 18 - 20 November 2009, to be published in ESA Special Publication SP-674.
UMBRIA;Brocca, L., Hasenauer, S., Lacava, T., Melone, F., Moramarco, T., Wagner, W., Dorigo, W., Matgen, P., Martínez-Fernández, J., Llorens, P., Latron, J., Martin, C., Bittelli, M. (2011). Soil moisture estimation through ASCAT and AMSR-E sensors: an intercomparison and validation study across Europe. Remote Sensing of Environment, 115, 3390-3408, doi:10.1016/j.rse.2011.08.003.
UMBRIA;Brocca, L., Melone, F., Moramarco, T. (2008). On the estimation of antecedent wetness condition in rainfall-runoff modelling. Hydrological Processes, 22 (5), 629-642.
Expand Down
78 changes: 42 additions & 36 deletions src/ismn/components.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,15 +106,14 @@ def __init__(
self.name = name if name is not None else self.__repr__()

def __repr__(self):
return (
f"{self.instrument}_{self.variable}_"
f"{self.depth.start:1.6f}_{self.depth.end:1.6f}"
)
return (f"{self.instrument}_{self.variable}_"
f"{self.depth.start:1.6f}_{self.depth.end:1.6f}")

@property
def metadata(self) -> MetaData:
return MetaData() if self.filehandler is None else self.filehandler.metadata

return MetaData(
) if self.filehandler is None else self.filehandler.metadata

@property
def data(self):
return self.read_data()
Expand Down Expand Up @@ -261,7 +260,7 @@ def __init__(self, name, lon, lat, elev):

def __repr__(self):
# Provide basic station information.
return f"Sensors at '{self.name}': {[s.name for s in self.sensors.values()]}"
return f"Station '{self.name}' with Sensors: {[s.name for s in self.sensors.values()]}"

@property
def metadata(self) -> MetaData:
Expand Down Expand Up @@ -317,9 +316,10 @@ def get_depths(self, variable=None):

return depths

def get_min_max_obs_timestamp(
self, variable="soil moisture", min_depth=None, max_depth=None
):
def get_min_max_obs_timestamp(self,
variable="soil moisture",
min_depth=None,
max_depth=None):
"""
Goes through the sensors associated with this station
and checks the metadata to get and approximate time coverage of the station.
Expand Down Expand Up @@ -467,14 +467,10 @@ def get_sensors(self, variable, depth_from, depth_to):
sensors : numpy.ndarray
array of sensors found for the given combination of variable and depths
"""
return np.array(
[
s
for s in self.iter_sensors(
variable=variable, depth=Depth(depth_from, depth_to)
)
]
)
return np.array([
s for s in self.iter_sensors(
variable=variable, depth=Depth(depth_from, depth_to))
])


class Network(IsmnComponent):
Expand Down Expand Up @@ -513,7 +509,7 @@ def __init__(self, name, stations=None):

def __repr__(self):
# Provide basic Network information.
return f"Stations in '{self.name}': {list(self.stations.keys())}"
return f"Network '{self.name}' with Stations: {list(self.stations.keys())}"

def __getitem__(self, item: Union[int, str]):
# shortcut to access networks directly
Expand Down Expand Up @@ -662,7 +658,6 @@ class NetworkCollection(IsmnComponent):
"""

def __init__(self, networks):

"""
Create network collection from previously created Networks.
Expand All @@ -682,21 +677,30 @@ def __init__(self, networks):
lons += net_lons
lats += net_lats

self.grid = BasicGrid(lons, lats) if (len(lons) > 0 and len(lats) > 0) else None
self.grid = BasicGrid(lons, lats) if (len(lons) > 0 and
len(lats) > 0) else None

def __repr__(self, indent: str = ""):
return ",\n".join(
[
f"{indent}{net.name}: {list(net.stations.keys())}"
for net in self.networks.values()
]
)
return ",\n".join([
f"{indent}{net.name}: {list(net.stations.keys())}"
for net in self.networks.values()
])

def __getitem__(self, item: Union[int, str]):
def __getitem__(self, item: Union[int, str, list]) -> \
Union["NetworkCollection", Network]:
# shortcut to access networks directly
if isinstance(item, int):
item = list(self.networks.keys())[item]
return self.networks[item]
if isinstance(item, (int, str)):
if isinstance(item, int):
item = list(self.networks.keys())[item]
net: Network = self.networks[item]
return net
else:
keys = list(self.networks.keys())
sub: NetworkCollection = NetworkCollection(networks=[
self.networks[n] if isinstance(n, str) else self
.networks[keys[n]] for n in item
])
return sub

def iter_networks(self) -> Network:
"""
Expand Down Expand Up @@ -741,7 +745,8 @@ def station4gpi(self, gpi):
in_grid = np.isin(idxs, self.grid.activegpis)

if not all(in_grid):
raise ValueError(f"Index not found in loaded grid: {idxs[~in_grid]}")
raise ValueError(
f"Index not found in loaded grid: {idxs[~in_grid]}")

lon, lat = self.grid.gpi2lonlat(idxs)

Expand Down Expand Up @@ -798,15 +803,16 @@ def export_citations(self, out_file=None):
references: OrderedDict
Network names as keys and network references as values
"""
refs = OrderedDict(
[(net.name, net.get_citations()) for net in self.iter_networks()]
)
refs = OrderedDict([
(net.name, net.get_citations()) for net in self.iter_networks()
])

if out_file is not None:
with open(out_file, mode="w") as out_file:
for name, reflist in refs.items():
out_file.write(f"References for Network {name}:\n")
out_file.write("-----------------------------------------\n")
out_file.write(
"-----------------------------------------\n")
for ref in reflist:
out_file.write(f"{ref}\n")
out_file.write("\n")
Expand Down
Loading

0 comments on commit 33a8fea

Please sign in to comment.