
Commit 188e5aa

Clean up
1 parent 1f6a7f5 commit 188e5aa

9 files changed: +39 -38 lines changed

Diff for: README.md

+1 -3

@@ -5,9 +5,7 @@ Support for N-dimensional arrays in LinkML.
 # Quick reference for common commands
 
 ```bash
-cd linkml-model
-poetry run gen-json-schema tests/input/examples/schema_definition-array-2.yaml
-poetry run gen-pydantic tests/input/examples/schema_definition-array-2.yaml
+poetry run gen-pydantic tests/input/temperature_schema.yaml > tests/array_classes_lol.py
 ```
 
 # Acknowledgements
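
The generated module can then be used directly. Below is a minimal, hypothetical usage sketch: it assumes the command above produced tests/array_classes_lol.py and that it exposes the classes referenced in the test diffs further down (e.g. LatitudeInDegSeries); neither the module's exports nor the import path are verified here.

```python
# Hypothetical usage sketch; module path and class name are taken from the
# test diffs below, not from verified package exports.
from tests.array_classes_lol import LatitudeInDegSeries

# N-dimensional arrays are represented as lists-of-lists on the pydantic model.
latitude = LatitudeInDegSeries(name="my_latitude", values=[[1, 2], [3, 4]])
print(latitude.values)  # [[1, 2], [3, 4]]
```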

Diff for: src/linkml_arrays/dumpers/yaml_array_file_dumper.py

+1 -1

@@ -1,4 +1,4 @@
-"""Base class for dumping a LinkML model to a YAML file with paths to files containing individual arrays."""
+"""Base class for dumping a LinkML model to YAML with paths to files containing arrays."""
 
 import os
 from abc import ABCMeta, abstractmethod

Diff for: src/linkml_arrays/dumpers/yaml_dumper.py

+1 -1

@@ -1,4 +1,4 @@
-"""Class for dumping a LinkML model to a YAML file."""
+"""Class for dumping a LinkML model to YAML."""
 
 from typing import Union
 

Diff for: src/linkml_arrays/dumpers/yaml_hdf5_dumper.py

+2 -2

@@ -1,4 +1,4 @@
-"""Class for dumping a LinkML model to a YAML file with paths to HDF5 files."""
+"""Class for dumping a LinkML model to YAML with paths to HDF5 files."""
 
 from pathlib import Path
 from typing import List, Union
@@ -10,7 +10,7 @@
 
 
 class YamlHdf5Dumper(YamlArrayFileDumper):
-    """Dumper class for LinkML models to YAML files with paths to HDF5 files, one for each array.
+    """Dumper class for LinkML models to YAML with paths to HDF5 files, one per array.
 
     Each array is written to an HDF5 dataset at path "/data" in a new HDF5 file.
     """

Diff for: src/linkml_arrays/dumpers/yaml_numpy_dumper.py

+2 -2

@@ -1,4 +1,4 @@
-"""Class for dumping a LinkML model to a YAML file with paths to NumPy files."""
+"""Class for dumping a LinkML model to YAML with paths to NumPy files."""
 
 from pathlib import Path
 from typing import List, Union
@@ -9,7 +9,7 @@
 
 
 class YamlNumpyDumper(YamlArrayFileDumper):
-    """Dumper class for LinkML models to YAML files with paths to NumPy .npy files, one for each array.
+    """Dumper class for LinkML models to YAML with paths to .npy files, one per array.
 
     Each array is written to an HDF5 dataset at path "/data" in a new HDF5 file.
     """

Diff for: src/linkml_arrays/loaders/yaml_loader.py

-1

@@ -2,7 +2,6 @@
 
 from typing import Type, Union
 
-import numpy as np
 import yaml
 from linkml_runtime import SchemaView
 from linkml_runtime.linkml_model import ClassDefinition

Diff for: tests/test_dumpers/test_dumpers.py

+8 -8

@@ -1,7 +1,6 @@
 """Test dumping LinkML pydantic models with arrays as lists-of-lists to various file formats."""
 
 import os
-import unittest
 from pathlib import Path
 
 import h5py
@@ -30,7 +29,7 @@
 INPUT_DIR = Path(__file__).parent.parent / "input"
 
 
-def create_container() -> Container:
+def _create_container() -> Container:
     latitude_in_deg = LatitudeInDegSeries(name="my_latitude", values=[[1, 2], [3, 4]])
     longitude_in_deg = LongitudeInDegSeries(name="my_longitude", values=[[5, 6], [7, 8]])
     date = DateSeries(values=["2020-01-01", "2020-01-02"])
@@ -39,9 +38,10 @@ def create_container() -> Container:
         conversion_factor=1000.0,
         values=[[[0, 1], [2, 3]], [[4, 5], [6, 7]]],
     )
+    # NOTE: currently no way to pass in the actual LatitudeInDegSeries object
     temperature_dataset = TemperatureDataset(
         name="my_temperature",
-        latitude_in_deg="my_latitude",  # currently no way to pass in the actual LatitudeInDegSeries object
+        latitude_in_deg="my_latitude",
         longitude_in_deg="my_longitude",
         date=date,
         day_in_d=days_in_d_since,
@@ -59,7 +59,7 @@ def create_container() -> Container:
 
 def test_yaml_dumper():
     """Test YamlDumper dumping to a YAML file."""
-    container = create_container()
+    container = _create_container()
 
     schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
     ret = YamlDumper().dumps(container, schemaview=schemaview)
@@ -75,7 +75,7 @@ def test_yaml_dumper():
 
 def test_yaml_numpy_dumper():
     """Test YamlNumpyDumper dumping to a YAML file and NumPy .npy files in a directory."""
-    container = create_container()
+    container = _create_container()
 
     schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
     ret = YamlNumpyDumper().dumps(container, schemaview=schemaview, output_dir="./out")
@@ -91,7 +91,7 @@ def test_yaml_numpy_dumper():
 
 def test_yaml_hdf5_dumper():
     """Test YamlNumpyDumper dumping to a YAML file and HDF5 datasets in a directory."""
-    container = create_container()
+    container = _create_container()
 
     schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
     ret = YamlHdf5Dumper().dumps(container, schemaview=schemaview, output_dir="./out")
@@ -107,7 +107,7 @@ def test_yaml_hdf5_dumper():
 
 def test_hdf5_dumper(tmp_path):
     """Test Hdf5Dumper dumping to an HDF5 file."""
-    container = create_container()
+    container = _create_container()
 
     schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
     output_file_path = tmp_path / "my_container.h5"
@@ -137,7 +137,7 @@ def test_hdf5_dumper(tmp_path):
 
 def test_zarr_directory_store_dumper(tmp_path):
     """Test ZarrDumper dumping to an HDF5 file."""
-    container = create_container()
+    container = _create_container()
 
     schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
     output_file_path = tmp_path / "my_container.zarr"

Diff for: tests/test_loaders/test_loaders.py

+10 -12

@@ -1,9 +1,7 @@
-"""Test loading data from various file formats into LinkML pydantic models with arrays as lists-of-lists."""
+"""Test loading data from various file formats into pydantic models with arrays as LoLs."""
 
-import unittest
 from pathlib import Path
 
-import numpy as np
 from hbreader import hbread
 from linkml_runtime import SchemaView
 
@@ -25,7 +23,7 @@
 )
 
 
-def check_container(container: Container):
+def _check_container(container: Container):
     assert isinstance(container, Container)
     assert container.name == "my_container"
 
@@ -40,8 +38,9 @@ def check_container(container: Container):
     assert isinstance(container.temperature_dataset, TemperatureDataset)
     assert container.temperature_dataset.name == "my_temperature"
     assert container.temperature_dataset.latitude_in_deg == "my_latitude"
-    # currently no way to get the actual LatitudeInDegSeries object from the TemperatureDataset object
-    # because the TemperatureDataset Pydantic object expects a string for the latitude_in_deg field
+    # currently no way to get the actual LatitudeInDegSeries object from the
+    # TemperatureDataset object because the TemperatureDataset Pydantic object
+    # expects a string for the latitude_in_deg field
     # to be isomorphic with the json schema / yaml representation
 
     assert container.temperature_dataset.longitude_in_deg == "my_longitude"
@@ -65,31 +64,31 @@ def test_yaml_loader():
     data_yaml = hbread("container_yaml.yaml", base_path=str(Path(__file__) / "../../input"))
     schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
     container = YamlLoader().loads(data_yaml, target_class=Container, schemaview=schemaview)
-    check_container(container)
+    _check_container(container)
 
 
 def test_yaml_numpy_loader():
     """Test loading of pydantic-style classes from YAML + Numpy arrays."""
     read_yaml = hbread("container_yaml_numpy.yaml", base_path=str(Path(__file__) / "../../input"))
     schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
     container = YamlNumpyLoader().loads(read_yaml, target_class=Container, schemaview=schemaview)
-    check_container(container)
+    _check_container(container)
 
 
 def test_yaml_hdf5_loader():
     """Test loading of pydantic-style classes from YAML + Numpy arrays."""
     read_yaml = hbread("container_yaml_hdf5.yaml", base_path=str(Path(__file__) / "../../input"))
     schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
     container = YamlHdf5Loader().loads(read_yaml, target_class=Container, schemaview=schemaview)
-    check_container(container)
+    _check_container(container)
 
 
 def test_hdf5_loader():
     """Test loading of pydantic-style classes from HDF5 datasets."""
     file_path = str(Path(__file__).parent.parent / "input" / "my_container.h5")
     schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
     container = Hdf5Loader().loads(file_path, target_class=Container, schemaview=schemaview)
-    check_container(container)
+    _check_container(container)
 
 
 def test_zarr_directory_store_loader():
@@ -99,5 +98,4 @@ def test_zarr_directory_store_loader():
     container = ZarrDirectoryStoreLoader().loads(
         file_path, target_class=Container, schemaview=schemaview
    )
-    check_container(container)
-
+    _check_container(container)
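
Taken together with the dumper tests above, these loaders form a round trip. A minimal sketch combining the two calls that appear verbatim in the diffs; the import paths are assumptions (the test files' import blocks are not shown in this commit), and Container is the class generated by gen-pydantic per the README diff:

```python
from linkml_runtime import SchemaView

# Assumed import paths; verify against the actual package layout.
from linkml_arrays.dumpers import YamlDumper
from linkml_arrays.loaders import YamlLoader
from tests.array_classes_lol import Container  # generated by gen-pydantic (see README diff)
from tests.test_dumpers.test_dumpers import _create_container  # fixture renamed in this commit

schemaview = SchemaView("tests/input/temperature_schema.yaml")
container = _create_container()

# Dump the container to a YAML string, then load it back into a pydantic model.
yaml_str = YamlDumper().dumps(container, schemaview=schemaview)
restored = YamlLoader().loads(yaml_str, target_class=Container, schemaview=schemaview)
assert restored.name == container.name
```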

Diff for: tox.ini

+14 -8

@@ -53,7 +53,7 @@ description = Run documentation linters.
 skip_install = true
 deps =
     darglint
-    flake8<5.0.0
+    flake8
     flake8-black
     flake8-bandit
     flake8-bugbear
@@ -73,14 +73,20 @@ description = Run the flake8 tool with several plugins (bandit, docstrings, impo
 [flake8]
 max-line-length = 100
 ignore =
-    DAR101 # Missing parameter(s) in Docstring: - with_git_hash
-    DAR201 # Missing "Returns" in Docstring: - return
-    DAR301 # Missing "Yields" in Docstring: - yield
-    E111 # indentation is not a multiple of 4
-    T201 # print found.
-    S101 # Use of assert detected.
+    DAR101
+    DAR201
+    DAR301
+    E111
+    T201
+    S101
+    ; DAR101 # Missing parameter(s) in Docstring: - with_git_hash
+    ; DAR201 # Missing "Returns" in Docstring: - return
+    ; DAR301 # Missing "Yields" in Docstring: - yield
+    ; E111 # indentation is not a multiple of 4
+    ; T201 # print found.
+    ; S101 # Use of assert detected.
 exclude =
-    tests/test_dumpers/array_classes.py
+    tests/array_classes_lol.py
 
 [testenv:mypy]
 deps = mypy
