Commit 3179f83

Merge pull request #404 from bioimage-io/bump_release
bump patch
2 parents: e680c0e + 96e06b6

File tree: 3 files changed, +29 / -14 lines


README.md (+2 -1)
@@ -131,10 +131,11 @@ The model specification and its validation tools can be found at <https://github
 
 ## Changelog
 
-### 0.6.8 (to be released)
+### 0.6.8
 
 * testing model inference will now check all weight formats
   (previously only the first one for which model adapter creation succeeded had been checked)
+* fix predict with blocking (Thanks @thodkatz)
 
 ### 0.6.7
 
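The first changelog entry changes how weight formats are iterated during model testing: every format present on the model is now exercised, rather than only the first one whose adapter could be created. A minimal sketch of that behavioral difference, using hypothetical stand-ins (test_all_weight_formats and the per-format callables are illustrative, not the actual bioimageio.core test code):

```python
from typing import Callable, Dict, List, Tuple

# Hypothetical per-format test callable; the real inference test lives in
# bioimageio.core and has a different signature.
TestFn = Callable[[str], None]  # raises on failure


def test_all_weight_formats(tests: Dict[str, TestFn]) -> List[Tuple[str, str]]:
    """Run a test for every available weight format instead of stopping early."""
    results: List[Tuple[str, str]] = []
    for weight_format, run_test in tests.items():
        try:
            run_test(weight_format)
            results.append((weight_format, "passed"))
        except Exception as e:
            # keep going: a failure in one format must not hide the others
            results.append((weight_format, f"failed: {e.__class__.__name__}({e})"))
    return results


if __name__ == "__main__":
    def _ok(wf: str) -> None:
        pass

    def _broken(wf: str) -> None:
        raise ImportError("backend not installed")

    print(test_all_weight_formats({"onnx": _ok, "torchscript": _broken}))
```

Each format gets its own pass/fail entry, which is what makes the new behavior stricter than the old "first adapter that loads wins" logic.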
bioimageio/core/VERSION (+1 -1)
@@ -1,3 +1,3 @@
 {
-  "version": "0.6.7"
+  "version": "0.6.8"
 }
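Since VERSION is plain JSON, the package version can be read back at runtime. A minimal sketch of that pattern (the exact loading code in bioimageio.core may differ):

```python
import json
from pathlib import Path

# Assumes this module sits next to the VERSION file, as bioimageio/core/ does.
_version_file = Path(__file__).parent / "VERSION"

# VERSION holds {"version": "0.6.8"}, so a single json.loads call is enough.
__version__: str = json.loads(_version_file.read_text(encoding="utf-8"))["version"]
```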

bioimageio/core/model_adapters/_model_adapter.py (+26 -12)
@@ -60,12 +60,17 @@ def create(
             )
 
         weights = model_description.weights
-        errors: List[str] = []
+        errors: List[Tuple[WeightsFormat, Exception]] = []
         weight_format_priority_order = (
             DEFAULT_WEIGHT_FORMAT_PRIORITY_ORDER
             if weight_format_priority_order is None
             else weight_format_priority_order
         )
+        # limit weight formats to the ones present
+        weight_format_priority_order = [
+            w for w in weight_format_priority_order if getattr(weights, w) is not None
+        ]
+
         for wf in weight_format_priority_order:
             if wf == "pytorch_state_dict" and weights.pytorch_state_dict is not None:
                 try:
@@ -77,7 +82,7 @@ def create(
                         devices=devices,
                     )
                 except Exception as e:
-                    errors.append(f"{wf}: {e}")
+                    errors.append((wf, e))
             elif (
                 wf == "tensorflow_saved_model_bundle"
                 and weights.tensorflow_saved_model_bundle is not None
@@ -89,7 +94,7 @@ def create(
                         model_description=model_description, devices=devices
                     )
                 except Exception as e:
-                    errors.append(f"{wf}: {e}")
+                    errors.append((wf, e))
             elif wf == "onnx" and weights.onnx is not None:
                 try:
                     from ._onnx_model_adapter import ONNXModelAdapter
@@ -98,7 +103,7 @@ def create(
                         model_description=model_description, devices=devices
                    )
                 except Exception as e:
-                    errors.append(f"{wf}: {e}")
+                    errors.append((wf, e))
             elif wf == "torchscript" and weights.torchscript is not None:
                 try:
                     from ._torchscript_model_adapter import TorchscriptModelAdapter
@@ -107,7 +112,7 @@ def create(
                         model_description=model_description, devices=devices
                     )
                 except Exception as e:
-                    errors.append(f"{wf}: {e}")
+                    errors.append((wf, e))
             elif wf == "keras_hdf5" and weights.keras_hdf5 is not None:
                 # keras can either be installed as a separate package or used as part of tensorflow
                 # we try to first import the keras model adapter using the separate package and,
@@ -125,15 +130,24 @@ def create(
                         model_description=model_description, devices=devices
                     )
                 except Exception as e:
-                    errors.append(f"{wf}: {e}")
+                    errors.append((wf, e))
 
         assert errors
-        error_list = "\n - ".join(errors)
-        raise ValueError(
-            "None of the weight format specific model adapters could be created for"
-            + f" '{model_description.id or model_description.name}'"
-            + f" in this environment. Errors are:\n\n{error_list}.\n\n"
-        )
+        if len(weight_format_priority_order) == 1:
+            assert len(errors) == 1
+            raise ValueError(
+                f"The '{weight_format_priority_order[0]}' model adapter could not be created"
+                + f" in this environment:\n{errors[0][1].__class__.__name__}({errors[0][1]}).\n\n"
+            )
+
+        else:
+            error_list = "\n - ".join(
+                f"{wf}: {e.__class__.__name__}({e})" for wf, e in errors
+            )
+            raise ValueError(
+                "None of the weight format specific model adapters could be created"
+                + f" in this environment. Errors are:\n\n{error_list}.\n\n"
+            )
 
     @final
     def load(self, *, devices: Optional[Sequence[str]] = None) -> None:
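The net effect of this diff is that create() only tries adapters for weight formats that are actually present, records each failure as a (format, exception) pair, and tailors the final error to whether one or several formats were attempted. A standalone sketch of the same pattern, decoupled from the real adapter classes (create_adapter and the factory callables here are placeholders, not the bioimageio.core API):

```python
from typing import Callable, Dict, List, Optional, Sequence, Tuple


def create_adapter(
    factories: Dict[str, Callable[[], object]],
    priority_order: Optional[Sequence[str]] = None,
) -> object:
    """Try adapter factories in priority order, mirroring the new error handling."""
    # keep only the weight formats that are actually present (the new filtering step)
    order = [wf for wf in (priority_order or list(factories)) if wf in factories]

    errors: List[Tuple[str, Exception]] = []
    for wf in order:
        try:
            return factories[wf]()  # first factory that succeeds wins
        except Exception as e:
            errors.append((wf, e))

    # at least one format must have been attempted (as in the original code)
    assert errors
    if len(order) == 1:
        # a single candidate format gets a focused error message
        raise ValueError(
            f"The '{order[0]}' model adapter could not be created in this environment:\n"
            f"{errors[0][1].__class__.__name__}({errors[0][1]})"
        )
    error_list = "\n - ".join(f"{wf}: {e.__class__.__name__}({e})" for wf, e in errors)
    raise ValueError(
        "None of the weight format specific model adapters could be created"
        f" in this environment. Errors are:\n\n{error_list}"
    )
```

With one available format the underlying exception is surfaced directly; with several, every failure is listed, matching the two ValueError branches in the diff.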
