Commit 3d61e05

fix type annotations of return types in __main__
1 parent eaa0515 commit 3d61e05

File tree: 1 file changed (+8 −8 lines)

bioimageio/core/__main__.py

Lines changed: 8 additions & 8 deletions
@@ -56,7 +56,7 @@ def package(
         show_default=False,
     ),
     verbose: bool = typer.Option(False, help="show traceback of exceptions"),
-) -> int:
+):
     # typer bug: typer returns empty tuple instead of None if weights_order_priority is not given
     weights_priority_order = weights_priority_order or None

@@ -85,7 +85,7 @@ def test_model(
     weight_format: Optional[WeightFormatEnum] = typer.Option(None, help="The weight format to use."),
     devices: Optional[List[str]] = typer.Option(None, help="Devices for running the model."),
     decimal: int = typer.Option(4, help="The test precision."),
-) -> int:
+):
     # this is a weird typer bug: default devices are empty tuple although they should be None
     if len(devices) == 0:
         devices = None
@@ -126,7 +126,7 @@ def test_resource(
     weight_format: Optional[WeightFormatEnum] = typer.Option(None, help="(for model only) The weight format to use."),
     devices: Optional[List[str]] = typer.Option(None, help="(for model only) Devices for running the model."),
     decimal: int = typer.Option(4, help="(for model only) The test precision."),
-) -> int:
+):
     # this is a weird typer bug: default devices are empty tuple although they should be None
     if len(devices) == 0:
         devices = None
@@ -164,7 +164,7 @@ def predict_image(
     tiling: Optional[bool] = typer.Option(None, help="Whether to run prediction in tiling mode."),
     weight_format: Optional[WeightFormatEnum] = typer.Option(None, help="The weight format to use."),
     devices: Optional[List[str]] = typer.Option(None, help="Devices for running the model."),
-) -> int:
+):

     if isinstance(padding, str):
         padding = json.loads(padding.replace("'", '"'))
@@ -203,7 +203,7 @@ def predict_images(
     tiling: Optional[bool] = typer.Option(None, help="Whether to run prediction in tiling mode."),
     weight_format: Optional[WeightFormatEnum] = typer.Option(None, help="The weight format to use."),
     devices: Optional[List[str]] = typer.Option(None, help="Devices for running the model."),
-) -> int:
+):
     input_files = glob(input_pattern)
     input_names = [os.path.split(infile)[1] for infile in input_files]
     output_files = [os.path.join(output_folder, fname) for fname in input_names]
@@ -246,7 +246,7 @@ def convert_torch_weights_to_onnx(
     opset_version: Optional[int] = typer.Argument(12, help="Onnx opset version."),
     use_tracing: bool = typer.Option(True, help="Whether to use torch.jit tracing or scripting."),
     verbose: bool = typer.Option(True, help="Verbosity"),
-) -> int:
+):
     ret_code = torch_converter.convert_weights_to_onnx(model_rdf, output_path, opset_version, use_tracing, verbose)
     sys.exit(ret_code)

@@ -259,7 +259,7 @@ def convert_torch_weights_to_torchscript(
     ),
     output_path: Path = typer.Argument(..., help="Where to save the torchscript weights."),
     use_tracing: bool = typer.Option(True, help="Whether to use torch.jit tracing or scripting."),
-) -> int:
+):
     ret_code = torch_converter.convert_weights_to_torchscript(model_rdf, output_path, use_tracing)
     sys.exit(ret_code)

@@ -274,7 +274,7 @@ def convert_keras_weights_to_tensorflow(
         ..., help="Path to the model resource description file (rdf.yaml) or zipped model."
     ),
     output_path: Path = typer.Argument(..., help="Where to save the tensorflow weights."),
-) -> int:
+):
     ret_code = keras_converter.convert_weights_to_tensorflow_saved_model_bundle(model_rdf, output_path)
     sys.exit(ret_code)

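For context, the converter commands in the diff end in sys.exit(ret_code), i.e. they never return an int to a caller, which is presumably why the "-> int" annotations are dropped across the module. Below is a minimal sketch of the resulting pattern; the command name demo_command and its single option are hypothetical, not code from bioimageio/core:

import sys
from typing import List, Optional

import typer

app = typer.Typer()


@app.command()
def demo_command(
    devices: Optional[List[str]] = typer.Option(None, help="Devices for running the model."),
):  # no "-> int": the command terminates via sys.exit() rather than returning a value
    # quirk noted in the diff: an omitted multi-value option can arrive as an
    # empty tuple instead of None, so it is normalized before use
    if devices is not None and len(devices) == 0:
        devices = None

    ret_code = 0  # placeholder for the real work (e.g. running a weight conversion)
    sys.exit(ret_code)


if __name__ == "__main__":
    app()

Dropping the annotation keeps the signatures consistent with this sys.exit()-based control flow instead of promising an int return that never happens.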