From 70f84e314ef0085644b4e932f7ddcf650df501ac Mon Sep 17 00:00:00 2001
From: HanJin Choi <156498561+Hanjin-Choi@users.noreply.github.com>
Date: Tue, 24 Sep 2024 15:36:45 +0900
Subject: [PATCH] [res/tfl_recipes] Add REGRESS_Issue_13863 (#14064)

This commit adds the REGRESS_Issue_13863 TFLite recipe.

ONE-DCO-1.0-Signed-off-by: HanJin Choi <hanjin4647@gmail.com>
---
 compiler/common-artifacts/exclude.lst         |  1 +
 .../luci/service/src/Nodes/CircleReshape.cpp  | 35 ++++++++++++++++-
 compiler/luci/tests/test.lst                  |  2 +
 compiler/onnx-tools/onnx-extract.py           | 38 +++++++++++++++++++
 .../Reshape_004/test.recipe                   | 31 +++++++++++++++
 5 files changed, 105 insertions(+), 2 deletions(-)
 create mode 100644 compiler/onnx-tools/onnx-extract.py
 create mode 100644 res/TensorFlowLiteRecipes/Reshape_004/test.recipe

diff --git a/compiler/common-artifacts/exclude.lst b/compiler/common-artifacts/exclude.lst
index 4b934277853..a3f206df6d4 100644
--- a/compiler/common-artifacts/exclude.lst
+++ b/compiler/common-artifacts/exclude.lst
@@ -125,6 +125,7 @@ tcgenerate(ReLU6_dynamic_000) # TestDataGenerator does not support unknown dimen
 tcgenerate(ReLUN1To1_000)
 tcgenerate(ReLUN1To1_dynamic_000) # TestDataGenerator does not support unknown dimension
 tcgenerate(Reshape_003) # luci-interpreter doesn't support reshape without built-in option
+tcgenerate(Reshape_004) # has 0 in shape
 tcgenerate(ReverseSequence_000)
 tcgenerate(ReverseV2_000)
 tcgenerate(Round_000)
diff --git a/compiler/luci/service/src/Nodes/CircleReshape.cpp b/compiler/luci/service/src/Nodes/CircleReshape.cpp
index 28eb6303735..cd95f125d4c 100644
--- a/compiler/luci/service/src/Nodes/CircleReshape.cpp
+++ b/compiler/luci/service/src/Nodes/CircleReshape.cpp
@@ -84,15 +84,34 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
   {
     LUCI_ASSERT(const_shape_node->dtype() == S32, "Only support int32 CircleConst");
 
+    // NOTE if rank(shape_by_input before) < rank(shape_by_input after),
+    // shape_by_input after will be filled with unknown dims
     shape_by_input.rank(const_shape_node->size());
 
     for (uint32_t axis = 0; axis < shape_by_input.rank(); ++axis)
     {
-      shape_by_input.dim(axis) = const_shape_node->at<S32>(axis);
       if (const_shape_node->at<S32>(axis) < 0)
       {
         shape_by_input.dim(axis).unset();
       }
+      else if (const_shape_node->at<S32>(axis) == 0)
+      {
+        const auto node_tensor = loco::must_cast<luci::CircleNode *>(node->tensor());
+        // set dim value to input
+        if (node_tensor->shape_status() == luci::ShapeStatus::VALID && axis < node_tensor->rank())
+          shape_by_input.dim(axis) = node_tensor->dim(axis);
+        else
+        {
+          // stop to check if this case exists, for debugging
+          LUCI_ASSERT(false, "Check Reshape shape with 0");
+        }
+      }
+      else
+      {
+        shape_by_input.dim(axis).set(const_shape_node->at<S32>(axis));
+      }
+      // check valid or stop for debugging
+      assert(shape_by_input.dim(axis).value() > 0 || !shape_by_input.dim(axis).known());
     }
   }
   else
@@ -143,7 +162,7 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
   {
     for (uint32_t dim_index = 0; dim_index < output_shape.rank(); ++dim_index)
     {
-      const uint32_t dim_value = output_shape.dim(dim_index).value();
+      uint32_t dim_value = output_shape.dim(dim_index).value();
       if (not output_shape.dim(dim_index).known())
       {
         LUCI_ASSERT(unknown_dim_index == UINT32_MAX, "More than one unknown dimension");
@@ -151,6 +170,18 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
       }
       else
       {
+        if (!dim_value)
+        {
+          // refer https://github.com/Samsung/ONE/issues/14074#issuecomment-2370795003
+          // set dim value to follow input
+          if (dim_index < input_shape.rank())
+            dim_value = input_shape.dim(dim_index).value();
+          else
+          {
+            // stop to check if this case exists, for debugging
+            LUCI_ASSERT(false, "Check Reshape shape with 0");
+          }
+        }
         output_element_count *= dim_value;
       }
     }
diff --git a/compiler/luci/tests/test.lst b/compiler/luci/tests/test.lst
index c04800a9b29..35d780bd8a2 100644
--- a/compiler/luci/tests/test.lst
+++ b/compiler/luci/tests/test.lst
@@ -142,6 +142,7 @@ addread(Reshape_000)
 addread(Reshape_001)
 addread(Reshape_002)
 #addread(Reshape_003) # no input, no option is not supported
+addread(Reshape_004)
 addread(Reshape_U8_000)
 addread(ResizeBilinear_000)
 addread(ResizeBilinear_U8_000)
@@ -374,6 +375,7 @@ addwrite(Reshape_000)
 addwrite(Reshape_001)
 addwrite(Reshape_002)
 #addwrite(Reshape_003) # no input, no option is not supported
+addwrite(Reshape_004)
 addwrite(Reshape_U8_000)
 addwrite(ResizeBilinear_000)
 addwrite(ResizeBilinear_U8_000)
diff --git a/compiler/onnx-tools/onnx-extract.py b/compiler/onnx-tools/onnx-extract.py
new file mode 100644
index 00000000000..3306dc5b0ef
--- /dev/null
+++ b/compiler/onnx-tools/onnx-extract.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2024 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import onnx
+import os
+import sys
+
+
+def _help_exit(cmd_name):
+    print('Produce shape-inferred ONNX file')
+    print('Usage: {0} [onnx_in_path] [onnx_out_path]'.format(cmd_name))
+    print('')
+    exit()
+
+
+def main():
+    if len(sys.argv) < 3:
+        _help_exit(os.path.basename(sys.argv[0]))
+
+    onnx.checker.check_model(sys.argv[1])
+    onnx.shape_inference.infer_shapes_path(sys.argv[1], sys.argv[2])
+
+
+if __name__ == "__main__":
+    main()
diff --git a/res/TensorFlowLiteRecipes/Reshape_004/test.recipe b/res/TensorFlowLiteRecipes/Reshape_004/test.recipe
new file mode 100644
index 00000000000..43b73c3f710
--- /dev/null
+++ b/res/TensorFlowLiteRecipes/Reshape_004/test.recipe
@@ -0,0 +1,31 @@
+# NOTE test model for 0 in shape.
+# May not work in interpreter.
+operand {
+  name: "ifm"
+  type: FLOAT32
+  shape { dim: 1 dim: 3 dim: 2 dim: 3 }
+}
+operand {
+  name: "shape"
+  type: INT32
+  shape { dim: 3 }
+  filler { tag: "explicit" arg: "0" arg: "3" arg: "-1" }
+}
+operand {
+  name: "ofm"
+  type: FLOAT32
+  shape { dim: 1 dim: 3 dim: 6 }
+}
+operation {
+  type: "Reshape"
+  reshape_options {
+    new_shape: 0
+    new_shape: 3
+    new_shape: -1
+  }
+  input: "ifm"
+  input: "shape"
+  output: "ofm"
+}
+input: "ifm"
+output: "ofm"
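
Note (not part of the patch): a minimal sketch of the shape resolution that the CircleReshape.cpp change implements and that the Reshape_004 recipe exercises. A 0 entry in the Reshape shape copies the corresponding input dimension, and a -1 entry is inferred from the remaining element count. The helper name resolve_reshape is hypothetical and only illustrates the rule; like the patch, it assumes a 0 entry has a matching input dimension.

def resolve_reshape(input_shape, new_shape):
    # 0 copies the corresponding input dim; -1 is inferred afterwards
    out = [input_shape[axis] if dim == 0 else dim for axis, dim in enumerate(new_shape)]
    total = 1
    for d in input_shape:
        total *= d
    known = 1
    for d in out:
        if d != -1:
            known *= d
    return [total // known if d == -1 else d for d in out]

# Reshape_004: ifm [1, 3, 2, 3] with shape [0, 3, -1] -> ofm [1, 3, 6]
assert resolve_reshape([1, 3, 2, 3], [0, 3, -1]) == [1, 3, 6]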