[res/tfl_recipes] Add REGRESS_Issue_13863 (#14064)
This commit adds the REGRESS_Issue_13863 TFLite recipe.

ONE-DCO-1.0-Signed-off-by: HanJin Choi [email protected]
Hanjin-Choi authored and seanshpark committed Oct 2, 2024
1 parent ced67ad commit 70f84e3
Showing 5 changed files with 105 additions and 2 deletions.
1 change: 1 addition & 0 deletions compiler/common-artifacts/exclude.lst
@@ -125,6 +125,7 @@ tcgenerate(ReLU6_dynamic_000) # TestDataGenerator does not support unknown dimension
 tcgenerate(ReLUN1To1_000)
 tcgenerate(ReLUN1To1_dynamic_000) # TestDataGenerator does not support unknown dimension
 tcgenerate(Reshape_003) # luci-interpreter doesn't support reshape without built-in option
+tcgenerate(Reshape_004) # has 0 in shape
 tcgenerate(ReverseSequence_000)
 tcgenerate(ReverseV2_000)
 tcgenerate(Round_000)
35 changes: 33 additions & 2 deletions compiler/luci/service/src/Nodes/CircleReshape.cpp
@@ -84,15 +84,34 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
   {
     LUCI_ASSERT(const_shape_node->dtype() == S32, "Only support int32 CircleConst");

+    // NOTE for rank(shape_by_input before) < rank(shape_by_input after),
+    // shape_by_input after will be filled with unknown dims
     shape_by_input.rank(const_shape_node->size<S32>());

     for (uint32_t axis = 0; axis < shape_by_input.rank(); ++axis)
     {
-      shape_by_input.dim(axis) = const_shape_node->at<S32>(axis);
       if (const_shape_node->at<S32>(axis) < 0)
       {
         shape_by_input.dim(axis).unset();
       }
+      else if (const_shape_node->at<S32>(axis) == 0)
+      {
+        const auto node_tensor = loco::must_cast<luci::CircleNode *>(node->tensor());
+        // set dim value to input
+        if (node_tensor->shape_status() == luci::ShapeStatus::VALID && axis < node_tensor->rank())
+          shape_by_input.dim(axis) = node_tensor->dim(axis);
+        else
+        {
+          // stop to check if this case exist for debugging
+          LUCI_ASSERT(false, "Check Reshape shape with 0");
+        }
+      }
+      else
+      {
+        shape_by_input.dim(axis).set(const_shape_node->at<S32>(axis));
+      }
+      // check valid or stop for debugging
+      assert(shape_by_input.dim(axis).value() > 0 || !shape_by_input.dim(axis).known());
     }
   }
   else
@@ -143,14 +162,26 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
   {
     for (uint32_t dim_index = 0; dim_index < output_shape.rank(); ++dim_index)
     {
-      const uint32_t dim_value = output_shape.dim(dim_index).value();
+      uint32_t dim_value = output_shape.dim(dim_index).value();
       if (not output_shape.dim(dim_index).known())
       {
         LUCI_ASSERT(unknown_dim_index == UINT32_MAX, "More than one unknown dimension");
         unknown_dim_index = dim_index;
       }
       else
       {
+        if (!dim_value)
+        {
+          // refer https://github.com/Samsung/ONE/issues/14074#issuecomment-2370795003
+          // set dim value to follow input
+          if (dim_index < input_shape.rank())
+            dim_value = input_shape.dim(dim_index).value();
+          else
+          {
+            // stop to check if this case exist for debugging
+            LUCI_ASSERT(false, "Check Reshape shape with 0");
+          }
+        }
         output_element_count *= dim_value;
       }
     }
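
For context, the 0/-1 handling above follows the usual TFLite Reshape convention: a 0 in the shape input copies the input tensor's dimension at the same index, and a single -1 is inferred from the remaining element count. Below is a minimal Python sketch of that resolution, for illustration only; the helper name is made up and this is not the luci implementation.

    def resolve_reshape_shape(input_shape, new_shape):
        # 0 copies the dimension from the input tensor at the same index
        resolved = []
        for i, d in enumerate(new_shape):
            if d == 0:
                assert i < len(input_shape), "Check Reshape shape with 0"
                resolved.append(input_shape[i])
            else:
                resolved.append(d)
        # at most one -1 is inferred from the remaining element count
        total = 1
        for d in input_shape:
            total *= d
        if -1 in resolved:
            known = 1
            for d in resolved:
                if d != -1:
                    known *= d
            resolved[resolved.index(-1)] = total // known
        return resolved

With the recipe added below, resolve_reshape_shape([1, 3, 2, 3], [0, 3, -1]) returns [1, 3, 6].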
2 changes: 2 additions & 0 deletions compiler/luci/tests/test.lst
@@ -142,6 +142,7 @@ addread(Reshape_000)
 addread(Reshape_001)
 addread(Reshape_002)
 #addread(Reshape_003) # no input, no option is not supported
+addread(Reshape_004)
 addread(Reshape_U8_000)
 addread(ResizeBilinear_000)
 addread(ResizeBilinear_U8_000)
@@ -374,6 +375,7 @@ addwrite(Reshape_000)
 addwrite(Reshape_001)
 addwrite(Reshape_002)
 #addwrite(Reshape_003) # no input, no option is not supported
+addwrite(Reshape_004)
 addwrite(Reshape_U8_000)
 addwrite(ResizeBilinear_000)
 addwrite(ResizeBilinear_U8_000)
38 changes: 38 additions & 0 deletions compiler/onnx-tools/onnx-extract.py
@@ -0,0 +1,38 @@
#!/usr/bin/env python3

# Copyright (c) 2024 Samsung Electronics Co., Ltd. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import onnx
import os
import sys


def _help_exit(cmd_name):
    print('Produce shape-infered ONNX file')
    print('Usage: {0} [onnx_in_path] [onnx_out_path]'.format(cmd_name))
    print('')
    exit()


def main():
    if len(sys.argv) < 3:
        _help_exit(os.path.basename(sys.argv[0]))

    onnx.checker.check_model(sys.argv[1])
    onnx.shape_inference.infer_shapes_path(sys.argv[1], sys.argv[2])


if __name__ == "__main__":
    main()
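
The script validates the input model and then writes a copy with inferred shapes to the output path. A typical invocation would look something like python3 onnx-extract.py input.onnx output.onnx, where the file names are placeholders.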
31 changes: 31 additions & 0 deletions res/TensorFlowLiteRecipes/Reshape_004/test.recipe
@@ -0,0 +1,31 @@
# NOTE test model for 0 in shape.
# May not work in interpreter.
operand {
  name: "ifm"
  type: FLOAT32
  shape { dim: 1 dim: 3 dim: 2 dim: 3 }
}
operand {
  name: "shape"
  type: INT32
  shape { dim: 3 }
  filler { tag: "explicit" arg: "0" arg: "3" arg: "-1" }
}
operand {
  name: "ofm"
  type: FLOAT32
  shape { dim: 1 dim: 3 dim: 6 }
}
operation {
  type: "Reshape"
  reshape_options {
    new_shape: 0
    new_shape: 3
    new_shape: -1
  }
  input: "ifm"
  input: "shape"
  output: "ofm"
}
input: "ifm"
output: "ofm"
