From 0b04a6431c3e9de981cee28bafcfc597bf7a80d3 Mon Sep 17 00:00:00 2001
From: SimpleML Team
Date: Mon, 26 Feb 2024 02:35:00 -0800
Subject: [PATCH] Automated Code Change

PiperOrigin-RevId: 610342411
---
 .../tensorflow/ops/inference/kernel.cc        | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc b/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc
index 93179c9..6197746 100644
--- a/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc
+++ b/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc
@@ -1275,7 +1275,7 @@ class SimpleMLInferenceOp : public OpKernel {
 
   void Compute(OpKernelContext* ctx) override {
     // Make sure the model is available.
-    tf::StatusOr<YggdrasilModelResource*> model_resource_or =
+    absl::StatusOr<YggdrasilModelResource*> model_resource_or =
         GetModelResource(ctx);
     OP_REQUIRES_OK(ctx, model_resource_or.status());
     YggdrasilModelResource* model_resource = model_resource_or.value();
@@ -1341,7 +1341,8 @@ class SimpleMLInferenceOp : public OpKernel {
   // "SimpleMLInferenceOp" class.
   //
   // This method is thread safe.
-  tf::StatusOr<YggdrasilModelResource*> GetModelResource(OpKernelContext* ctx) {
+  absl::StatusOr<YggdrasilModelResource*> GetModelResource(
+      OpKernelContext* ctx) {
     {
       tf::tf_shared_lock l(model_container_mutex_);
       if (model_container_) {
@@ -1364,7 +1365,7 @@ class SimpleMLInferenceOp : public OpKernel {
   // Imports the model resource.
   //
   // The returned resource IS tracked with ref count.
-  virtual tf::StatusOr<YggdrasilModelResource*> ImportModelResource(
+  virtual absl::StatusOr<YggdrasilModelResource*> ImportModelResource(
       OpKernelContext* ctx) {
     // The resource exists but is not tracked in the class.
     //
@@ -1414,7 +1415,7 @@ class SimpleMLInferenceOp : public OpKernel {
 
   // Gets the c++ references on all the input tensor values of the inference op.
   // In other words, get the input tensor and cast them to the expected type.
-  tf::StatusOr LinkInputTensors(
+  absl::StatusOr LinkInputTensors(
       OpKernelContext* ctx, const FeatureIndex& feature_index) {
     const Tensor* numerical_features_tensor = nullptr;
     const Tensor* boolean_features_tensor = nullptr;
@@ -1471,8 +1472,8 @@ class SimpleMLInferenceOp : public OpKernel {
 
   // Allocates and gets the c++ references to all the output tensor values of
   // the inference op.
-  tf::StatusOr LinkOutputTensors(OpKernelContext* ctx,
-                                 const int batch_size) {
+  absl::StatusOr LinkOutputTensors(OpKernelContext* ctx,
+                                   const int batch_size) {
     Tensor* dense_predictions_tensor = nullptr;
     Tensor* dense_col_representation_tensor = nullptr;
 
@@ -1489,7 +1490,7 @@ class SimpleMLInferenceOp : public OpKernel {
   }
 
   // Allocates and gets the c++ references to the output leaves.
-  tf::StatusOr LinkOutputLeavesTensors(
+  absl::StatusOr LinkOutputLeavesTensors(
       OpKernelContext* ctx, const int batch_size, const int num_trees) {
     Tensor* leaves_tensor = nullptr;
     TF_RETURN_IF_ERROR(ctx->allocate_output(
@@ -1574,7 +1575,7 @@ class SimpleMLInferenceOpWithHandle : public SimpleMLInferenceOp {
 
   ~SimpleMLInferenceOpWithHandle() override {}
 
-  tf::StatusOr<YggdrasilModelResource*> ImportModelResource(
+  absl::StatusOr<YggdrasilModelResource*> ImportModelResource(
       OpKernelContext* ctx) override {
     YggdrasilModelResource* res;
     TF_RETURN_IF_ERROR(GetModelResourceFromResourceHandle(ctx, &res));
@@ -1594,7 +1595,7 @@ class SimpleMLInferenceLeafIndexOpWithHandle : public SimpleMLInferenceOp {
 
   ~SimpleMLInferenceLeafIndexOpWithHandle() override {}
 
-  tf::StatusOr<YggdrasilModelResource*> ImportModelResource(
+  absl::StatusOr<YggdrasilModelResource*> ImportModelResource(
      OpKernelContext* ctx) override {
     YggdrasilModelResource* res;
     TF_RETURN_IF_ERROR(GetModelResourceFromResourceHandle(ctx, &res));
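
Reviewer note (not part of the commit): the patch only changes the spelling of the return type from tf::StatusOr to absl::StatusOr; every call site keeps the same status-or-value handling (OP_REQUIRES_OK(ctx, model_resource_or.status()) in Compute, TF_RETURN_IF_ERROR plus .value() in the helper methods). The sketch below is a minimal, self-contained illustration of that absl::StatusOr pattern outside of TensorFlow. FakeModel, FakeModelRegistry, and LookupModel are hypothetical names invented for this example only, and it assumes a build that links Abseil's status and statusor libraries.

// Minimal sketch (not part of the patch): the absl::StatusOr error-propagation
// pattern used by the kernel methods above. All class and function names here
// are hypothetical and exist only for this example.
#include <iostream>
#include <map>
#include <string>
#include <utility>

#include "absl/status/status.h"
#include "absl/status/statusor.h"

struct FakeModel {
  std::string name;
};

class FakeModelRegistry {
 public:
  // Returns a non-owning pointer on success, mirroring how GetModelResource()
  // returns a StatusOr wrapping a resource pointer.
  absl::StatusOr<FakeModel*> LookupModel(const std::string& key) {
    auto it = models_.find(key);
    if (it == models_.end()) {
      return absl::NotFoundError("No model registered under: " + key);
    }
    return &it->second;
  }

  void Register(const std::string& key, FakeModel model) {
    models_[key] = std::move(model);
  }

 private:
  std::map<std::string, FakeModel> models_;
};

int main() {
  FakeModelRegistry registry;
  registry.Register("my_model", FakeModel{"my_model"});

  // Rough analogue of OP_REQUIRES_OK(ctx, model_resource_or.status()):
  // check the status, then unwrap the value.
  absl::StatusOr<FakeModel*> model_or = registry.LookupModel("my_model");
  if (!model_or.ok()) {
    std::cerr << model_or.status() << std::endl;
    return 1;
  }
  FakeModel* model = model_or.value();
  std::cout << "Loaded model: " << model->name << std::endl;
  return 0;
}

Returning absl::StatusOr<T*> rather than filling an output parameter lets callers propagate failures with an early return (as the kernel does via TF_RETURN_IF_ERROR) while keeping the error message attached to the result.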