diff options
Diffstat (limited to 'tensorflow/contrib/lite/toco/graph_transformations/unfuse_activation_functions.cc')
-rw-r--r-- | tensorflow/contrib/lite/toco/graph_transformations/unfuse_activation_functions.cc | 12 |
1 file changed, 8 insertions, 4 deletions
diff --git a/tensorflow/contrib/lite/toco/graph_transformations/unfuse_activation_functions.cc b/tensorflow/contrib/lite/toco/graph_transformations/unfuse_activation_functions.cc index 69bad2fa89..4ada5c3fd0 100644 --- a/tensorflow/contrib/lite/toco/graph_transformations/unfuse_activation_functions.cc +++ b/tensorflow/contrib/lite/toco/graph_transformations/unfuse_activation_functions.cc @@ -25,13 +25,16 @@ limitations under the License. namespace toco { -bool UnfuseActivationFunctions::Run(Model* model, std::size_t op_index) { +::tensorflow::Status UnfuseActivationFunctions::Run(Model* model, + std::size_t op_index, + bool* modified) { + *modified = false; const auto it = model->operators.begin() + op_index; auto* op = it->get(); // If a conv operation has an im2col array, yield: it should be dropped first. if ((op->type == OperatorType::kConv) && (op->outputs.size() == 2)) { - return false; + return ::tensorflow::Status::OK(); } Operator* ac_op = nullptr; @@ -46,7 +49,7 @@ bool UnfuseActivationFunctions::Run(Model* model, std::size_t op_index) { ac_op = new Relu1Operator; break; default: - return false; + return ::tensorflow::Status::OK(); } // At this point we know that the op has a fused activation function. At the @@ -74,7 +77,8 @@ bool UnfuseActivationFunctions::Run(Model* model, std::size_t op_index) { ac_op->inputs = {tmp_array_name}; op->outputs = {tmp_array_name}; - return true; + *modified = true; + return ::tensorflow::Status::OK(); } } // namespace toco |