author    Yan Facai (颜发才) <facai.yan@gmail.com>  2018-08-14 15:05:02 +0800
committer Yan Facai (颜发才) <facai.yan@gmail.com>  2018-08-14 15:05:02 +0800
commit    8f4bb0fed956a35b767c0984d9587636cba661bb (patch)
tree      bcf8460276aeb730449afec0d930818ac73f4163 /tensorflow/cc
parent    0552c2976cf6e4f0f09556d0a3ae5a76509e9a46 (diff)
parent    cb53bfaf48588962f5799391d3a7a411dda72b49 (diff)
Merge remote-tracking branch 'upstream/master' into ENH/unsafe_div
Diffstat (limited to 'tensorflow/cc')
-rw-r--r--  tensorflow/cc/BUILD                         |  3
-rw-r--r--  tensorflow/cc/gradients/array_grad.cc       | 18
-rw-r--r--  tensorflow/cc/gradients/array_grad_test.cc  |  8
-rw-r--r--  tensorflow/cc/gradients/math_grad.cc        | 20
-rw-r--r--  tensorflow/cc/gradients/math_grad_test.cc   | 11
-rw-r--r--  tensorflow/cc/saved_model/loader.cc         |  3
6 files changed, 60 insertions(+), 3 deletions(-)
diff --git a/tensorflow/cc/BUILD b/tensorflow/cc/BUILD
index 588a45ea43..f56521dac0 100644
--- a/tensorflow/cc/BUILD
+++ b/tensorflow/cc/BUILD
@@ -379,9 +379,11 @@ tf_cc_test(
srcs = ["gradients/math_grad_test.cc"],
deps = [
":cc_ops",
+ ":client_session",
":grad_op_registry",
":grad_testutil",
":gradient_checker",
+ ":gradients",
":math_grad",
":testutil",
"//tensorflow/core:lib_internal",
@@ -626,7 +628,6 @@ tf_cc_binary(
copts = tf_copts(),
linkopts = select({
"//tensorflow:windows": [],
- "//tensorflow:windows_msvc": [],
"//tensorflow:darwin": [
"-lm",
"-lpthread",
diff --git a/tensorflow/cc/gradients/array_grad.cc b/tensorflow/cc/gradients/array_grad.cc
index b353accddc..e9173227aa 100644
--- a/tensorflow/cc/gradients/array_grad.cc
+++ b/tensorflow/cc/gradients/array_grad.cc
@@ -120,6 +120,24 @@ Status SplitGrad(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("Split", SplitGrad);
+Status FillGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // y = fill(fill_shape, x)
+ // No gradient returned for the fill_shape argument.
+ grad_outputs->push_back(NoGradient());
+ // The gradient for x (which must be a scalar) is just the sum of
+ // all the gradients from the shape it fills.
+ // We use ReduceSum to implement this, which needs an argument providing
+ // the indices of all the dimensions of the incoming gradient.
+ // grad(x) = reduce_sum(grad(y), [0..rank(grad(y)) - 1])
+ auto all_dims = Range(scope, Const(scope, 0), Rank(scope, grad_inputs[0]),
+ Const(scope, 1));
+ grad_outputs->push_back(ReduceSum(scope, grad_inputs[0], all_dims));
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("Fill", FillGrad);
+
Status DiagGrad(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {
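For context, here is a minimal, self-contained sketch (not part of this commit) of how the new Fill gradient can be exercised through the tensorflow/cc gradients API. The feed value 3.0f and the [2, 3] fill shape are illustrative assumptions; the includes and ops follow the standard C++ client API.

  #include "tensorflow/cc/client/client_session.h"
  #include "tensorflow/cc/framework/gradients.h"
  #include "tensorflow/cc/ops/standard_ops.h"

  int main() {
    using namespace tensorflow;
    Scope scope = Scope::NewRootScope();
    // y = fill([2, 3], x): six copies of the scalar x.
    auto x = ops::Placeholder(scope, DT_FLOAT);
    auto y = ops::Fill(scope, {2, 3}, x);
    // AddSymbolicGradients seeds dL/dy with ones, so FillGrad reduces a
    // 2x3 tensor of ones to the scalar 6 (the sum over all dimensions).
    std::vector<Output> grads;
    TF_CHECK_OK(AddSymbolicGradients(scope, {y}, {x}, &grads));
    ClientSession session(scope);
    std::vector<Tensor> outputs;
    TF_CHECK_OK(session.Run({{x, 3.0f}}, {grads[0]}, &outputs));
    LOG(INFO) << outputs[0].scalar<float>()();  // Prints 6.
    return 0;
  }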
diff --git a/tensorflow/cc/gradients/array_grad_test.cc b/tensorflow/cc/gradients/array_grad_test.cc
index d09275b648..f41de3dc20 100644
--- a/tensorflow/cc/gradients/array_grad_test.cc
+++ b/tensorflow/cc/gradients/array_grad_test.cc
@@ -108,6 +108,14 @@ TEST_F(ArrayGradTest, SplitGrad) {
RunTest({x}, {x_shape}, y.output, {y_shape, y_shape});
}
+TEST_F(ArrayGradTest, FillGrad) {
+ TensorShape x_shape({});
+ auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(x_shape));
+ TensorShape y_shape({2, 5, 3});
+ auto y = Fill(scope_, {2, 5, 3}, x);
+ RunTest(x, x_shape, y, y_shape);
+}
+
TEST_F(ArrayGradTest, DiagGrad) {
TensorShape x_shape({5, 2});
auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(x_shape));
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index cd215f740d..1329b568ab 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -1022,6 +1022,26 @@ Status ProdGrad(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("Prod", ProdGrad);
+Status SegmentSumGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // The SegmentSum operation sums segments of the Tensor that have the same
+ // index in the segment_ids parameter.
+ // e.g. z = [2, 3, 4, 5] with segment_ids = [0, 0, 0, 1]
+ // will produce [2 + 3 + 4, 5] = [9, 5]
+ // The incoming gradient will look like [x1, x2]; it has the same shape
+ // as the output of the SegmentSum operation. The gradient step for
+ // SegmentSum simply broadcasts that gradient back to z's shape by
+ // gathering it along segment_ids:
+ // grad(z) = [x1, x1, x1, x2]
+ grad_outputs->push_back(Gather(scope, grad_inputs[0], op.input(1)));
+
+ // Stop propagation along segment_ids.
+ grad_outputs->push_back(NoGradient());
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("SegmentSum", SegmentSumGrad);
+
// MatMulGrad helper function used to compute two MatMul operations
// based on input matrix transposition combinations.
Status MatMulGradHelper(const Scope& scope, const bool is_batch,
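To make the broadcast concrete, a minimal sketch (not part of this commit; the values mirror the worked example in the comment above) that runs the SegmentSum gradient end to end:

  #include "tensorflow/cc/client/client_session.h"
  #include "tensorflow/cc/framework/gradients.h"
  #include "tensorflow/cc/ops/standard_ops.h"

  int main() {
    using namespace tensorflow;
    Scope scope = Scope::NewRootScope();
    // z = [2, 3, 4, 5], segment_ids = [0, 0, 0, 1]  =>  y = [9, 5].
    auto z = ops::Placeholder(scope, DT_FLOAT, ops::Placeholder::Shape({4}));
    auto y = ops::SegmentSum(scope, z, {0, 0, 0, 1});
    // With the default seed dL/dy = [1, 1], Gather maps it back through
    // segment_ids, so grad(z) = [1, 1, 1, 1].
    std::vector<Output> grads;
    TF_CHECK_OK(AddSymbolicGradients(scope, {y}, {z}, &grads));
    ClientSession session(scope);
    std::vector<Tensor> outputs;
    TF_CHECK_OK(session.Run({{z, {2.f, 3.f, 4.f, 5.f}}}, {grads[0]}, &outputs));
    return 0;
  }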
diff --git a/tensorflow/cc/gradients/math_grad_test.cc b/tensorflow/cc/gradients/math_grad_test.cc
index 147428cc39..c16938322c 100644
--- a/tensorflow/cc/gradients/math_grad_test.cc
+++ b/tensorflow/cc/gradients/math_grad_test.cc
@@ -45,10 +45,10 @@ using ops::Placeholder;
using ops::Pow;
using ops::Prod;
using ops::RealDiv;
+using ops::SegmentSum;
using ops::SquaredDifference;
using ops::Sub;
using ops::Sum;
-using ops::Where3;
// TODO(andydavis) Test gradient function against numeric gradients output.
// TODO(andydavis) As more gradients are added move common test functions
@@ -932,5 +932,14 @@ TEST_F(NaryGradTest, Prod) {
RunTest({x}, {x_shape}, {y}, {y_shape});
}
+TEST_F(NaryGradTest, SegmentSum) {
+ TensorShape x_shape({3, 4});
+ auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(x_shape));
+ auto y = SegmentSum(scope_, x, {0, 0, 1});
+ // The segment sum is always taken over the first dimension.
+ TensorShape y_shape({2, 4});
+ RunTest({x}, {x_shape}, {y}, {y_shape});
+}
+
} // namespace
} // namespace tensorflow
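For reference, RunTest in these tests wraps the numeric gradient checker from tensorflow/cc/framework/gradient_checker.h. A hedged sketch of the underlying call for the SegmentSum case, with shapes as in the test above (the 1e-3 error threshold is an assumption, matching the pattern used elsewhere in this file):

  #include "tensorflow/cc/framework/gradient_checker.h"

  // Inside a test body, with scope_, x, x_shape, y, y_shape as above:
  float max_error;
  TF_ASSERT_OK((ComputeGradientError<float, float, float>(
      scope_, {x}, {x_shape}, {y}, {y_shape}, &max_error)));
  // The symbolic gradient should agree with the numeric estimate.
  EXPECT_LT(max_error, 1e-3);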
diff --git a/tensorflow/cc/saved_model/loader.cc b/tensorflow/cc/saved_model/loader.cc
index 98be66a6ad..3830416159 100644
--- a/tensorflow/cc/saved_model/loader.cc
+++ b/tensorflow/cc/saved_model/loader.cc
@@ -170,7 +170,8 @@ Status RunRestore(const RunOptions& run_options, const string& export_dir,
variables_directory, MetaFilename(kSavedModelVariablesFilename));
if (!Env::Default()->FileExists(variables_index_path).ok()) {
LOG(INFO) << "The specified SavedModel has no variables; no checkpoints "
- "were restored.";
+ "were restored. File does not exist: "
+ << variables_index_path;
return Status::OK();
}
const string variables_path =