diff options
author | Cao Zongyan <zongyan.cao@alibaba-inc.com> | 2018-09-05 09:02:40 +0800 |
---|---|---|
committer | Cao Zongyan <zongyan.cao@alibaba-inc.com> | 2018-09-05 09:02:40 +0800 |
commit | a95281ce1b449d8f92a3799ff9c1dbf661b70bc4 (patch) | |
tree | d4c4fecf32f069fe09e82a61a931a4e957bbf6e7 /tensorflow/cc | |
parent | d2ad105d2dff3c79d8f49f5fb8ce74c38f424e74 (diff) |
Avoid golden API file changing.
Diffstat (limited to 'tensorflow/cc')
-rw-r--r-- | tensorflow/cc/gradients/nn_grad_test.cc | 3 |
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/tensorflow/cc/gradients/nn_grad_test.cc b/tensorflow/cc/gradients/nn_grad_test.cc
index d8c2a1a0fc..f5a09e09dc 100644
--- a/tensorflow/cc/gradients/nn_grad_test.cc
+++ b/tensorflow/cc/gradients/nn_grad_test.cc
@@ -42,7 +42,6 @@ using ops::MaxPoolV2;
 using ops::Placeholder;
 using ops::Relu;
 using ops::Relu6;
-using ops::LeakyRelu;
 using ops::Selu;
 using ops::Softmax;
 using ops::Softplus;
@@ -165,7 +164,7 @@ TEST_F(NNGradTest, Relu6Grad) {
 TEST_F(NNGradTest, LeakyReluGrad) {
   TensorShape shape({5, 2});
   auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
-  auto y = LeakyRelu(scope_, x);
+  auto y = ops::internal::LeakyRelu(scope_, x);
   // Avoid input values where Leaky ReLU gradient is not well defined (around
   // zero).
   Tensor x_init_value = test::AsTensor<float>(