author     Yan Facai (颜发才) <facai.yan@gmail.com>    2018-08-09 19:34:39 +0800
committer  Yan Facai (颜发才) <facai.yan@gmail.com>    2018-08-09 19:34:39 +0800
commit     a90fce71faaa356b531157c2e00804046961b39d (patch)
tree       5f270102fc14a6e52644402b99e16a45fbefb617 /tensorflow/cc
parent     7613b773e03987c89fe5e5883c411588bce59673 (diff)
CLN: format cc code with clang-format
Diffstat (limited to 'tensorflow/cc')
-rw-r--r--  tensorflow/cc/gradients/math_grad.cc       | 9
-rw-r--r--  tensorflow/cc/gradients/math_grad_test.cc  | 6
2 files changed, 9 insertions, 6 deletions
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index 84552e7c5e..c6e60689fa 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -442,16 +442,17 @@ Status RealDivGrad(const Scope& scope, const Operation& op,
REGISTER_GRADIENT_OP("RealDiv", RealDivGrad);
Status UnsafeDivGrad(const Scope& scope, const Operation& op,
- const std::vector<Output>& grad_inputs,
- std::vector<Output>* grad_outputs) {
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
auto x_1 = ConjugateHelper(scope, op.input(0));
auto x_2 = ConjugateHelper(scope, op.input(1));
// y = x_1 / x_2
// dy/dx_1 = 1/x_2
// dy/dx_2 = -x_1/x_2^2
auto gx_1 = UnsafeDiv(scope, grad_inputs[0], x_2);
- auto gx_2 = Mul(scope, grad_inputs[0],
- UnsafeDiv(scope, UnsafeDiv(scope, Neg(scope, x_1), x_2), x_2));
+ auto gx_2 =
+ Mul(scope, grad_inputs[0],
+ UnsafeDiv(scope, UnsafeDiv(scope, Neg(scope, x_1), x_2), x_2));
return BinaryGradCommon(scope, op, grad_outputs, gx_1, gx_2);
}
REGISTER_GRADIENT_OP("UnsafeDiv", UnsafeDivGrad);
diff --git a/tensorflow/cc/gradients/math_grad_test.cc b/tensorflow/cc/gradients/math_grad_test.cc
index 330d1722af..12a19bcf28 100644
--- a/tensorflow/cc/gradients/math_grad_test.cc
+++ b/tensorflow/cc/gradients/math_grad_test.cc
@@ -860,7 +860,8 @@ TEST_F(NaryGradTest, UnsafeDiv) {
     const auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(x_shape));
     // Test x / (1 + |x|) rather than x_1 / x_2 to avoid triggering large
     // division errors in the numeric estimator used by the gradient checker.
-    const auto y = UnsafeDiv(scope_, x, Add(scope_, Const<float>(scope_, 1), Abs(scope_, x)));
+    const auto y = UnsafeDiv(
+        scope_, x, Add(scope_, Const<float>(scope_, 1), Abs(scope_, x)));
     RunTest({x}, {x_shape}, {y}, {x_shape});
   }
   {
@@ -873,7 +874,8 @@ TEST_F(NaryGradTest, UnsafeDiv) {
     TF_EXPECT_OK(AddSymbolicGradients(scope_, {y}, {x}, &grad_outputs));
     ClientSession session(scope_);
     std::vector<Tensor> grad_result;
-    TF_EXPECT_OK(session.Run({{x, {-3.0f, 0.0f, 3.0f}}}, grad_outputs, &grad_result));
+    TF_EXPECT_OK(
+        session.Run({{x, {-3.0f, 0.0f, 3.0f}}}, grad_outputs, &grad_result));
     EXPECT_EQ(grad_result.size(), 1);
     EXPECT_EQ(grad_result[0].NumElements(), 3);
     EXPECT_EQ(grad_result[0].flat<float>()(0), 0.0f);
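
The comment in the first test block is the key design note: the surrogate f(x) = x / (1 + |x|) has derivative 1 / (1 + |x|)^2, which is bounded by 1, so the numeric estimate computed by the gradient checker stays well conditioned instead of blowing up near small denominators. A standalone sketch of such a central-difference check, reusing the test's sample points (an illustration, not TensorFlow's actual checker):

#include <cmath>
#include <cstdio>
#include <initializer_list>

// f(x) = x / (1 + |x|), the surrogate the test differentiates.
double f(double x) { return x / (1.0 + std::fabs(x)); }

// Closed-form derivative 1 / (1 + |x|)^2, bounded by 1 everywhere.
double AnalyticGrad(double x) {
  const double d = 1.0 + std::fabs(x);
  return 1.0 / (d * d);
}

int main() {
  const double h = 1e-5;  // finite-difference step
  for (double x : {-3.0, 0.0, 3.0}) {
    // Central difference: (f(x + h) - f(x - h)) / (2h).
    const double numeric = (f(x + h) - f(x - h)) / (2.0 * h);
    std::printf("x = %+.1f  numeric = %.8f  analytic = %.8f\n", x, numeric,
                AnalyticGrad(x));
  }
  return 0;
}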