diff options
author | A. Unique TensorFlower <gardener@tensorflow.org> | 2016-08-25 07:59:14 -0800 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2016-08-25 09:04:38 -0700 |
commit | e78cbe072bb8fd50dd8be6033de9bcb5f62d59fd (patch) | |
tree | 2183e953675655acbafe9ff0a6571cb3ee67c7d7 /tensorflow/core/ops/math_grad_test.cc | |
parent | a856685175f0919dd2ab03ac447d2708dc0fffe3 (diff) |
Fix gradient of pow for complex types
Change: 131294380
Diffstat (limited to 'tensorflow/core/ops/math_grad_test.cc')
-rw-r--r-- | tensorflow/core/ops/math_grad_test.cc | 19 |
1 file changed, 19 insertions, 0 deletions
diff --git a/tensorflow/core/ops/math_grad_test.cc b/tensorflow/core/ops/math_grad_test.cc index 9af73b2da0..e937fc5ab1 100644 --- a/tensorflow/core/ops/math_grad_test.cc +++ b/tensorflow/core/ops/math_grad_test.cc @@ -684,6 +684,25 @@ TEST_F(MathGradTest, Pow) { } } +TEST_F(MathGradTest, ComplexPow) { + auto x = test::AsTensor<complex64>({0.f, 2.f, -2.f}, TensorShape({3})); + auto y = test::AsTensor<complex64>({2.f, 2.f, 2.f}, TensorShape({3})); + Tensor dx; + Tensor dy; + auto g = [](complex64 x, complex64 y) { return y * std::pow(x, y - 1.f); }; + auto h = [](complex64 x, complex64 y) { + return std::pow(x, y) * (x != complex64(0) ? std::log(x) : 0); + }; + SymGrad("Pow", x, y, &dx, &dy); + + test::ExpectClose( + dx, test::AsTensor<complex64>({g(0.f, 2.f), g(2.f, 2.f), g(-2.f, 2.f)}, + TensorShape({3}))); + test::ExpectClose( + dy, test::AsTensor<complex64>({h(0.f, 2.f), h(2.f, 2.f), h(-2.f, 2.f)}, + TensorShape({3}))); +} + TEST_F(MathGradTest, Maximum) { auto x = test::AsTensor<float>({-3.f, -2.f, -1.f, 1.f, 2.f, 3.f}, TensorShape({2, 3})); |