diff options
author | karl@kubx.ca <karl@kubx.ca> | 2018-06-25 22:12:24 -0400 |
---|---|---|
committer | karl@kubx.ca <karl@kubx.ca> | 2018-07-25 21:10:29 -0400 |
commit | 2b303fddafec6b96a6868aaa76f55cc392b96586 (patch) | |
tree | 8b1da320c69ba5239f8bdd37bfac95cd02704d65 /tensorflow/c/c_api_test.cc | |
parent | b24037513f12a5812a21b7ea92ff904ee9ea6cd8 (diff) |
Add scope name to TF_AddGradients
Diffstat (limited to 'tensorflow/c/c_api_test.cc')
-rw-r--r-- | tensorflow/c/c_api_test.cc | 44 |
1 file changed, 29 insertions, 15 deletions
diff --git a/tensorflow/c/c_api_test.cc b/tensorflow/c/c_api_test.cc index e674b1623c..2fe9e91583 100644 --- a/tensorflow/c/c_api_test.cc +++ b/tensorflow/c/c_api_test.cc @@ -1474,16 +1474,18 @@ class CApiGradientsTest : public ::testing::Test { TF_DeleteStatus(s_); } - void TestGradientsSuccess(bool grad_inputs_provided) { + void TestGradientsSuccess(bool grad_inputs_provided, + const char* scope_name = nullptr) { TF_Output inputs[2]; TF_Output outputs[1]; TF_Output grad_outputs[2]; TF_Output expected_grad_outputs[2]; BuildSuccessGraph(inputs, outputs); - BuildExpectedGraph(grad_inputs_provided, expected_grad_outputs); + BuildExpectedGraph(grad_inputs_provided, scope_name, expected_grad_outputs); - AddGradients(grad_inputs_provided, inputs, 2, outputs, 1, grad_outputs); + AddGradients(grad_inputs_provided, scope_name, inputs, 2, outputs, 1, + grad_outputs); EXPECT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_); @@ -1505,7 +1507,8 @@ class CApiGradientsTest : public ::testing::Test { BuildErrorGraph(inputs, outputs); - AddGradients(grad_inputs_provided, inputs, 1, outputs, 1, grad_outputs); + AddGradients(grad_inputs_provided, nullptr, inputs, 1, outputs, 1, + grad_outputs); string expected_msg = "No gradient defined for op: TestOpWithNoGradient. Please see " @@ -1549,19 +1552,20 @@ class CApiGradientsTest : public ::testing::Test { EXPECT_EQ(*a_data, *b_data); } - void AddGradients(bool grad_inputs_provided, TF_Output* inputs, int ninputs, - TF_Output* outputs, int noutputs, TF_Output* grad_outputs) { + void AddGradients(bool grad_inputs_provided, const char* scope_name, + TF_Output* inputs, int ninputs, TF_Output* outputs, + int noutputs, TF_Output* grad_outputs) { if (grad_inputs_provided) { TF_Output grad_inputs[1]; const float grad_inputs_val[] = {1.0, 1.0, 1.0, 1.0}; TF_Operation* grad_inputs_op = FloatConst2x2(graph_, s_, grad_inputs_val, "GradInputs"); grad_inputs[0] = TF_Output{grad_inputs_op, 0}; - TF_AddGradients(graph_, outputs, noutputs, inputs, ninputs, grad_inputs, - s_, grad_outputs); + TF_AddGradients(graph_, scope_name, outputs, noutputs, inputs, ninputs, + grad_inputs, s_, grad_outputs); } else { - TF_AddGradients(graph_, outputs, noutputs, inputs, ninputs, nullptr, s_, - grad_outputs); + TF_AddGradients(graph_, scope_name, outputs, noutputs, inputs, ninputs, + nullptr, s_, grad_outputs); } } @@ -1600,6 +1604,7 @@ class CApiGradientsTest : public ::testing::Test { } void BuildExpectedGraph(bool grad_inputs_provided, + const char* grad_scope_name, TF_Output* expected_grad_outputs) { // The expected graph looks like this if grad_inputs_provided. // If grad_inputs_provided is false, Const_0 will be a OnesLike op. 
@@ -1628,6 +1633,10 @@ class CApiGradientsTest : public ::testing::Test { // const float const0_val[] = {1.0, 2.0, 3.0, 4.0}; const float const1_val[] = {1.0, 0.0, 0.0, 1.0}; + const char* grad_prefix = grad_scope_name; + if (grad_scope_name == nullptr) { + grad_prefix = "gradients"; + } TF_Operation* const0 = FloatConst2x2(expected_graph_, s_, const0_val, "Const_0"); TF_Operation* const1 = @@ -1640,13 +1649,14 @@ class CApiGradientsTest : public ::testing::Test { const float const3_val[] = {1.0, 1.0, 1.0, 1.0}; const3 = FloatConst2x2(expected_graph_, s_, const3_val, "GradInputs"); } else { - const3 = OnesLike(expected_graph_, s_, matmul, "gradients/OnesLike"); + const3 = OnesLike(expected_graph_, s_, matmul, + strings::StrCat(grad_prefix, "/OnesLike").c_str()); } TF_Operation* matmul1 = MatMul(expected_graph_, s_, const3, const1, - "gradients/MatMul", false, true); + strings::StrCat(grad_prefix, "/MatMul").c_str(), false, true); TF_Operation* matmul2 = MatMul(expected_graph_, s_, const0, const3, - "gradients/MatMul_1", true, false); + strings::StrCat(grad_prefix, "/MatMul_1").c_str(), true, false); expected_grad_outputs[0] = {matmul1, 0}; expected_grad_outputs[1] = {matmul2, 0}; } @@ -1717,6 +1727,10 @@ TEST_F(CApiGradientsTest, Gradients_NoGradInputs) { TestGradientsSuccess(false); } +TEST_F(CApiGradientsTest, Gradients_NoGradInputsWithScopeName) { + TestGradientsSuccess(false, "gradscope"); +} + TEST_F(CApiGradientsTest, OpWithNoGradientRegistered_GradInputs) { TestGradientsError(true); } @@ -1743,11 +1757,11 @@ TEST_F(CApiGradientsTest, MultipleCallsToAddGradients) { TF_Output outputs[1] = {{xy, 0}}; TF_Output inputs[1] = {{x, 0}}; - TF_AddGradients(graph_, outputs, 1, inputs, 1, nullptr, s_, &dxy_dx); + TF_AddGradients(graph_, nullptr, outputs, 1, inputs, 1, nullptr, s_, &dxy_dx); ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_); inputs[0] = {y, 0}; - TF_AddGradients(graph_, outputs, 1, inputs, 1, nullptr, s_, &dxy_dy); + TF_AddGradients(graph_, nullptr, outputs, 1, inputs, 1, nullptr, s_, &dxy_dy); ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_); TF_SessionOptions* opts = TF_NewSessionOptions(); |