aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/c
diff options
context:
space:
mode:
authorGravatar karl@kubx.ca <karl@kubx.ca>2018-06-25 22:12:24 -0400
committerGravatar karl@kubx.ca <karl@kubx.ca>2018-07-25 21:10:29 -0400
commit2b303fddafec6b96a6868aaa76f55cc392b96586 (patch)
tree8b1da320c69ba5239f8bdd37bfac95cd02704d65 /tensorflow/c
parentb24037513f12a5812a21b7ea92ff904ee9ea6cd8 (diff)
Add scope name to TF_AddGradients
Diffstat (limited to 'tensorflow/c')
-rw-r--r--tensorflow/c/c_api.cc7
-rw-r--r--tensorflow/c/c_api.h6
-rw-r--r--tensorflow/c/c_api_test.cc44
-rw-r--r--tensorflow/c/while_loop_test.cc4
4 files changed, 40 insertions, 21 deletions
diff --git a/tensorflow/c/c_api.cc b/tensorflow/c/c_api.cc
index 10bc8cdbee..96653154e5 100644
--- a/tensorflow/c/c_api.cc
+++ b/tensorflow/c/c_api.cc
@@ -2387,8 +2387,9 @@ void TF_FinishWhile(const TF_WhileParams* params, TF_Status* status,
void TF_AbortWhile(const TF_WhileParams* params) { FreeWhileResources(params); }
-void TF_AddGradients(TF_Graph* g, TF_Output* y, int ny, TF_Output* x, int nx,
- TF_Output* dx, TF_Status* status, TF_Output* dy) {
+void TF_AddGradients(TF_Graph* g, const char* scope_name, TF_Output* y,
+ int ny, TF_Output* x, int nx, TF_Output* dx,
+ TF_Status* status, TF_Output* dy) {
#ifdef __ANDROID__
status->status = tensorflow::errors::Unimplemented(
"Adding gradients is not supported in Android. File a bug at "
@@ -2407,7 +2408,7 @@ void TF_AddGradients(TF_Graph* g, TF_Output* y, int ny, TF_Output* x, int nx,
tensorflow::Scope scope =
NewInternalScope(&g->graph, &status->status, &g->refiner)
- .NewSubScope("gradients");
+ .NewSubScope(scope_name != nullptr ? scope_name : "gradients");
if (dx != nullptr) {
std::vector<tensorflow::Output> dx_arg = OutputsFromTFOutputs(dx, ny);
diff --git a/tensorflow/c/c_api.h b/tensorflow/c/c_api.h
index c8ae6f2dd1..e896f68ce0 100644
--- a/tensorflow/c/c_api.h
+++ b/tensorflow/c/c_api.h
@@ -1138,12 +1138,16 @@ TF_CAPI_EXPORT extern void TF_AbortWhile(const TF_WhileParams* params);
// shapes in `y`.
// The partial derivatives are returned in `dy`. `dy` should be allocated to
// size `nx`.
+// `scope_name` names the scope (or sub-scope) into which all gradient
+// operations are added. If `scope_name` is nullptr, "gradients" is used by
+// default.
//
// WARNING: This function does not yet support all the gradients that python
// supports. See
// https://www.tensorflow.org/code/tensorflow/cc/gradients/README.md
// for instructions on how to add more C++ gradients.
-TF_CAPI_EXPORT void TF_AddGradients(TF_Graph* g, TF_Output* y, int ny,
+TF_CAPI_EXPORT void TF_AddGradients(TF_Graph* g, const char* scope_name,
+ TF_Output* y, int ny,
TF_Output* x, int nx, TF_Output* dx,
TF_Status* status, TF_Output* dy);
diff --git a/tensorflow/c/c_api_test.cc b/tensorflow/c/c_api_test.cc
index e674b1623c..2fe9e91583 100644
--- a/tensorflow/c/c_api_test.cc
+++ b/tensorflow/c/c_api_test.cc
@@ -1474,16 +1474,18 @@ class CApiGradientsTest : public ::testing::Test {
TF_DeleteStatus(s_);
}
- void TestGradientsSuccess(bool grad_inputs_provided) {
+ void TestGradientsSuccess(bool grad_inputs_provided,
+ const char* scope_name = nullptr) {
TF_Output inputs[2];
TF_Output outputs[1];
TF_Output grad_outputs[2];
TF_Output expected_grad_outputs[2];
BuildSuccessGraph(inputs, outputs);
- BuildExpectedGraph(grad_inputs_provided, expected_grad_outputs);
+ BuildExpectedGraph(grad_inputs_provided, scope_name, expected_grad_outputs);
- AddGradients(grad_inputs_provided, inputs, 2, outputs, 1, grad_outputs);
+ AddGradients(grad_inputs_provided, scope_name, inputs, 2, outputs, 1,
+ grad_outputs);
EXPECT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_);
@@ -1505,7 +1507,8 @@ class CApiGradientsTest : public ::testing::Test {
BuildErrorGraph(inputs, outputs);
- AddGradients(grad_inputs_provided, inputs, 1, outputs, 1, grad_outputs);
+ AddGradients(grad_inputs_provided, nullptr, inputs, 1, outputs, 1,
+ grad_outputs);
string expected_msg =
"No gradient defined for op: TestOpWithNoGradient. Please see "
@@ -1549,19 +1552,20 @@ class CApiGradientsTest : public ::testing::Test {
EXPECT_EQ(*a_data, *b_data);
}
- void AddGradients(bool grad_inputs_provided, TF_Output* inputs, int ninputs,
- TF_Output* outputs, int noutputs, TF_Output* grad_outputs) {
+ void AddGradients(bool grad_inputs_provided, const char* scope_name,
+ TF_Output* inputs, int ninputs, TF_Output* outputs,
+ int noutputs, TF_Output* grad_outputs) {
if (grad_inputs_provided) {
TF_Output grad_inputs[1];
const float grad_inputs_val[] = {1.0, 1.0, 1.0, 1.0};
TF_Operation* grad_inputs_op =
FloatConst2x2(graph_, s_, grad_inputs_val, "GradInputs");
grad_inputs[0] = TF_Output{grad_inputs_op, 0};
- TF_AddGradients(graph_, outputs, noutputs, inputs, ninputs, grad_inputs,
- s_, grad_outputs);
+ TF_AddGradients(graph_, scope_name, outputs, noutputs, inputs, ninputs,
+ grad_inputs, s_, grad_outputs);
} else {
- TF_AddGradients(graph_, outputs, noutputs, inputs, ninputs, nullptr, s_,
- grad_outputs);
+ TF_AddGradients(graph_, scope_name, outputs, noutputs, inputs, ninputs,
+ nullptr, s_, grad_outputs);
}
}
@@ -1600,6 +1604,7 @@ class CApiGradientsTest : public ::testing::Test {
}
void BuildExpectedGraph(bool grad_inputs_provided,
+ const char* grad_scope_name,
TF_Output* expected_grad_outputs) {
// The expected graph looks like this if grad_inputs_provided.
// If grad_inputs_provided is false, Const_0 will be a OnesLike op.
@@ -1628,6 +1633,10 @@ class CApiGradientsTest : public ::testing::Test {
//
const float const0_val[] = {1.0, 2.0, 3.0, 4.0};
const float const1_val[] = {1.0, 0.0, 0.0, 1.0};
+ const char* grad_prefix = grad_scope_name;
+ if (grad_scope_name == nullptr) {
+ grad_prefix = "gradients";
+ }
TF_Operation* const0 =
FloatConst2x2(expected_graph_, s_, const0_val, "Const_0");
TF_Operation* const1 =
@@ -1640,13 +1649,14 @@ class CApiGradientsTest : public ::testing::Test {
const float const3_val[] = {1.0, 1.0, 1.0, 1.0};
const3 = FloatConst2x2(expected_graph_, s_, const3_val, "GradInputs");
} else {
- const3 = OnesLike(expected_graph_, s_, matmul, "gradients/OnesLike");
+ const3 = OnesLike(expected_graph_, s_, matmul,
+ strings::StrCat(grad_prefix, "/OnesLike").c_str());
}
TF_Operation* matmul1 = MatMul(expected_graph_, s_, const3, const1,
- "gradients/MatMul", false, true);
+ strings::StrCat(grad_prefix, "/MatMul").c_str(), false, true);
TF_Operation* matmul2 = MatMul(expected_graph_, s_, const0, const3,
- "gradients/MatMul_1", true, false);
+ strings::StrCat(grad_prefix, "/MatMul_1").c_str(), true, false);
expected_grad_outputs[0] = {matmul1, 0};
expected_grad_outputs[1] = {matmul2, 0};
}
@@ -1717,6 +1727,10 @@ TEST_F(CApiGradientsTest, Gradients_NoGradInputs) {
TestGradientsSuccess(false);
}
+TEST_F(CApiGradientsTest, Gradients_NoGradInputsWithScopeName) {
+ TestGradientsSuccess(false, "gradscope");
+}
+
TEST_F(CApiGradientsTest, OpWithNoGradientRegistered_GradInputs) {
TestGradientsError(true);
}
@@ -1743,11 +1757,11 @@ TEST_F(CApiGradientsTest, MultipleCallsToAddGradients) {
TF_Output outputs[1] = {{xy, 0}};
TF_Output inputs[1] = {{x, 0}};
- TF_AddGradients(graph_, outputs, 1, inputs, 1, nullptr, s_, &dxy_dx);
+ TF_AddGradients(graph_, nullptr, outputs, 1, inputs, 1, nullptr, s_, &dxy_dx);
ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_);
inputs[0] = {y, 0};
- TF_AddGradients(graph_, outputs, 1, inputs, 1, nullptr, s_, &dxy_dy);
+ TF_AddGradients(graph_, nullptr, outputs, 1, inputs, 1, nullptr, s_, &dxy_dy);
ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_);
TF_SessionOptions* opts = TF_NewSessionOptions();
diff --git a/tensorflow/c/while_loop_test.cc b/tensorflow/c/while_loop_test.cc
index d2d887f32c..12225fd1cb 100644
--- a/tensorflow/c/while_loop_test.cc
+++ b/tensorflow/c/while_loop_test.cc
@@ -431,8 +431,8 @@ TEST_F(CApiWhileLoopTest, Gradients) {
// Create backprop graph
TF_Output grad_output;
- TF_AddGradients(graph_, outputs_.data(), outputs_.size(), inputs_.data(), 1,
- nullptr, s_, &grad_output);
+ TF_AddGradients(graph_, nullptr, outputs_.data(), outputs_.size(),
+ inputs_.data(), 1, nullptr, s_, &grad_output);
ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_);
// Run gradient