author    Vijay Vasudevan <vrv@google.com>  2016-07-01 11:55:31 -0700
committer GitHub <noreply@github.com>      2016-07-01 11:55:31 -0700
commit    06fca4d9bc7ebff88493e105e4a1fe5002616469 (patch)
tree      e717d6a2e26d71455b0be40deedb1c11ab1eba0b /tensorflow/core/ops/ops.pbtxt
parent    1c9e73a7a54413e205c9d0b127c61f0cb1adda34 (diff)
parent    de32f9237b5958f7a6951e79f2649572d29a1980 (diff)
Merge pull request #3151 from rmlarsen/branch_126416482
Branch 126416482
Diffstat (limited to 'tensorflow/core/ops/ops.pbtxt')
-rw-r--r--  tensorflow/core/ops/ops.pbtxt | 60
1 file changed, 60 insertions(+), 0 deletions(-)
diff --git a/tensorflow/core/ops/ops.pbtxt b/tensorflow/core/ops/ops.pbtxt
index 474516bf4c..afd6507b0d 100644
--- a/tensorflow/core/ops/ops.pbtxt
+++ b/tensorflow/core/ops/ops.pbtxt
@@ -11797,6 +11797,36 @@ op {
description: "Specifically, `y = 1 / (1 + exp(-x))`."
}
op {
+ name: "SigmoidGrad"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ summary: "Computes the gradient of the sigmoid of `x` wrt its input."
+ description: "Specifically, `grad = dy * y * (1 - y)`, where `y = sigmoid(x)`, and\n`dy` is the corresponding input gradient."
+}
+op {
name: "Sign"
input_arg {
name: "x"
@@ -14644,6 +14674,36 @@ op {
summary: "Computes hyperbolic tangent of `x` element-wise."
}
op {
+ name: "TanhGrad"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ summary: "Computes the gradient for the tanh of `x` wrt its input."
+ description: "Specifically, `grad = dy * (1 - y*y)`, where `y = tanh(x)`, and `dy`\nis the corresponding input gradient."
+}
+op {
name: "TemporaryVariable"
output_arg {
name: "ref"