Diffstat (limited to 'tensorflow/contrib/slim/python')
 tensorflow/contrib/slim/python/slim/learning.py        | 5 +++--
 tensorflow/contrib/slim/python/slim/nets/resnet_v1.py  | 2 +-
 tensorflow/contrib/slim/python/slim/nets/resnet_v2.py  | 2 +-
 3 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/tensorflow/contrib/slim/python/slim/learning.py b/tensorflow/contrib/slim/python/slim/learning.py
index 6a200de1ea..8a2c74742a 100644
--- a/tensorflow/contrib/slim/python/slim/learning.py
+++ b/tensorflow/contrib/slim/python/slim/learning.py
@@ -389,7 +389,7 @@ def create_train_op(total_loss,
total_loss: A `Tensor` representing the total loss.
optimizer: A tf.Optimizer to use for computing the gradients.
global_step: A `Tensor` representing the global step variable. If left as
- `_USE_GLOBAL_STEP`, then slim.variables.global_step() is used.
+ `_USE_GLOBAL_STEP`, then tf.contrib.framework.global_step() is used.
update_ops: An optional list of updates to execute. If `update_ops` is
`None`, then the update ops are set to the contents of the
`tf.GraphKeys.UPDATE_OPS` collection. If `update_ops` is not `None`, but
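
Note: a minimal, hypothetical sketch of calling create_train_op with global_step left at its default; the toy variable, loss, and learning rate are made up, and only the TF 1.x contrib API documented above is assumed.

    import tensorflow as tf
    slim = tf.contrib.slim

    # A toy scalar loss standing in for a real model's total loss.
    weight = tf.Variable(1.0)
    total_loss = tf.square(weight - 3.0)

    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)

    # Leaving global_step at its default (_USE_GLOBAL_STEP) makes create_train_op
    # fall back to the graph's global step, as the updated docstring describes.
    train_op = slim.learning.create_train_op(total_loss, optimizer)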
@@ -578,7 +578,8 @@ def train(train_op,
is_chief: Specifies whether or not the training is being run by the primary
replica during replica training.
global_step: The `Tensor` representing the global step. If left as `None`,
- then slim.variables.get_or_create_global_step() is used.
+ then training_util.get_or_create_global_step(), that is,
+ tf.contrib.framework.global_step() is used.
number_of_steps: The max number of gradient steps to take during training,
as measured by 'global_step': training will stop if global_step is
greater than 'number_of_steps'. If the value is left as None, training
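
Note: a similarly hedged sketch of the train() side of the same default; the logdir and number_of_steps values are placeholders, and train_op is assumed to come from the create_train_op sketch above.

    import tensorflow as tf
    slim = tf.contrib.slim

    # 'train_op' is assumed to be the one built in the previous sketch.
    # When global_step is left as None, train() fetches or creates the graph's
    # global step itself; passing it explicitly, as below, is equivalent.
    global_step = tf.train.get_or_create_global_step()
    slim.learning.train(train_op,
                        logdir='/tmp/slim_example_logs',  # hypothetical log dir
                        global_step=global_step,
                        number_of_steps=100)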
diff --git a/tensorflow/contrib/slim/python/slim/nets/resnet_v1.py b/tensorflow/contrib/slim/python/slim/nets/resnet_v1.py
index 235a595de4..11c4214176 100644
--- a/tensorflow/contrib/slim/python/slim/nets/resnet_v1.py
+++ b/tensorflow/contrib/slim/python/slim/nets/resnet_v1.py
@@ -207,7 +207,7 @@ def resnet_v1(inputs,
net = resnet_utils.stack_blocks_dense(net, blocks, output_stride)
if global_pool:
# Global average pooling.
- net = math_ops.reduce_mean(net, [1, 2], name='pool5', keep_dims=True)
+ net = math_ops.reduce_mean(net, [1, 2], name='pool5', keepdims=True)
if num_classes is not None:
net = layers.conv2d(
net,
diff --git a/tensorflow/contrib/slim/python/slim/nets/resnet_v2.py b/tensorflow/contrib/slim/python/slim/nets/resnet_v2.py
index 61665c9c8b..19e0538dd1 100644
--- a/tensorflow/contrib/slim/python/slim/nets/resnet_v2.py
+++ b/tensorflow/contrib/slim/python/slim/nets/resnet_v2.py
@@ -221,7 +221,7 @@ def resnet_v2(inputs,
net, activation_fn=nn_ops.relu, scope='postnorm')
if global_pool:
# Global average pooling.
- net = math_ops.reduce_mean(net, [1, 2], name='pool5', keep_dims=True)
+ net = math_ops.reduce_mean(net, [1, 2], name='pool5', keepdims=True)
if num_classes is not None:
net = layers_lib.conv2d(
net,
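
Note: the only change in both ResNet files is the keyword rename keep_dims -> keepdims on reduce_mean (math_ops.reduce_mean is the internal name for tf.reduce_mean). Below is a small illustrative sketch of the global-average-pooling pattern those lines implement, with made-up tensor shapes.

    import tensorflow as tf

    # Hypothetical NHWC feature map: batch=2, height=7, width=7, channels=64.
    net = tf.random_normal([2, 7, 7, 64])

    # Global average pooling over the spatial axes. keepdims=True (the new
    # spelling; keep_dims is deprecated) keeps the reduced axes as size-1
    # dimensions, so 'pooled' has shape [2, 1, 1, 64] rather than [2, 64].
    pooled = tf.reduce_mean(net, [1, 2], keepdims=True)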