about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/learn/python/learn/ops/losses_ops.py
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/contrib/learn/python/learn/ops/losses_ops.py')
-rw-r--r--tensorflow/contrib/learn/python/learn/ops/losses_ops.py10
1 files changed, 5 insertions, 5 deletions
diff --git a/tensorflow/contrib/learn/python/learn/ops/losses_ops.py b/tensorflow/contrib/learn/python/learn/ops/losses_ops.py
index 086e5d78bb..b040ab3bb6 100644
--- a/tensorflow/contrib/learn/python/learn/ops/losses_ops.py
+++ b/tensorflow/contrib/learn/python/learn/ops/losses_ops.py
@@ -20,14 +20,14 @@ from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework import deprecated
-from tensorflow.contrib.losses.python.losses import loss_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops as array_ops_
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
+from tensorflow.python.ops.losses import losses
-@deprecated('2016-12-01', 'Use `tf.contrib.losses.mean_squared_error` '
+@deprecated('2016-12-01', 'Use `tf.losses.mean_squared_error` '
'and explicit logits computation.')
def mean_squared_error_regressor(tensor_in, labels, weights, biases, name=None):
"""Returns prediction and loss for mean squared error regression."""
@@ -36,10 +36,10 @@ def mean_squared_error_regressor(tensor_in, labels, weights, biases, name=None):
predictions = nn.xw_plus_b(tensor_in, weights, biases)
if len(labels.get_shape()) == 1 and len(predictions.get_shape()) == 2:
predictions = array_ops_.squeeze(predictions, squeeze_dims=[1])
- return predictions, loss_ops.mean_squared_error(predictions, labels)
+ return predictions, losses.mean_squared_error(labels, predictions)
-@deprecated('2016-12-01', 'Use `tf.contrib.losses.softmax_cross_entropy` '
+@deprecated('2016-12-01', 'Use `tf.losses.softmax_cross_entropy` '
'and explicit logits computation.')
def softmax_classifier(tensor_in,
labels,
@@ -72,4 +72,4 @@ def softmax_classifier(tensor_in,
logits = nn.xw_plus_b(tensor_in, weights, biases)
if class_weight is not None:
logits = math_ops.multiply(logits, class_weight)
- return nn.softmax(logits), loss_ops.softmax_cross_entropy(logits, labels)
+ return nn.softmax(logits), losses.softmax_cross_entropy(labels, logits)