Diffstat (limited to 'tensorflow/contrib/losses/python/losses/loss_ops_test.py')
-rw-r--r--  tensorflow/contrib/losses/python/losses/loss_ops_test.py | 52
1 file changed, 52 insertions(+), 0 deletions(-)
diff --git a/tensorflow/contrib/losses/python/losses/loss_ops_test.py b/tensorflow/contrib/losses/python/losses/loss_ops_test.py
index 94b8dfca57..81a4aaba2b 100644
--- a/tensorflow/contrib/losses/python/losses/loss_ops_test.py
+++ b/tensorflow/contrib/losses/python/losses/loss_ops_test.py
@@ -243,6 +243,34 @@ class SoftmaxCrossEntropyLossTest(test.TestCase):
       expected_value = 400.0 * label_smoothing / 3.0
       self.assertAlmostEqual(loss.eval(), expected_value, 3)

+  def testLossWithDynamicallyShapedWeights1D(self):
+    logits = constant_op.constant([[10.0, 0.0, 0.0],
+                                   [0.0, 10.0, 0.0],
+                                   [0.0, 0.0, 10.0]])
+    labels = constant_op.constant([[0, 0, 1],
+                                   [1, 0, 0],
+                                   [0, 1, 0]])
+    weights = [2.3, 2.4, 2.5]
+    weights_placeholder = array_ops.placeholder(dtypes.float32, shape=[None])
+    loss = loss_ops.softmax_cross_entropy(logits, labels, weights_placeholder)
+    with self.test_session() as sess:
+      loss = sess.run(loss, {weights_placeholder: weights})
+      self.assertAlmostEqual(np.average(weights) * 10.0, loss, 3)
+
+  def testLossWithDynamicallyShapedWeights2D(self):
+    logits = constant_op.constant([[10.0, 0.0, 0.0],
+                                   [0.0, 10.0, 0.0],
+                                   [0.0, 0.0, 10.0]])
+    labels = constant_op.constant([[0, 0, 1],
+                                   [1, 0, 0],
+                                   [0, 1, 0]])
+    weights = [[2.3], [2.4], [2.5]]
+    weights_placeholder = array_ops.placeholder(dtypes.float32, shape=[None, None])
+    loss = loss_ops.softmax_cross_entropy(logits, labels, weights_placeholder)
+    with self.test_session() as sess:
+      loss = sess.run(loss, {weights_placeholder: weights})
+      self.assertAlmostEqual(np.average(weights) * 10.0, loss, 3)
+

 class SparseSoftmaxCrossEntropyLossTest(test.TestCase):
@@ -445,6 +473,30 @@ class SparseSoftmaxCrossEntropyLossTest(test.TestCase):
         loss_ops.sparse_softmax_cross_entropy(
             logits, labels, weights=weights).eval()

+  def testLossWithDynamicallyShapedWeights1D(self):
+    logits = constant_op.constant([[10.0, 0.0, 0.0],
+                                   [0.0, 10.0, 0.0],
+                                   [0.0, 0.0, 10.0]])
+    labels = constant_op.constant([2, 0, 1])
+    weights = [2.3, 2.4, 2.5]
+    weights_placeholder = array_ops.placeholder(dtypes.float32, shape=[None])
+    loss = loss_ops.sparse_softmax_cross_entropy(logits, labels, weights_placeholder)
+    with self.test_session() as sess:
+      loss = sess.run(loss, {weights_placeholder: weights})
+      self.assertAlmostEqual(np.average(weights) * 10.0, loss, 3)
+
+  def testLossWithDynamicallyShapedWeights2D(self):
+    logits = constant_op.constant([[10.0, 0.0, 0.0],
+                                   [0.0, 10.0, 0.0],
+                                   [0.0, 0.0, 10.0]])
+    labels = constant_op.constant([2, 0, 1])
+    weights = [[2.3], [2.4], [2.5]]
+    weights_placeholder = array_ops.placeholder(dtypes.float32, shape=[None, None])
+    loss = loss_ops.sparse_softmax_cross_entropy(logits, labels, weights_placeholder)
+    with self.test_session() as sess:
+      loss = sess.run(loss, {weights_placeholder: weights})
+      self.assertAlmostEqual(np.average(weights) * 10.0, loss, 3)
+

 class SigmoidCrossEntropyLossTest(test.TestCase):
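
Why the new tests expect np.average(weights) * 10.0: in every row of logits the
correct class has logit 0.0 while one incorrect class has logit 10.0, so each
example's cross entropy is log(e^10 + 2), roughly 10.0, and contrib's weighted
loss divides the weighted sum by the number of examples carrying a nonzero
weight (here, the batch size of 3). The snippet below is a minimal NumPy sketch
of that arithmetic under those assumptions; it is not part of the commit.

import numpy as np

# Same inputs as the sparse test variant above.
logits = np.array([[10.0, 0.0, 0.0],
                   [0.0, 10.0, 0.0],
                   [0.0, 0.0, 10.0]])
labels = np.array([2, 0, 1])            # the correct class always has logit 0.0
weights = np.array([2.3, 2.4, 2.5])

# Per-example cross entropy: log(sum(exp(logits))) - logit of the correct class.
log_sum_exp = np.log(np.sum(np.exp(logits), axis=1))
per_example = log_sum_exp - logits[np.arange(3), labels]   # each ~= 10.0

# Assumed compute_weighted_loss semantics: weighted sum / num nonzero weights.
expected = np.sum(weights * per_example) / len(weights)
print(expected)                          # ~= np.average(weights) * 10.0 = 24.0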