path: root/tensorflow/contrib/eager
author    Rakesh Chada <rakesh.chada@human.x.ai>	2018-07-30 11:27:18 -0400
committer GitHub <noreply@github.com>	2018-07-30 11:27:18 -0400
commit    4312fbefce84436c0ef987ae79d9529d84649b3d (patch)
tree      0c6f4ea98a6b49d15476628c29f0f3e7fdb9ff79 /tensorflow/contrib/eager
parent    1e51e4d127c2cf1b92ae504f852df0f7e36ca785 (diff)
Moved total loss calculation logic ahead of the gradient update
Diffstat (limited to 'tensorflow/contrib/eager')
-rw-r--r--	tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb	| 10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb b/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb
index 5a24c9a548..1ab1b71bd0 100644
--- a/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb
+++ b/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb
@@ -678,23 +678,23 @@
" # using teacher forcing\n",
" dec_input = tf.expand_dims(targ[:, t], 1)\n",
" \n",
+ " batch_loss = (loss / int(targ.shape[1]))\n",
+ " \n",
+ " total_loss += batch_loss\n",
+ " \n",
" variables = encoder.variables + decoder.variables\n",
" \n",
" gradients = tape.gradient(loss, variables)\n",
" \n",
" optimizer.apply_gradients(zip(gradients, variables), tf.train.get_or_create_global_step())\n",
" \n",
- " batch_loss = (loss / int(targ.shape[1]))\n",
- " \n",
- " total_loss += batch_loss\n",
- " \n",
" if batch % 100 == 0:\n",
" print('Epoch {} Batch {} Loss {:.4f}'.format(epoch + 1,\n",
" batch,\n",
" batch_loss.numpy()))\n",
" \n",
" print('Epoch {} Loss {:.4f}'.format(epoch + 1,\n",
- " total_loss/N_BATCH))\n",
+ " total_loss / N_BATCH))\n",
" print('Time taken for 1 epoch {} sec\\n'.format(time.time() - start))"
],
"execution_count": 0,