about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/grid_rnn
diff options
context:
space:
mode:
author: A. Unique TensorFlower <nobody@tensorflow.org> 2016-05-23 11:54:39 -0800
committer: TensorFlower Gardener <gardener@tensorflow.org> 2016-05-23 13:04:24 -0700
commit: 8943617dc9547ea13e4c6906ee070abe49faa13b (patch)
tree: aa9fb77f22d0fe4bec0ff9d6fcde4a95ec82cf67 /tensorflow/contrib/grid_rnn
parent: 892ca4ddc12852a7b4633fd08f163941356cb4e6 (diff)
Move core Slim layers into contrib.layers.
Change: 123027385
Diffstat (limited to 'tensorflow/contrib/grid_rnn')
-rw-r--r--tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py12
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py b/tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py
index 9c75b4ae2f..3976727609 100644
--- a/tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py
+++ b/tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
-
+# TODO(b/28879898) Fix all lint issues and clean the code.
"""Module for constructing GridRNN cells"""
from __future__ import absolute_import
from __future__ import division
@@ -335,10 +335,12 @@ def _propagate(dim_indices, conf, cell, c_prev, m_prev, new_output, new_state, f
with vs.variable_scope('non_recurrent' if conf.tied else 'non_recurrent/cell_{}'.format(i)):
if conf.tied and not(first_call and i == dim_indices[0]):
vs.get_variable_scope().reuse_variables()
- new_output[d.idx] = layers.fully_connected(linear_args, num_output_units=conf.num_units,
- activation_fn=d.non_recurrent_fn,
- weight_init=vs.get_variable_scope().initializer or
- layers.initializers.xavier_initializer)
+ new_output[d.idx] = layers.legacy_fully_connected(
+ linear_args,
+ num_output_units=conf.num_units,
+ activation_fn=d.non_recurrent_fn,
+ weight_init=vs.get_variable_scope().initializer or
+ layers.initializers.xavier_initializer)
else:
if c_prev[i] is not None:
cell_state = array_ops.concat(1, [c_prev[i], last_dim_output])