diff options
author | Martin Wicke <wicke@google.com> | 2017-01-29 17:50:23 -0800 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2017-01-29 18:08:20 -0800 |
commit | cb45a7d952ee126efe43ef1b2c08fb3503e2f6bd (patch) | |
tree | 1501df0fa66f1419d0cf778be9d2cf5eb95773db /tensorflow/contrib/linear_optimizer | |
parent | 4148fd588de61020d81cf2018c2c7c334e05b568 (diff) |
Seal contrib interfaces (as much as feasible). If you were using a symbol which is now hidden, it should be added to the _allowed_symbols list in the appropriate __init__.py file.
Change: 145943844
Diffstat (limited to 'tensorflow/contrib/linear_optimizer')
-rw-r--r-- | tensorflow/contrib/linear_optimizer/__init__.py | 5 | ||||
-rw-r--r-- | tensorflow/contrib/linear_optimizer/python/ops/sharded_mutable_dense_hashtable.py | 8 |
2 files changed, 9 insertions, 4 deletions
diff --git a/tensorflow/contrib/linear_optimizer/__init__.py b/tensorflow/contrib/linear_optimizer/__init__.py index 83bd8b5fcf..d447487b4a 100644 --- a/tensorflow/contrib/linear_optimizer/__init__.py +++ b/tensorflow/contrib/linear_optimizer/__init__.py @@ -17,6 +17,8 @@ ## This package provides optimizers to train linear models. @@SdcaModel +@@SparseFeatureColumn +@@SDCAOptimizer """ from __future__ import absolute_import from __future__ import division @@ -25,3 +27,6 @@ from __future__ import print_function from tensorflow.contrib.linear_optimizer.python.ops.sdca_ops import SdcaModel from tensorflow.contrib.linear_optimizer.python.ops.sparse_feature_column import SparseFeatureColumn from tensorflow.contrib.linear_optimizer.python.sdca_optimizer import SDCAOptimizer + +from tensorflow.python.util.all_util import remove_undocumented +remove_undocumented(__name__) diff --git a/tensorflow/contrib/linear_optimizer/python/ops/sharded_mutable_dense_hashtable.py b/tensorflow/contrib/linear_optimizer/python/ops/sharded_mutable_dense_hashtable.py index 494dfb6c99..7e214905b1 100644 --- a/tensorflow/contrib/linear_optimizer/python/ops/sharded_mutable_dense_hashtable.py +++ b/tensorflow/contrib/linear_optimizer/python/ops/sharded_mutable_dense_hashtable.py @@ -20,7 +20,7 @@ from __future__ import print_function from six.moves import range -from tensorflow.contrib.lookup import lookup_ops +from tensorflow.contrib import lookup from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape @@ -30,7 +30,7 @@ from tensorflow.python.ops import data_flow_ops from tensorflow.python.ops import math_ops -class ShardedMutableDenseHashTable(lookup_ops.LookupInterface): +class ShardedMutableDenseHashTable(lookup.LookupInterface): """A sharded version of MutableDenseHashTable. 
It is designed to be interface compatible with LookupInterface and @@ -41,7 +41,7 @@ class ShardedMutableDenseHashTable(lookup_ops.LookupInterface): internally. The shard is computed via the modulo operation on the key. """ - # TODO(andreasst): consider moving this to lookup_ops + # TODO(andreasst): consider moving this to lookup module def __init__(self, key_dtype, @@ -56,7 +56,7 @@ class ShardedMutableDenseHashTable(lookup_ops.LookupInterface): table_shards = [] for i in range(num_shards): table_shards.append( - lookup_ops.MutableDenseHashTable( + lookup.MutableDenseHashTable( key_dtype=key_dtype, value_dtype=value_dtype, default_value=default_value, |