diff options
author | 2018-04-26 11:25:43 -0700 | |
---|---|---|
committer | 2018-04-26 11:31:14 -0700 | |
commit | a8481834bb881f67e7b9523480c28f5b987e62e8 (patch) | |
tree | fea7992e3bc87f5e0db1cf2c99c0c289572b6ed4 /tensorflow/python/training | |
parent | 6b6976e3ba19484f893092712e4577daeb92ad3b (diff) |
Removing @@ comments from core TensorFlow. They are no longer needed for exporting symbols to the TensorFlow API.
PiperOrigin-RevId: 194426855
Diffstat (limited to 'tensorflow/python/training')
-rw-r--r-- | tensorflow/python/training/basic_session_run_hooks.py | 13 | ||||
-rw-r--r-- | tensorflow/python/training/session_run_hook.py | 5 | ||||
-rw-r--r-- | tensorflow/python/training/training.py | 82 |
3 files changed, 1 insertion(+), 99 deletions(-)
diff --git a/tensorflow/python/training/basic_session_run_hooks.py b/tensorflow/python/training/basic_session_run_hooks.py index 47339e057f..d1cc7d8ce3 100644 --- a/tensorflow/python/training/basic_session_run_hooks.py +++ b/tensorflow/python/training/basic_session_run_hooks.py @@ -12,18 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -"""Some common SessionRunHook classes. - -@@LoggingTensorHook -@@StopAtStepHook -@@CheckpointSaverHook -@@StepCounterHook -@@NanLossDuringTrainingError -@@NanTensorHook -@@SummarySaverHook -@@GlobalStepWaiterHook -@@ProfilerHook -""" +"""Some common SessionRunHook classes.""" from __future__ import absolute_import from __future__ import division diff --git a/tensorflow/python/training/session_run_hook.py b/tensorflow/python/training/session_run_hook.py index 89f4030065..5daea93128 100644 --- a/tensorflow/python/training/session_run_hook.py +++ b/tensorflow/python/training/session_run_hook.py @@ -84,11 +84,6 @@ Note that if sess.run() raises OutOfRangeError or StopIteration then hooks.after_run() will not be called but hooks.end() will still be called. If sess.run() raises any other exception then neither hooks.after_run() nor hooks.end() will be called. - -@@SessionRunHook -@@SessionRunArgs -@@SessionRunContext -@@SessionRunValues """ from __future__ import absolute_import diff --git a/tensorflow/python/training/training.py b/tensorflow/python/training/training.py index 4ae7f84510..427e25d0f6 100644 --- a/tensorflow/python/training/training.py +++ b/tensorflow/python/training/training.py @@ -16,88 +16,6 @@ """Support for training models. See the @{$python/train} guide. 
- -@@Optimizer -@@GradientDescentOptimizer -@@AdadeltaOptimizer -@@AdagradOptimizer -@@AdagradDAOptimizer -@@MomentumOptimizer -@@AdamOptimizer -@@FtrlOptimizer -@@ProximalGradientDescentOptimizer -@@ProximalAdagradOptimizer -@@RMSPropOptimizer -@@custom_gradient -@@gradients -@@AggregationMethod -@@GradientTape -@@stop_gradient -@@hessians -@@clip_by_value -@@clip_by_norm -@@clip_by_average_norm -@@clip_by_global_norm -@@global_norm -@@cosine_decay -@@cosine_decay_restarts -@@linear_cosine_decay -@@noisy_linear_cosine_decay -@@exponential_decay -@@inverse_time_decay -@@natural_exp_decay -@@piecewise_constant -@@polynomial_decay -@@ExponentialMovingAverage -@@Coordinator -@@QueueRunner -@@LooperThread -@@add_queue_runner -@@start_queue_runners -@@Server -@@Supervisor -@@SessionManager -@@ClusterSpec -@@replica_device_setter -@@MonitoredTrainingSession -@@MonitoredSession -@@SingularMonitoredSession -@@Scaffold -@@SessionCreator -@@ChiefSessionCreator -@@WorkerSessionCreator -@@summary_iterator -@@SessionRunHook -@@SessionRunArgs -@@SessionRunContext -@@SessionRunValues -@@LoggingTensorHook -@@StopAtStepHook -@@CheckpointSaverHook -@@CheckpointSaverListener -@@NewCheckpointReader -@@StepCounterHook -@@NanLossDuringTrainingError -@@NanTensorHook -@@SummarySaverHook -@@GlobalStepWaiterHook -@@FinalOpsHook -@@FeedFnHook -@@ProfilerHook -@@SecondOrStepTimer -@@global_step -@@basic_train_loop -@@get_global_step -@@get_or_create_global_step -@@create_global_step -@@assert_global_step -@@write_graph -@@load_checkpoint -@@load_variable -@@list_variables -@@init_from_checkpoint -@@warm_start -@@VocabInfo """ # Optimizers. |