aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/contrib/tfprof
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2017-06-26 18:00:10 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-06-26 18:05:57 -0700
commitc9e3617b4b018a19d83aa395aadb1e1f6017af9d (patch)
tree538130e53e0279b6ae316d4bf2388edfbacb74cf /tensorflow/contrib/tfprof
parent3576c25078090c35658ca0512cbd03697d36e06b (diff)
Support advisor in all places (Command line, APIs)
Add expensive operation checker PiperOrigin-RevId: 160222348
Diffstat (limited to 'tensorflow/contrib/tfprof')
-rw-r--r--tensorflow/contrib/tfprof/python/tools/tfprof/internal/pywrap_tensorflow_print_model_analysis.i1
-rw-r--r--tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer.py121
-rw-r--r--tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer_test.py30
-rw-r--r--tensorflow/contrib/tfprof/python/tools/tfprof/profiler_test.py16
4 files changed, 139 insertions, 29 deletions
diff --git a/tensorflow/contrib/tfprof/python/tools/tfprof/internal/pywrap_tensorflow_print_model_analysis.i b/tensorflow/contrib/tfprof/python/tools/tfprof/internal/pywrap_tensorflow_print_model_analysis.i
index 40f29ae8a2..582c36e339 100644
--- a/tensorflow/contrib/tfprof/python/tools/tfprof/internal/pywrap_tensorflow_print_model_analysis.i
+++ b/tensorflow/contrib/tfprof/python/tools/tfprof/internal/pywrap_tensorflow_print_model_analysis.i
@@ -43,7 +43,6 @@ using tensorflow::int64;
%unignore tensorflow::tfprof::DeleteProfiler;
%unignore tensorflow::tfprof::AddStep;
%unignore tensorflow::tfprof::Profile;
-%unignore tensorflow::tfprof::Advise;
%include "tensorflow/tools/tfprof/internal/print_model_analysis.h"
diff --git a/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer.py b/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer.py
index 419beac0b9..c781d2af4e 100644
--- a/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer.py
+++ b/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer.py
@@ -20,6 +20,8 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import six
+
from tensorflow.contrib.tfprof.python.tools.tfprof import tfprof_logger
from tensorflow.contrib.tfprof.python.tools.tfprof.internal import pywrap_tensorflow_print_model_analysis_lib as print_mdl
from tensorflow.python.framework import errors
@@ -108,49 +110,77 @@ PRINT_ALL_TIMING_MEMORY = {
'dump_to_file': ''
}
+# The following options are for 'advise' tfprof_cmd.
+# Show all advice.
+ALL_ADVICE = {
+ 'ExpensiveOperationChecker': {},
+ 'AcceleratorUtilizationChecker': {},
+ 'JobChecker': {}, # Only available internally.
+ 'OperationChecker': {},
+}
+
# pylint: enable=bad-whitespace
# pylint: enable=bad-continuation
-def _build_options(tfprof_options):
+def _build_options(options):
"""Build tfprof.OptionsProto.
Args:
- tfprof_options: A dictionary of options.
+ options: A dictionary of options.
Returns:
tfprof.OptionsProto.
"""
opts = tfprof_options_pb2.OptionsProto()
- opts.max_depth = tfprof_options.get('max_depth', 10)
- opts.min_bytes = tfprof_options.get('min_bytes', 0)
- opts.min_micros = tfprof_options.get('min_micros', 0)
- opts.min_params = tfprof_options.get('min_params', 0)
- opts.min_float_ops = tfprof_options.get('min_float_ops', 0)
- opts.min_occurrence = tfprof_options.get('min_occurrence', 0)
+ opts.max_depth = options.get('max_depth', 10)
+ opts.min_bytes = options.get('min_bytes', 0)
+ opts.min_micros = options.get('min_micros', 0)
+ opts.min_params = options.get('min_params', 0)
+ opts.min_float_ops = options.get('min_float_ops', 0)
+ opts.min_occurrence = options.get('min_occurrence', 0)
- opts.step = tfprof_options.get('step', -1)
+ opts.step = options.get('step', -1)
- opts.order_by = tfprof_options.get('order_by', 'name')
+ opts.order_by = options.get('order_by', 'name')
- for p in tfprof_options.get('account_type_regexes', []):
+ for p in options.get('account_type_regexes', []):
opts.account_type_regexes.append(p)
- for p in tfprof_options.get('start_name_regexes', []):
+ for p in options.get('start_name_regexes', []):
opts.start_name_regexes.append(p)
- for p in tfprof_options.get('trim_name_regexes', []):
+ for p in options.get('trim_name_regexes', []):
opts.trim_name_regexes.append(p)
- for p in tfprof_options.get('show_name_regexes', []):
+ for p in options.get('show_name_regexes', []):
opts.show_name_regexes.append(p)
- for p in tfprof_options.get('hide_name_regexes', []):
+ for p in options.get('hide_name_regexes', []):
opts.hide_name_regexes.append(p)
- opts.account_displayed_op_only = tfprof_options.get(
- 'account_displayed_op_only', False)
+ opts.account_displayed_op_only = options.get('account_displayed_op_only',
+ False)
- for p in tfprof_options.get('select', []):
+ for p in options.get('select', []):
opts.select.append(p)
- opts.output = tfprof_options.get('output', 'stdout')
- opts.dump_to_file = tfprof_options.get('dump_to_file', '')
+ opts.output = options.get('output', 'stdout')
+ opts.dump_to_file = options.get('dump_to_file', '')
+
+ return opts
+
+
+def _build_advisor_options(options):
+ """Build tfprof.AdvisorOptionsProto.
+ Args:
+ options: A dictionary of options. See ALL_ADVICE example.
+ Returns:
+ tfprof.AdvisorOptionsProto.
+ """
+ opts = tfprof_options_pb2.AdvisorOptionsProto()
+ if options is None:
+ return opts
+ for checker, checker_opts in six.iteritems(options):
+ checker_ops_pb = tfprof_options_pb2.AdvisorOptionsProto.CheckerOption()
+ for k, v in six.iteritems(checker_opts):
+ checker_ops_pb[k] = v
+ opts.checkers[checker].MergeFrom(checker_ops_pb)
return opts
@@ -190,7 +220,7 @@ class Profiler(object):
else:
_ = sess.run(...)
# Auto detect problems and generate advice.
- profiler.advise()
+ profiler.advise(model_analyzer.ALL_ADVICE)
"""
def __init__(self, graph, op_log=None):
@@ -288,9 +318,19 @@ class Profiler(object):
print_mdl.Profile('graph'.encode('utf-8'), opts.SerializeToString()))
return tfprof_node
- def advise(self):
- """Automatically detect problems and generate reports."""
- print_mdl.Advise()
+ def advise(self, options=ALL_ADVICE): # pylint: disable=dangerous-default-value
+ """Automatically detect problems and generate reports.
+
+ Args:
+ options: A dict of options.
+ Returns:
+ An AdviceProto that contains the reports from all checkers.
+ """
+ advise_pb = tfprof_output_pb2.AdviceProto()
+ opts = _build_advisor_options(options)
+ advise_pb.ParseFromString(
+ print_mdl.Profile('advise'.encode('utf-8'), opts.SerializeToString()))
+ return advise_pb
def print_model_analysis(graph,
@@ -354,3 +394,36 @@ def print_model_analysis(graph,
None, None, 'unknown tfprof_cmd: %s\n' % tfprof_cmd)
return tfprof_node
+
+
+def advise(graph, run_meta=None, tfprof_options=ALL_ADVICE): # pylint: disable=dangerous-default-value
+ """Auto profile and advise.
+
+ Builds profiles and automatically checks anomalies of various
+ aspects. See go/tfprof or README for examples and tutorials.
+
+ Args:
+ graph: tf.Graph.
+ run_meta: tensorflow::RunMetadata proto. Allows auto-profile
+ time and memory.
+ tfprof_options: see ALL_ADVICE example above.
+ Returns:
+ Returns AdviceProto proto
+ """
+ # pylint: disable=protected-access
+ op_log = tfprof_logger._merge_default_with_oplog(
+ graph, None, run_meta, add_trace=True)
+ # pylint: enable=protected-access
+
+ run_meta_str = run_meta.SerializeToString() if run_meta else b''
+
+ opts = _build_advisor_options(tfprof_options)
+ ret = tfprof_output_pb2.AdviceProto()
+ ret.ParseFromString(
+ print_mdl.PrintModelAnalysis(
+ graph.as_graph_def(add_shapes=True).SerializeToString(),
+ run_meta_str,
+ op_log.SerializeToString(),
+ 'advise'.encode('utf-8'),
+ opts.SerializeToString()))
+ return ret
diff --git a/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer_test.py b/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer_test.py
index 9db752c577..fea27a82a5 100644
--- a/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer_test.py
+++ b/tensorflow/contrib/tfprof/python/tools/tfprof/model_analyzer_test.py
@@ -126,7 +126,7 @@ class PrintModelAnalysisTest(test.TestCase):
opts['account_displayed_op_only'] = False
opts['select'] = ['params', 'float_ops']
- with session.Session() as sess, ops.device('/cpu:0'):
+ with session.Session() as sess:
x = lib.BuildFullModel()
sess.run(variables.global_variables_initializer())
@@ -176,6 +176,7 @@ class PrintModelAnalysisTest(test.TestCase):
opts['select'] = [
'bytes', 'params', 'float_ops', 'device'
]
+ opts['output'] = 'none'
with session.Session() as sess:
x = lib.BuildSmallModel()
@@ -276,6 +277,33 @@ class PrintModelAnalysisTest(test.TestCase):
self.assertEqual(total_children, 15)
self.assertGreater(input_shapes, 0)
+ def testAdvisor(self):
+ ops.reset_default_graph()
+
+ with session.Session() as sess:
+ x = lib.BuildFullModel()
+
+ sess.run(variables.global_variables_initializer())
+ run_meta = config_pb2.RunMetadata()
+ _ = sess.run(
+ x,
+ options=config_pb2.RunOptions(
+ trace_level=config_pb2.RunOptions.FULL_TRACE),
+ run_metadata=run_meta)
+
+ advice_pb = model_analyzer.advise(sess.graph, run_meta)
+ self.assertTrue('AcceleratorUtilizationChecker' in advice_pb.checkers)
+ self.assertTrue('ExpensiveOperationChecker' in advice_pb.checkers)
+ self.assertTrue('OperationChecker' in advice_pb.checkers)
+
+ checker = advice_pb.checkers['AcceleratorUtilizationChecker']
+ if test.is_gpu_available():
+ self.assertGreater(len(checker.reports), 0)
+ else:
+ self.assertEqual(len(checker.reports), 0)
+ checker = advice_pb.checkers['ExpensiveOperationChecker']
+ self.assertGreater(len(checker.reports), 0)
+
if __name__ == '__main__':
test.main()
diff --git a/tensorflow/contrib/tfprof/python/tools/tfprof/profiler_test.py b/tensorflow/contrib/tfprof/python/tools/tfprof/profiler_test.py
index 5daaafd7c8..c7113b6a57 100644
--- a/tensorflow/contrib/tfprof/python/tools/tfprof/profiler_test.py
+++ b/tensorflow/contrib/tfprof/python/tools/tfprof/profiler_test.py
@@ -129,7 +129,7 @@ class ProfilerTest(test.TestCase):
opts = model_analyzer.PRINT_ALL_TIMING_MEMORY.copy()
opts['account_type_regexes'] = ['.*']
- with session.Session() as sess, ops.device('/cpu:0'):
+ with session.Session() as sess:
r1, r2, r3 = lib.BuildSplitableModel()
sess.run(variables.global_variables_initializer())
@@ -179,8 +179,18 @@ class ProfilerTest(test.TestCase):
self.assertEqual(lib.SearchTFProfNode(pb2, 'add'), None)
self.assertGreater(lib.SearchTFProfNode(pb3, 'add').exec_micros, 0)
- # TODO(xpan): Better test of advisor.
- profiler.advise()
+ advice_pb = profiler.advise(model_analyzer.ALL_ADVICE)
+ self.assertTrue('AcceleratorUtilizationChecker' in advice_pb.checkers)
+ self.assertTrue('ExpensiveOperationChecker' in advice_pb.checkers)
+ self.assertTrue('OperationChecker' in advice_pb.checkers)
+
+ checker = advice_pb.checkers['AcceleratorUtilizationChecker']
+ if test.is_gpu_available():
+ self.assertGreater(len(checker.reports), 0)
+ else:
+ self.assertEqual(len(checker.reports), 0)
+ checker = advice_pb.checkers['ExpensiveOperationChecker']
+ self.assertGreater(len(checker.reports), 0)
if __name__ == '__main__':