about | summary | refs | log | tree | commit | diff | homepage
path: root/tensorflow/tools/docs
diff options
context:
space:
mode:
author: Mark Daoust <markdaoust@google.com> 2018-06-15 13:59:22 -0700
committer: TensorFlower Gardener <gardener@tensorflow.org> 2018-06-15 14:02:16 -0700
commit c2956886be6d00d1915ccc52794b7205de3f53be (patch)
tree c9eaf2ea076665166e7e7a19f030a166fcc2ee26 /tensorflow/tools/docs
parent 817c39bd37131b9624ef35f3d014e8645c91312e (diff)
Quiet the doc generator.
Delete most print statements, use logging instead of print, and close files (to clear the "Unclosed file" warnings). Normally this produces thousands of lines of output, mostly noise.

PiperOrigin-RevId: 200769210
Diffstat (limited to 'tensorflow/tools/docs')
-rw-r--r--tensorflow/tools/docs/BUILD5
-rw-r--r--tensorflow/tools/docs/generate_lib.py38
-rw-r--r--tensorflow/tools/docs/parser.py11
-rw-r--r--tensorflow/tools/docs/py_guide_parser.py3
4 files changed, 22 insertions(+), 35 deletions(-)
diff --git a/tensorflow/tools/docs/BUILD b/tensorflow/tools/docs/BUILD
index 58b5ef8345..eea712c279 100644
--- a/tensorflow/tools/docs/BUILD
+++ b/tensorflow/tools/docs/BUILD
@@ -37,7 +37,10 @@ py_library(
srcs = ["parser.py"],
srcs_version = "PY2AND3",
visibility = ["//visibility:public"],
- deps = ["@astor_archive//:astor"],
+ deps = [
+ "//tensorflow/python:platform",
+ "@astor_archive//:astor",
+ ],
)
py_test(
diff --git a/tensorflow/tools/docs/generate_lib.py b/tensorflow/tools/docs/generate_lib.py
index 853ec6194f..67c413cccb 100644
--- a/tensorflow/tools/docs/generate_lib.py
+++ b/tensorflow/tools/docs/generate_lib.py
@@ -21,6 +21,7 @@ from __future__ import print_function
import argparse
import fnmatch
import os
+import shutil
import six
@@ -81,12 +82,8 @@ def write_docs(output_dir,
raise ValueError("'output_dir' must be an absolute path.\n"
" output_dir='%s'" % output_dir)
- try:
- if not os.path.exists(output_dir):
- os.makedirs(output_dir)
- except OSError as e:
- print('Creating output dir "%s" failed: %s' % (output_dir, e))
- raise
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
# These dictionaries are used for table-of-contents generation below
# They will contain, after the for-loop below::
@@ -129,8 +126,6 @@ def write_docs(output_dir,
module_children.setdefault(subname, []).append(full_name)
break
- print('Writing docs for %s (%r).' % (full_name, py_object))
-
# Generate docs for `py_object`, resolving references.
page_info = parser.docs_for_object(full_name, py_object, parser_config)
@@ -151,10 +146,9 @@ def write_docs(output_dir,
text = text.encode('utf-8')
with open(path, 'wb') as f:
f.write(text)
- except OSError as e:
- print('Cannot write documentation for %s to %s: %s' % (full_name,
- directory, e))
- raise
+ except OSError:
+ raise OSError(
+ 'Cannot write documentation for %s to %s' % (full_name, directory))
if yaml_toc:
# Generate table of contents
@@ -433,16 +427,11 @@ def _other_docs(src_dir, output_dir, reference_resolver, file_pattern='*.md'):
# Make the directory under output_dir.
new_dir = os.path.join(output_dir,
os.path.relpath(path=dirpath, start=src_dir))
- try:
- if not os.path.exists(new_dir):
- os.makedirs(new_dir)
- except OSError as e:
- print('Creating output dir "%s" failed: %s' % (new_dir, e))
- raise
+ if not os.path.exists(new_dir):
+ os.makedirs(new_dir)
for base_name in filenames:
if base_name in EXCLUDED:
- print('Skipping excluded file %s...' % base_name)
continue
full_in_path = os.path.join(dirpath, base_name)
@@ -451,24 +440,19 @@ def _other_docs(src_dir, output_dir, reference_resolver, file_pattern='*.md'):
suffix = os.path.relpath(path=full_in_path, start=src_dir)
full_out_path = os.path.join(output_dir, suffix)
if not fnmatch.fnmatch(base_name, file_pattern):
- print('Copying un-matched file %s...' % suffix)
- open(full_out_path, 'wb').write(open(full_in_path, 'rb').read())
+ shutil.copyfile(full_in_path, full_out_path)
continue
if dirpath.endswith('/api_guides/python'):
- print('Processing Python guide %s...' % base_name)
content = tag_updater.process(full_in_path)
else:
- print('Processing doc %s...' % suffix)
- content = open(full_in_path, 'rb').read().decode('utf-8')
+ with open(full_in_path, 'rb') as f:
+ content = f.read().decode('utf-8')
content = reference_resolver.replace_references(content,
relative_path_to_root)
with open(full_out_path, 'wb') as f:
f.write(content.encode('utf-8'))
- print('Done.')
-
-
class DocGenerator(object):
"""Main entry point for generating docs."""
diff --git a/tensorflow/tools/docs/parser.py b/tensorflow/tools/docs/parser.py
index 50c9052741..64e02589bb 100644
--- a/tensorflow/tools/docs/parser.py
+++ b/tensorflow/tools/docs/parser.py
@@ -25,12 +25,12 @@ import itertools
import json
import os
import re
-import sys
import astor
import six
from google.protobuf.message import Message as ProtoMessage
+from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_inspect
@@ -53,7 +53,7 @@ class _Errors(object):
template = 'ERROR:\n output file name: %s\n %s\n\n'
for full_name, message in self._errors:
- print(template % (full_name, message), file=sys.stderr)
+ logging.warn(template, full_name, message)
def append(self, full_name, message):
"""Add an error to the collection.
@@ -761,8 +761,9 @@ def _generate_signature(func, reverse_index):
lookup_text = public_name + default_text[len(internal_name):]
break
if default_text is lookup_text:
- print('WARNING: Using default arg, failed lookup: %s, repr: %r' %
- (default_text, default))
+ logging.warn(
+ 'WARNING: Using default arg, failed lookup: %s, repr: %r',
+ default_text, default)
else:
default_text = lookup_text
else:
@@ -1213,8 +1214,6 @@ class _ClassPageInfo(object):
if not child_doc.brief.strip() and short_name in [
'__del__', '__copy__'
]:
- print('Skipping %s, defined in %s, no docstring.' % (child_name,
- defining_class))
continue
try:
diff --git a/tensorflow/tools/docs/py_guide_parser.py b/tensorflow/tools/docs/py_guide_parser.py
index 328f42d18f..b00694dc40 100644
--- a/tensorflow/tools/docs/py_guide_parser.py
+++ b/tensorflow/tools/docs/py_guide_parser.py
@@ -44,7 +44,8 @@ class PyGuideParser(object):
def process(self, full_path):
"""Read and process the file at `full_path`."""
- md_string = open(full_path, 'rb').read().decode('utf-8')
+ with open(full_path, 'rb') as f:
+ md_string = f.read().decode('utf-8')
self._lines = md_string.split('\n')
seen = set()