author Gunhan Gulsoy <gunan@google.com> 2017-01-23 21:37:43 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2017-01-23 21:46:23 -0800
commit 67c4e3daed28e22fb3977327cac14a1b7b230f04 (patch)
tree 1e214516f645a59e5f0e6e26691ef4a220a468c1 /tensorflow/tensorboard
parent 863bab34202f650282dfe00aaa082a4796fdd839 (diff)
Fixes for python 3 tests:
- string/bytes compatibility in bundle_sum_py_test and graph_transforms
- Fix http_test (tensorboard) by renaming http.py to http_util.py.
- Fix some linter errors.

Change: 145375395
Diffstat (limited to 'tensorflow/tensorboard')
-rw-r--r--  tensorflow/tensorboard/backend/BUILD                 |   2
-rw-r--r--  tensorflow/tensorboard/backend/application.py        |  62
-rw-r--r--  tensorflow/tensorboard/lib/python/BUILD              |  10
-rw-r--r--  tensorflow/tensorboard/lib/python/http_util.py       | 149
-rw-r--r--  tensorflow/tensorboard/lib/python/http_util_test.py  | 156
-rw-r--r--  tensorflow/tensorboard/plugins/projector/plugin.py   |   2
6 files changed, 345 insertions(+), 36 deletions(-)
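
The rename is mechanical for callers: the Bazel target and module http become http_util, and every http.Respond(...) call keeps the same arguments. A minimal sketch of an updated call site, for orientation before the diff below (the class name and payload are hypothetical stand-ins, not code from this commit):

from werkzeug import wrappers

from tensorflow.tensorboard.lib.python import http_util  # was: ... import http


class ExampleHandlers(object):
  """Hypothetical handler class showing the post-rename call pattern."""

  def __init__(self, logdir):
    self._logdir = logdir

  @wrappers.Request.application
  def serve_logdir(self, request):
    # Identical to the pre-rename call except for the module name.
    return http_util.Respond(
        request, {'logdir': self._logdir}, 'application/json')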
diff --git a/tensorflow/tensorboard/backend/BUILD b/tensorflow/tensorboard/backend/BUILD
index 66993ed77a..f32d25329f 100644
--- a/tensorflow/tensorboard/backend/BUILD
+++ b/tensorflow/tensorboard/backend/BUILD
@@ -17,7 +17,7 @@ py_library(
"//tensorflow/python:platform",
"//tensorflow/python:summary",
"//tensorflow/tensorboard:projector",
- "//tensorflow/tensorboard/lib/python:http",
+ "//tensorflow/tensorboard/lib/python:http_util",
"@org_pocoo_werkzeug//:werkzeug",
"@six_archive//:six",
],
diff --git a/tensorflow/tensorboard/backend/application.py b/tensorflow/tensorboard/backend/application.py
index c0e990d45e..086da7f90b 100644
--- a/tensorflow/tensorboard/backend/application.py
+++ b/tensorflow/tensorboard/backend/application.py
@@ -42,7 +42,7 @@ from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import event_accumulator
from tensorflow.python.summary import event_multiplexer
from tensorflow.tensorboard.backend import process_graph
-from tensorflow.tensorboard.lib.python import http
+from tensorflow.tensorboard.lib.python import http_util
DATA_PREFIX = '/data'
@@ -256,7 +256,8 @@ class TensorBoardWSGIApp(object):
@wrappers.Request.application
def _serve_logdir(self, request):
"""Respond with a JSON object containing this TensorBoard's logdir."""
- return http.Respond(request, {'logdir': self._logdir}, 'application/json')
+ return http_util.Respond(
+ request, {'logdir': self._logdir}, 'application/json')
@wrappers.Request.application
def _serve_scalars(self, request):
@@ -271,40 +272,40 @@ class TensorBoardWSGIApp(object):
writer = csv.writer(string_io)
writer.writerow(['Wall time', 'Step', 'Value'])
writer.writerows(values)
- return http.Respond(request, string_io.getvalue(), 'text/csv')
+ return http_util.Respond(request, string_io.getvalue(), 'text/csv')
else:
- return http.Respond(request, values, 'application/json')
+ return http_util.Respond(request, values, 'application/json')
@wrappers.Request.application
def _serve_graph(self, request):
"""Given a single run, return the graph definition in json format."""
run = request.args.get('run', None)
if run is None:
- return http.Respond(request, 'query parameter "run" is required',
- 'text/plain', 400)
+ return http_util.Respond(
+ request, 'query parameter "run" is required', 'text/plain', 400)
try:
graph = self._multiplexer.Graph(run)
except ValueError:
- return http.Respond(request, '404 Not Found', code=404)
+ return http_util.Respond(request, '404 Not Found', code=404)
limit_attr_size = request.args.get('limit_attr_size', None)
if limit_attr_size is not None:
try:
limit_attr_size = int(limit_attr_size)
except ValueError:
- return http.Respond(request,
- 'query parameter `limit_attr_size` must be integer',
- 'text/plain', 400)
+ return http_util.Respond(
+ request, 'query parameter `limit_attr_size` must be integer',
+ 'text/plain', 400)
large_attrs_key = request.args.get('large_attrs_key', None)
try:
process_graph.prepare_graph_for_ui(graph, limit_attr_size,
large_attrs_key)
except ValueError as e:
- return http.Respond(request, e.message, 'text/plain', 400)
+ return http_util.Respond(request, e.message, 'text/plain', 400)
- return http.Respond(request, str(graph), 'text/x-protobuf') # pbtxt
+ return http_util.Respond(request, str(graph), 'text/x-protobuf') # pbtxt
@wrappers.Request.application
def _serve_run_metadata(self, request):
@@ -312,16 +313,17 @@ class TensorBoardWSGIApp(object):
tag = request.args.get('tag', None)
run = request.args.get('run', None)
if tag is None:
- return http.Respond(request, 'query parameter "tag" is required',
- 'text/plain', 400)
+ return http_util.Respond(
+ request, 'query parameter "tag" is required', 'text/plain', 400)
if run is None:
- return http.Respond(request, 'query parameter "run" is required',
- 'text/plain', 400)
+ return http_util.Respond(
+ request, 'query parameter "run" is required', 'text/plain', 400)
try:
run_metadata = self._multiplexer.RunMetadata(run, tag)
except ValueError:
- return http.Respond(request, '404 Not Found', code=404)
- return http.Respond(request, str(run_metadata), 'text/x-protobuf') # pbtxt
+ return http_util.Respond(request, '404 Not Found', code=404)
+ return http_util.Respond(
+ request, str(run_metadata), 'text/x-protobuf') # pbtxt
@wrappers.Request.application
def _serve_histograms(self, request):
@@ -329,7 +331,7 @@ class TensorBoardWSGIApp(object):
tag = request.args.get('tag')
run = request.args.get('run')
values = self._multiplexer.Histograms(run, tag)
- return http.Respond(request, values, 'application/json')
+ return http_util.Respond(request, values, 'application/json')
@wrappers.Request.application
def _serve_compressed_histograms(self, request):
@@ -355,9 +357,10 @@ class TensorBoardWSGIApp(object):
for value in compressed_histogram.compressed_histogram_values:
row += [value.rank_in_bps, value.value]
writer.writerow(row)
- return http.Respond(request, string_io.getvalue(), 'text/csv')
+ return http_util.Respond(request, string_io.getvalue(), 'text/csv')
else:
- return http.Respond(request, compressed_histograms, 'application/json')
+ return http_util.Respond(
+ request, compressed_histograms, 'application/json')
@wrappers.Request.application
def _serve_images(self, request):
@@ -379,7 +382,7 @@ class TensorBoardWSGIApp(object):
images = self._multiplexer.Images(run, tag)
response = self._image_response_for_run(images, run, tag)
- return http.Respond(request, response, 'application/json')
+ return http_util.Respond(request, response, 'application/json')
@wrappers.Request.application
def _serve_image(self, request):
@@ -390,7 +393,7 @@ class TensorBoardWSGIApp(object):
image = self._multiplexer.Images(run, tag)[index]
encoded_image_string = image.encoded_image_string
content_type = _content_type_for_image(encoded_image_string)
- return http.Respond(request, encoded_image_string, content_type)
+ return http_util.Respond(request, encoded_image_string, content_type)
def _query_for_individual_image(self, run, tag, index):
"""Builds a URL for accessing the specified image.
@@ -435,7 +438,7 @@ class TensorBoardWSGIApp(object):
audio_list = self._multiplexer.Audio(run, tag)
response = self._audio_response_for_run(audio_list, run, tag)
- return http.Respond(request, response, 'application/json')
+ return http_util.Respond(request, response, 'application/json')
@wrappers.Request.application
def _serve_individual_audio(self, request):
@@ -444,7 +447,8 @@ class TensorBoardWSGIApp(object):
run = request.args.get('run')
index = int(request.args.get('index'))
audio = self._multiplexer.Audio(run, tag)[index]
- return http.Respond(request, audio.encoded_audio_string, audio.content_type)
+ return http_util.Respond(
+ request, audio.encoded_audio_string, audio.content_type)
def _query_for_individual_audio(self, run, tag, index):
"""Builds a URL for accessing the specified audio.
@@ -495,7 +499,7 @@ class TensorBoardWSGIApp(object):
logging.warning('Unable to get first event timestamp for run %s',
run_name)
run_data['firstEventTimestamp'] = None
- return http.Respond(request, runs, 'application/json')
+ return http_util.Respond(request, runs, 'application/json')
@wrappers.Request.application
def _serve_index(self, request):
@@ -521,7 +525,7 @@ class TensorBoardWSGIApp(object):
orig_path = path.lstrip('/')
if not self._path_is_safe(orig_path):
logging.warning('path not safe: %s', orig_path)
- return http.Respond(request, 'Naughty naughty!', 'text/plain', 400)
+ return http_util.Respond(request, 'Naughty naughty!', 'text/plain', 400)
# Resource loader wants a path relative to //WORKSPACE/tensorflow.
path = os.path.join('tensorboard', orig_path)
# Open the file and read it.
@@ -544,10 +548,10 @@ class TensorBoardWSGIApp(object):
contents = resource_loader.load_resource(path)
except IOError:
logging.info('path %s not found, sending 404', path)
- return http.Respond(request, 'Not found', 'text/plain', code=404)
+ return http_util.Respond(request, 'Not found', 'text/plain', code=404)
mimetype, content_encoding = mimetypes.guess_type(path)
mimetype = mimetype or 'application/octet-stream'
- return http.Respond(
+ return http_util.Respond(
request,
contents,
mimetype,
diff --git a/tensorflow/tensorboard/lib/python/BUILD b/tensorflow/tensorboard/lib/python/BUILD
index afbfb62d7c..740355f5ac 100644
--- a/tensorflow/tensorboard/lib/python/BUILD
+++ b/tensorflow/tensorboard/lib/python/BUILD
@@ -8,8 +8,8 @@ licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
py_library(
- name = "http",
- srcs = ["http.py"],
+ name = "http_util",
+ srcs = ["http_util.py"],
srcs_version = "PY2AND3",
visibility = ["//visibility:public"],
deps = [
@@ -21,12 +21,12 @@ py_library(
)
py_test(
- name = "http_test",
+ name = "http_util_test",
size = "small",
- srcs = ["http_test.py"],
+ srcs = ["http_util_test.py"],
srcs_version = "PY2AND3",
deps = [
- ":http",
+ ":http_util",
"//tensorflow/python:client_testlib",
"@org_pocoo_werkzeug//:werkzeug",
"@six_archive//:six",
diff --git a/tensorflow/tensorboard/lib/python/http_util.py b/tensorflow/tensorboard/lib/python/http_util.py
new file mode 100644
index 0000000000..7110178bbc
--- /dev/null
+++ b/tensorflow/tensorboard/lib/python/http_util.py
@@ -0,0 +1,149 @@
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""TensorBoard HTTP utilities."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import gzip
+import json
+import re
+import time
+import wsgiref.handlers
+
+import six
+
+from werkzeug import wrappers
+
+from tensorflow.python.util import compat
+from tensorflow.tensorboard.lib.python import json_util
+
+
+_EXTRACT_MIMETYPE_PATTERN = re.compile(r'^[^;\s]*')
+_EXTRACT_CHARSET_PATTERN = re.compile(r'charset=([-_0-9A-Za-z]+)')
+
+# Allows *, gzip or x-gzip, but forbid gzip;q=0
+# https://tools.ietf.org/html/rfc7231#section-5.3.4
+_ALLOWS_GZIP_PATTERN = re.compile(
+ r'(?:^|,|\s)(?:(?:x-)?gzip|\*)(?!;q=0)(?:\s|,|$)')
+
+_TEXTUAL_MIMETYPES = set([
+ 'application/javascript',
+ 'application/json',
+ 'application/json+protobuf',
+ 'image/svg+xml',
+ 'text/css',
+ 'text/csv',
+ 'text/html',
+ 'text/plain',
+ 'text/tab-separated-values',
+ 'text/x-protobuf',
+])
+
+_JSON_MIMETYPES = set([
+ 'application/json',
+ 'application/json+protobuf',
+])
+
+
+def Respond(request,
+ content,
+ content_type,
+ code=200,
+ expires=0,
+ content_encoding=None,
+ encoding='utf-8'):
+ """Construct a werkzeug Response.
+
+ Responses are transmitted to the browser with compression if: a) the browser
+ supports it; b) it's sane to compress the content_type in question; and c)
+ the content isn't already compressed, as indicated by the content_encoding
+ parameter.
+
+ Browser and proxy caching is completely disabled by default. If the expires
+ parameter is greater than zero then the response will be able to be cached by
+ the browser for that many seconds; however, proxies are still forbidden from
+ caching so that developers can bypass the cache with Ctrl+Shift+R.
+
+ For textual content that isn't JSON, the encoding parameter is used as the
+ transmission charset which is automatically appended to the Content-Type
+ header. That is unless of course the content_type parameter contains a
+ charset parameter. If the two disagree, the characters in content will be
+ transcoded to the latter.
+
+ If content_type declares a JSON media type, then content MAY be a dict, list,
+ tuple, or set, in which case this function has an implicit composition with
+ json_util.Cleanse and json.dumps. The encoding parameter is used to decode
+ byte strings within the JSON object; therefore transmitting binary data
+ within JSON is not permitted. JSON is transmitted as ASCII unless the
+ content_type parameter explicitly defines a charset parameter, in which case
+ the serialized JSON bytes will use that instead of escape sequences.
+
+ Args:
+ request: A werkzeug Request object. Used mostly to check the
+ Accept-Encoding header.
+ content: Payload data as byte string, unicode string, or maybe JSON.
+ content_type: Media type and optionally an output charset.
+ code: Numeric HTTP status code to use.
+ expires: Second duration for browser caching.
+ content_encoding: Encoding if content is already encoded, e.g. 'gzip'.
+ encoding: Input charset if content parameter has byte strings.
+
+ Returns:
+ A werkzeug Response object (a WSGI application).
+ """
+
+ mimetype = _EXTRACT_MIMETYPE_PATTERN.search(content_type).group(0)
+ charset_match = _EXTRACT_CHARSET_PATTERN.search(content_type)
+ charset = charset_match.group(1) if charset_match else encoding
+ textual = charset_match or mimetype in _TEXTUAL_MIMETYPES
+ if mimetype in _JSON_MIMETYPES and (isinstance(content, dict) or
+ isinstance(content, list) or
+ isinstance(content, set) or
+ isinstance(content, tuple)):
+ content = json.dumps(json_util.Cleanse(content, encoding),
+ ensure_ascii=not charset_match)
+ if charset != encoding:
+ content = compat.as_text(content, encoding)
+ content = compat.as_bytes(content, charset)
+ if textual and not charset_match and mimetype not in _JSON_MIMETYPES:
+ content_type += '; charset=' + charset
+ if (not content_encoding and textual and
+ _ALLOWS_GZIP_PATTERN.search(request.headers.get('Accept-Encoding', ''))):
+ out = six.BytesIO()
+ f = gzip.GzipFile(fileobj=out, mode='wb', compresslevel=3)
+ f.write(content)
+ f.close()
+ content = out.getvalue()
+ content_encoding = 'gzip'
+ if request.method == 'HEAD':
+ content = ''
+ headers = []
+
+ headers.append(('Content-Length', str(len(content))))
+ if content_encoding:
+ headers.append(('Content-Encoding', content_encoding))
+ if expires > 0:
+ e = wsgiref.handlers.format_date_time(time.time() + float(expires))
+ headers.append(('Expires', e))
+ headers.append(('Cache-Control', 'private, max-age=%d' % expires))
+ else:
+ headers.append(('Expires', '0'))
+ headers.append(('Cache-Control', 'no-cache, must-revalidate'))
+
+ return wrappers.Response(
+ response=content, status=code, headers=headers, content_type=content_type)
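
To illustrate the behavior the docstring above describes, the helper can be exercised with a synthetic werkzeug request outside of any server. A minimal sketch, with a placeholder logdir value (the tests that follow cover the same ground more thoroughly):

from werkzeug import test as wtest
from werkzeug import wrappers

from tensorflow.tensorboard.lib.python import http_util

# Synthetic request whose client advertises gzip support.
environ = wtest.EnvironBuilder(headers={'Accept-Encoding': 'gzip'}).get_environ()
request = wrappers.Request(environ)

# Dicts, lists, tuples, and sets are run through json_util.Cleanse and
# json.dumps for JSON media types, and the textual payload is gzipped
# because the Accept-Encoding header above allows it.
response = http_util.Respond(request, {'logdir': '/tmp/logs'}, 'application/json')
assert response.headers.get('Content-Encoding') == 'gzip'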
diff --git a/tensorflow/tensorboard/lib/python/http_util_test.py b/tensorflow/tensorboard/lib/python/http_util_test.py
new file mode 100644
index 0000000000..a2a7a1b3d2
--- /dev/null
+++ b/tensorflow/tensorboard/lib/python/http_util_test.py
@@ -0,0 +1,156 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests HTTP utilities."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import gzip
+
+import six
+from werkzeug import test as wtest
+from werkzeug import wrappers
+from tensorflow.python.platform import test
+from tensorflow.tensorboard.lib.python import http_util
+
+
+class RespondTest(test.TestCase):
+
+ def testHelloWorld(self):
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, '<b>hello world</b>', 'text/html')
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.response[0], six.b('<b>hello world</b>'))
+
+ def testHeadRequest_doesNotWrite(self):
+ builder = wtest.EnvironBuilder(method='HEAD')
+ env = builder.get_environ()
+ request = wrappers.Request(env)
+ r = http_util.Respond(request, '<b>hello world</b>', 'text/html')
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.response[0], six.b(''))
+
+ def testPlainText_appendsUtf8ToContentType(self):
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, 'hello', 'text/plain')
+ h = r.headers
+ self.assertEqual(h.get('Content-Type'), 'text/plain; charset=utf-8')
+
+ def testContentLength_isInBytes(self):
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, '爱', 'text/plain')
+ self.assertEqual(r.headers.get('Content-Length'), '3')
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, '爱'.encode('utf-8'), 'text/plain')
+ self.assertEqual(r.headers.get('Content-Length'), '3')
+
+ def testResponseCharsetTranscoding(self):
+ bean = '要依法治国是赞美那些谁是公义的和惩罚恶人。 - 韩非'
+
+ # input is unicode string, output is gbk string
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, bean, 'text/plain; charset=gbk')
+ self.assertEqual(r.response[0], bean.encode('gbk'))
+
+ # input is utf-8 string, output is gbk string
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, bean.encode('utf-8'), 'text/plain; charset=gbk')
+ self.assertEqual(r.response[0], bean.encode('gbk'))
+
+ # input is object with unicode strings, output is gbk json
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, {'red': bean}, 'application/json; charset=gbk')
+ self.assertEqual(r.response[0], b'{"red": "' + bean.encode('gbk') + b'"}')
+
+ # input is object with utf-8 strings, output is gbk json
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(
+ q, {'red': bean.encode('utf-8')}, 'application/json; charset=gbk')
+ self.assertEqual(r.response[0], b'{"red": "' + bean.encode('gbk') + b'"}')
+
+ # input is object with gbk strings, output is gbk json
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(
+ q, {'red': bean.encode('gbk')},
+ 'application/json; charset=gbk',
+ encoding='gbk')
+ self.assertEqual(r.response[0], b'{"red": "' + bean.encode('gbk') + b'"}')
+
+ def testAcceptGzip_compressesResponse(self):
+ fall_of_hyperion_canto1_stanza1 = '\n'.join([
+ 'Fanatics have their dreams, wherewith they weave',
+ 'A paradise for a sect; the savage too',
+ 'From forth the loftiest fashion of his sleep',
+ 'Guesses at Heaven; pity these have not',
+ 'Trac\'d upon vellum or wild Indian leaf',
+ 'The shadows of melodious utterance.',
+ 'But bare of laurel they live, dream, and die;',
+ 'For Poesy alone can tell her dreams,',
+ 'With the fine spell of words alone can save',
+ 'Imagination from the sable charm',
+ 'And dumb enchantment. Who alive can say,',
+ '\'Thou art no Poet may\'st not tell thy dreams?\'',
+ 'Since every man whose soul is not a clod',
+ 'Hath visions, and would speak, if he had loved',
+ 'And been well nurtured in his mother tongue.',
+ 'Whether the dream now purpos\'d to rehearse',
+ 'Be poet\'s or fanatic\'s will be known',
+ 'When this warm scribe my hand is in the grave.',
+ ])
+
+ e1 = wtest.EnvironBuilder(headers={'Accept-Encoding': '*'}).get_environ()
+ any_encoding = wrappers.Request(e1)
+
+ r = http_util.Respond(
+ any_encoding, fall_of_hyperion_canto1_stanza1, 'text/plain')
+ self.assertEqual(r.headers.get('Content-Encoding'), 'gzip')
+
+ self.assertEqual(
+ _gunzip(r.response[0]), fall_of_hyperion_canto1_stanza1.encode('utf-8'))
+
+ e2 = wtest.EnvironBuilder(headers={'Accept-Encoding': 'gzip'}).get_environ()
+ gzip_encoding = wrappers.Request(e2)
+
+ r = http_util.Respond(
+ gzip_encoding, fall_of_hyperion_canto1_stanza1, 'text/plain')
+ self.assertEqual(r.headers.get('Content-Encoding'), 'gzip')
+ self.assertEqual(
+ _gunzip(r.response[0]), fall_of_hyperion_canto1_stanza1.encode('utf-8'))
+
+ r = http_util.Respond(
+ any_encoding, fall_of_hyperion_canto1_stanza1, 'image/png')
+ self.assertEqual(
+ r.response[0], fall_of_hyperion_canto1_stanza1.encode('utf-8'))
+
+ def testJson_getsAutoSerialized(self):
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, [1, 2, 3], 'application/json')
+ self.assertEqual(r.response[0], b'[1, 2, 3]')
+
+ def testExpires_setsCruiseControl(self):
+ q = wrappers.Request(wtest.EnvironBuilder().get_environ())
+ r = http_util.Respond(q, '<b>hello world</b>', 'text/html', expires=60)
+ self.assertEqual(r.headers.get('Cache-Control'), 'private, max-age=60')
+
+
+def _gunzip(bs):
+ return gzip.GzipFile('', 'rb', 9, six.BytesIO(bs)).read()
+
+
+if __name__ == '__main__':
+ test.main()
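
One part of the Respond contract the tests above only partially exercise is caching: responses are uncacheable by default, and expires > 0 allows browser caching while 'private' still keeps proxies out. A small sketch of both branches (payload and values are illustrative):

from werkzeug import test as wtest
from werkzeug import wrappers

from tensorflow.tensorboard.lib.python import http_util

q = wrappers.Request(wtest.EnvironBuilder().get_environ())

# Default (expires=0): caching is disabled for browsers and proxies alike.
r = http_util.Respond(q, 'hello', 'text/plain')
# -> Expires: 0, Cache-Control: no-cache, must-revalidate

# expires > 0: the browser may cache for that long, but 'private' still
# forbids proxy caching, so Ctrl+Shift+R bypasses stale copies.
r = http_util.Respond(q, 'hello', 'text/plain', expires=60)
assert r.headers.get('Cache-Control') == 'private, max-age=60'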
diff --git a/tensorflow/tensorboard/plugins/projector/plugin.py b/tensorflow/tensorboard/plugins/projector/plugin.py
index 8629d8506f..a4cb488c98 100644
--- a/tensorflow/tensorboard/plugins/projector/plugin.py
+++ b/tensorflow/tensorboard/plugins/projector/plugin.py
@@ -33,7 +33,7 @@ from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.pywrap_tensorflow import NewCheckpointReader
from tensorflow.python.training.saver import checkpoint_exists
from tensorflow.python.training.saver import latest_checkpoint
-from tensorflow.tensorboard.lib.python.http import Respond
+from tensorflow.tensorboard.lib.python.http_util import Respond
from tensorflow.tensorboard.plugins.base_plugin import TBPlugin
# HTTP routes.