path: root/tensorflow/contrib/quantize
author     Suharsh Sivakumar <suharshs@google.com>        2018-03-15 23:56:10 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>    2018-03-15 23:59:53 -0700
commit     bd1dac4cba7831b327d0c9410e494ee7cf2b93a5 (patch)
tree       37a2f41339382858bc81bb874a09688c8c58b411 /tensorflow/contrib/quantize
parent     17024c0afd19e3713ab5016602372c5244e11183 (diff)
Don't put quantization variables in EMA collection by default.
PiperOrigin-RevId: 189302082
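A minimal usage sketch of the effect of this change (hypothetical example, not part of the commit; the toy model and the variable-name filter are assumptions): after the rewrite, the quantization min/max variables land in the GLOBAL_VARIABLES collection by default, so they are initialized and checkpointed with the rest of the model without passing an explicit vars_collection.

    # Hypothetical sketch (TF 1.x contrib API); the toy model below is a stand-in.
    import tensorflow as tf

    g = tf.Graph()
    with g.as_default():
      images = tf.placeholder(tf.float32, [None, 28, 28, 1])
      net = tf.nn.relu(tf.layers.conv2d(images, 8, 3))  # stand-in for a real model

      # Rewrite the graph in place with fake-quantization ops.
      tf.contrib.quantize.create_training_graph(input_graph=g, quant_delay=2000)

      # After this change, the variables added by the rewrite are found in
      # GLOBAL_VARIABLES rather than MOVING_AVERAGE_VARIABLES.
      quant_vars = [v for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
                    if 'min' in v.name or 'max' in v.name]
      print(quant_vars)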
Diffstat (limited to 'tensorflow/contrib/quantize')
-rw-r--r--  tensorflow/contrib/quantize/python/quantize.py        | 4
-rw-r--r--  tensorflow/contrib/quantize/python/quantize_graph.py  | 8
2 files changed, 10 insertions, 2 deletions
diff --git a/tensorflow/contrib/quantize/python/quantize.py b/tensorflow/contrib/quantize/python/quantize.py
index 0608ab9302..6cc097b20e 100644
--- a/tensorflow/contrib/quantize/python/quantize.py
+++ b/tensorflow/contrib/quantize/python/quantize.py
@@ -44,7 +44,7 @@ def Quantize(graph,
activation_bits=8,
ema_decay=0.999,
quant_delay=None,
- vars_collection=ops.GraphKeys.MOVING_AVERAGE_VARIABLES):
+ vars_collection=ops.GraphKeys.GLOBAL_VARIABLES):
"""Updates graph with quantization operations.
Args:
@@ -262,7 +262,7 @@ def _InsertQuantOp(context,
bits=8,
ema_decay=0.999,
quant_delay=None,
- vars_collection=ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
+ vars_collection=ops.GraphKeys.GLOBAL_VARIABLES,
narrow_range=False):
"""Inserts a quant op between a producer op and (multiple) consumer ops.
diff --git a/tensorflow/contrib/quantize/python/quantize_graph.py b/tensorflow/contrib/quantize/python/quantize_graph.py
index 5a3a74cec4..be4fc39651 100644
--- a/tensorflow/contrib/quantize/python/quantize_graph.py
+++ b/tensorflow/contrib/quantize/python/quantize_graph.py
@@ -72,6 +72,8 @@ def _create_graph(input_graph=None,
def create_training_graph(input_graph=None, quant_delay=0):
"""Rewrites a training input_graph in place for simulated quantization.
+ Variables added by the rewrite get added to the global variables collection.
+
The graph has fake quantization ops inserted to simulate the error
introduced by quantization. Since the graph is transformed in place,
the expected behavior of previously held references to nodes and tensors may
@@ -118,6 +120,8 @@ def create_training_graph(input_graph=None, quant_delay=0):
def create_eval_graph(input_graph=None):
"""Rewrites an eval input_graph in place for simulated quantization.
+ Variables added by the rewrite get added to the global variables collection.
+
The graph has fake quantization ops inserted to simulate the error
introduced by quantization. Since the graph is transformed in place,
the expected behavior of previously held references to nodes and tensors may
@@ -141,6 +145,8 @@ def experimental_create_training_graph(input_graph=None,
freeze_bn_delay=int(2e5)):
"""Rewrites a training input_graph in place for simulated quantization.
+ Variables added by the rewrite get added to the global variables collection.
+
This function has additional experimental options not (yet) available to
create_training_graph. The resulting behavior may be undefined.
@@ -188,6 +194,8 @@ def experimental_create_eval_graph(input_graph=None,
activation_bits=8):
"""Rewrites an eval input_graph in place for simulated quantization.
+ Variables added by the rewrite get added to the global variables collection.
+
This function has additional experimental options not (yet) available to
create_eval_graph. The resulting behavior may be undefined.
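One practical consequence, sketched below (hypothetical export code, not part of the commit; the model and checkpoint path are assumptions): because the rewrite's variables now default to GLOBAL_VARIABLES, a plain tf.train.Saver picks up the learned quantization ranges when restoring an eval graph for export.

    # Hypothetical eval/export sketch (TF 1.x contrib API).
    import tensorflow as tf

    eval_graph = tf.Graph()
    with eval_graph.as_default():
      images = tf.placeholder(tf.float32, [None, 28, 28, 1])
      net = tf.nn.relu(tf.layers.conv2d(images, 8, 3))  # stand-in for the eval model

      # Insert fake-quant ops for inference; the added variables go to GLOBAL_VARIABLES.
      tf.contrib.quantize.create_eval_graph(input_graph=eval_graph)

      # With no explicit var_list, the Saver covers the global variables, which now
      # include the quantization min/max variables added by the rewrite.
      saver = tf.train.Saver()

    with tf.Session(graph=eval_graph) as sess:
      saver.restore(sess, '/tmp/quant_ckpt')  # hypothetical checkpoint path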