-rw-r--r--  README.md | 2
-rw-r--r--  RELEASE.md | 6
-rw-r--r--  tensorflow/c/c_api.cc | 63
-rw-r--r--  tensorflow/cc/BUILD | 2
-rw-r--r--  tensorflow/compiler/tests/BUILD | 13
-rw-r--r--  tensorflow/compiler/tests/conv2d_test.py | 315
-rw-r--r--  tensorflow/compiler/tests/fused_batchnorm_test.py | 50
-rw-r--r--  tensorflow/compiler/tests/pooling_ops_3d_test.py | 79
-rw-r--r--  tensorflow/compiler/tests/test_utils.py | 63
-rw-r--r--  tensorflow/compiler/tests/variable_ops_test.py | 215
-rw-r--r--  tensorflow/compiler/tf2xla/kernels/pooling_ops.cc | 13
-rw-r--r--  tensorflow/compiler/tf2xla/kernels/stateless_random_ops.cc | 2
-rw-r--r--  tensorflow/compiler/tf2xla/kernels/unary_ops.cc | 10
-rw-r--r--  tensorflow/compiler/tf2xla/kernels/variable_ops.cc | 178
-rw-r--r--  tensorflow/compiler/tf2xla/lib/random.cc | 2
-rw-r--r--  tensorflow/compiler/tf2xla/xla_op_kernel.cc | 9
-rw-r--r--  tensorflow/compiler/tf2xla/xla_op_registry_test.cc | 2
-rw-r--r--  tensorflow/compiler/tf2xla/xla_resource.h | 2
-rw-r--r--  tensorflow/compiler/xla/client/lib/arithmetic.cc | 51
-rw-r--r--  tensorflow/compiler/xla/client/lib/arithmetic.h | 14
-rw-r--r--  tensorflow/compiler/xla/client/xla_client/xla_builder.cc | 9
-rw-r--r--  tensorflow/compiler/xla/client/xla_client/xla_builder.h | 17
-rw-r--r--  tensorflow/compiler/xla/python/xla_client.py | 5
-rw-r--r--  tensorflow/compiler/xla/service/algebraic_simplifier.cc | 27
-rw-r--r--  tensorflow/compiler/xla/service/algebraic_simplifier_test.cc | 27
-rw-r--r--  tensorflow/contrib/autograph/README.md | 2
-rw-r--r--  tensorflow/contrib/bayesflow/python/kernel_tests/monte_carlo_test.py | 45
-rw-r--r--  tensorflow/contrib/checkpoint/__init__.py | 4
-rw-r--r--  tensorflow/contrib/checkpoint/python/containers_test.py | 16
-rw-r--r--  tensorflow/contrib/checkpoint/python/split_dependency_test.py | 19
-rw-r--r--  tensorflow/contrib/coder/python/layers/entropybottleneck.py | 8
-rw-r--r--  tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py | 2
-rw-r--r--  tensorflow/contrib/data/__init__.py | 2
-rw-r--r--  tensorflow/contrib/data/python/kernel_tests/batch_dataset_op_test.py | 74
-rw-r--r--  tensorflow/contrib/data/python/kernel_tests/bucketing_test.py | 22
-rw-r--r--  tensorflow/contrib/eager/README.md | 2
-rw-r--r--  tensorflow/contrib/eager/python/examples/notebooks/3_datasets.ipynb | 6
-rw-r--r--  tensorflow/contrib/eager/python/g3doc/guide.md | 4
-rw-r--r--  tensorflow/contrib/eager/python/tfe.py | 2
-rw-r--r--  tensorflow/contrib/keras/api/keras/layers/__init__.py | 8
-rw-r--r--  tensorflow/contrib/layers/python/layers/layers.py | 10
-rw-r--r--  tensorflow/contrib/lite/g3doc/apis.md | 3
-rw-r--r--  tensorflow/contrib/lite/java/aar_with_jni.bzl | 4
-rw-r--r--  tensorflow/contrib/lite/kernels/lstm.cc | 104
-rw-r--r--  tensorflow/contrib/lite/python/convert.py | 29
-rw-r--r--  tensorflow/contrib/lite/python/lite.py | 10
-rw-r--r--  tensorflow/contrib/lite/python/tflite_convert.py | 35
-rw-r--r--  tensorflow/contrib/lite/toco/README.md | 2
-rw-r--r--  tensorflow/contrib/lite/toco/g3doc/cmdline_examples.md | 347
-rw-r--r--  tensorflow/contrib/lite/toco/g3doc/cmdline_reference.md | 282
-rw-r--r--  tensorflow/contrib/lite/toco/g3doc/python_api.md | 22
-rw-r--r--  tensorflow/contrib/lite/toco/g3doc/toco_landscape.svg | 2
-rw-r--r--  tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py | 101
-rw-r--r--  tensorflow/contrib/tpu/python/tpu/tpu_estimator.py | 2
-rw-r--r--  tensorflow/core/common_runtime/constant_folding.cc | 69
-rw-r--r--  tensorflow/core/common_runtime/constant_folding_test.cc | 36
-rw-r--r--  tensorflow/core/framework/kernel_def.proto | 5
-rw-r--r--  tensorflow/core/framework/op_kernel.cc | 10
-rw-r--r--  tensorflow/core/framework/op_kernel.h | 4
-rw-r--r--  tensorflow/core/framework/op_kernel_test.cc | 2
-rw-r--r--  tensorflow/core/graph/validate.cc | 54
-rw-r--r--  tensorflow/core/graph/validate.h | 9
-rw-r--r--  tensorflow/core/kernels/batching_util/serial_device_batch_scheduler_test.cc | 32
-rw-r--r--  tensorflow/core/kernels/bias_op.cc | 53
-rw-r--r--  tensorflow/core/kernels/bias_op_gpu.h | 52
-rw-r--r--  tensorflow/core/kernels/data/map_and_batch_dataset_op.cc | 80
-rw-r--r--  tensorflow/core/kernels/inplace_ops.cc | 2
-rw-r--r--  tensorflow/core/kernels/matmul_op.cc | 29
-rw-r--r--  tensorflow/core/ops/compat/ops_history.v1.pbtxt | 1051
-rw-r--r--  tensorflow/core/ops/nn_ops.cc | 2
-rw-r--r--  tensorflow/core/ops/ops.pbtxt | 39
-rw-r--r--  tensorflow/core/protobuf/config.proto | 6
-rw-r--r--  tensorflow/core/util/tensor_format.cc | 2
-rw-r--r--  tensorflow/docs_src/api_guides/python/client.md | 2
-rw-r--r--  tensorflow/docs_src/api_guides/python/input_dataset.md | 3
-rw-r--r--  tensorflow/docs_src/api_guides/python/reading_data.md | 8
-rw-r--r--  tensorflow/docs_src/deploy/distributed.md | 2
-rw-r--r--  tensorflow/docs_src/extend/architecture.md | 5
-rw-r--r--  tensorflow/docs_src/get_started/_index.yaml | 12
-rw-r--r--  tensorflow/docs_src/get_started/next_steps.md | 4
-rw-r--r--  tensorflow/docs_src/guide/checkpoints.md (renamed from tensorflow/docs_src/programmers_guide/checkpoints.md) | 8
-rw-r--r--  tensorflow/docs_src/guide/custom_estimators.md (renamed from tensorflow/docs_src/programmers_guide/custom_estimators.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/datasets.md (renamed from tensorflow/docs_src/programmers_guide/datasets.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/datasets_for_estimators.md (renamed from tensorflow/docs_src/programmers_guide/datasets_for_estimators.md) | 8
-rw-r--r--  tensorflow/docs_src/guide/debugger.md (renamed from tensorflow/docs_src/programmers_guide/debugger.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/eager.md (renamed from tensorflow/docs_src/programmers_guide/eager.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/embedding.md (renamed from tensorflow/docs_src/programmers_guide/embedding.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/estimators.md (renamed from tensorflow/docs_src/programmers_guide/estimators.md) | 2
-rw-r--r--  tensorflow/docs_src/guide/faq.md (renamed from tensorflow/docs_src/programmers_guide/faq.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/feature_columns.md (renamed from tensorflow/docs_src/programmers_guide/feature_columns.md) | 2
-rw-r--r--  tensorflow/docs_src/guide/graph_viz.md (renamed from tensorflow/docs_src/programmers_guide/graph_viz.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/graphs.md (renamed from tensorflow/docs_src/programmers_guide/graphs.md) | 2
-rw-r--r--  tensorflow/docs_src/guide/index.md (renamed from tensorflow/docs_src/programmers_guide/index.md) | 34
-rw-r--r--  tensorflow/docs_src/guide/keras.md (renamed from tensorflow/docs_src/programmers_guide/keras.md) | 28
-rw-r--r--  tensorflow/docs_src/guide/leftnav_files (renamed from tensorflow/docs_src/programmers_guide/leftnav_files) | 0
-rw-r--r--  tensorflow/docs_src/guide/low_level_intro.md (renamed from tensorflow/docs_src/programmers_guide/low_level_intro.md) | 2
-rw-r--r--  tensorflow/docs_src/guide/premade_estimators.md (renamed from tensorflow/docs_src/programmers_guide/premade_estimators.md) | 8
-rw-r--r--  tensorflow/docs_src/guide/saved_model.md (renamed from tensorflow/docs_src/programmers_guide/saved_model.md) | 4
-rw-r--r--  tensorflow/docs_src/guide/summaries_and_tensorboard.md (renamed from tensorflow/docs_src/programmers_guide/summaries_and_tensorboard.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/tensorboard_histograms.md (renamed from tensorflow/docs_src/programmers_guide/tensorboard_histograms.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/tensors.md (renamed from tensorflow/docs_src/programmers_guide/tensors.md) | 4
-rw-r--r--  tensorflow/docs_src/guide/using_gpu.md (renamed from tensorflow/docs_src/programmers_guide/using_gpu.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/using_tpu.md (renamed from tensorflow/docs_src/programmers_guide/using_tpu.md) | 4
-rw-r--r--  tensorflow/docs_src/guide/variables.md (renamed from tensorflow/docs_src/programmers_guide/variables.md) | 0
-rw-r--r--  tensorflow/docs_src/guide/version_compat.md (renamed from tensorflow/docs_src/programmers_guide/version_compat.md) | 0
-rw-r--r--  tensorflow/docs_src/install/install_go.md | 2
-rw-r--r--  tensorflow/docs_src/install/install_java.md | 2
-rw-r--r--  tensorflow/docs_src/install/leftnav_files | 1
-rw-r--r--  tensorflow/docs_src/tutorials/deep_cnn.md | 2
-rw-r--r--  tensorflow/docs_src/tutorials/layers.md | 2
-rw-r--r--  tensorflow/examples/how_tos/reading_data/fully_connected_reader.py | 2
-rw-r--r--  tensorflow/java/README.md | 5
-rw-r--r--  tensorflow/java/src/main/java/org/tensorflow/package-info.java | 2
-rw-r--r--  tensorflow/python/data/__init__.py | 2
-rw-r--r--  tensorflow/python/data/ops/dataset_ops.py | 4
-rw-r--r--  tensorflow/python/debug/BUILD | 2
-rw-r--r--  tensorflow/python/debug/README.md | 4
-rw-r--r--  tensorflow/python/debug/examples/README.md | 4
-rw-r--r--  tensorflow/python/estimator/BUILD | 6
-rw-r--r--  tensorflow/python/estimator/estimator.py | 24
-rw-r--r--  tensorflow/python/estimator/estimator_test.py | 40
-rw-r--r--  tensorflow/python/estimator/keras.py | 2
-rw-r--r--  tensorflow/python/estimator/model_fn.py | 100
-rw-r--r--  tensorflow/python/estimator/model_fn_test.py | 21
-rw-r--r--  tensorflow/python/feature_column/feature_column.py | 123
-rw-r--r--  tensorflow/python/feature_column/feature_column_test.py | 10
-rwxr-xr-x  tensorflow/python/keras/BUILD | 2
-rw-r--r--  tensorflow/python/keras/applications/densenet.py | 4
-rw-r--r--  tensorflow/python/keras/applications/inception_resnet_v2.py | 4
-rw-r--r--  tensorflow/python/keras/applications/inception_v3.py | 4
-rw-r--r--  tensorflow/python/keras/applications/mobilenet.py | 6
-rw-r--r--  tensorflow/python/keras/applications/nasnet.py | 4
-rw-r--r--  tensorflow/python/keras/applications/resnet50.py | 3
-rw-r--r--  tensorflow/python/keras/applications/vgg16.py | 3
-rw-r--r--  tensorflow/python/keras/applications/vgg19.py | 3
-rw-r--r--  tensorflow/python/keras/applications/xception.py | 4
-rw-r--r--  tensorflow/python/keras/engine/__init__.py | 6
-rw-r--r--  tensorflow/python/keras/engine/network.py | 45
-rw-r--r--  tensorflow/python/keras/engine/sequential.py | 4
-rw-r--r--  tensorflow/python/keras/engine/topology_test.py | 80
-rw-r--r--  tensorflow/python/keras/layers/__init__.py | 8
-rw-r--r--  tensorflow/python/keras/layers/advanced_activations.py | 4
-rw-r--r--  tensorflow/python/keras/layers/convolutional.py | 4
-rw-r--r--  tensorflow/python/keras/layers/convolutional_recurrent.py | 4
-rw-r--r--  tensorflow/python/keras/layers/core.py | 4
-rw-r--r--  tensorflow/python/keras/layers/cudnn_recurrent.py | 2
-rw-r--r--  tensorflow/python/keras/layers/embeddings.py | 2
-rw-r--r--  tensorflow/python/keras/layers/local.py | 4
-rw-r--r--  tensorflow/python/keras/layers/noise.py | 2
-rw-r--r--  tensorflow/python/keras/layers/normalization.py | 4
-rw-r--r--  tensorflow/python/keras/layers/pooling.py | 4
-rw-r--r--  tensorflow/python/keras/layers/recurrent.py | 4
-rw-r--r--  tensorflow/python/keras/layers/serialization.py | 4
-rw-r--r--  tensorflow/python/keras/layers/wrappers.py | 4
-rw-r--r--  tensorflow/python/keras/optimizers.py | 2
-rw-r--r--  tensorflow/python/keras/utils/layer_utils.py | 41
-rw-r--r--  tensorflow/python/kernel_tests/distributions/beta_test.py | 13
-rw-r--r--  tensorflow/python/kernel_tests/distributions/dirichlet_test.py | 10
-rw-r--r--  tensorflow/python/kernel_tests/distributions/gamma_test.py | 13
-rw-r--r--  tensorflow/python/kernel_tests/distributions/student_t_test.py | 16
-rw-r--r--  tensorflow/python/ops/conv2d_benchmark.py | 114
-rw-r--r--  tensorflow/python/ops/distributions/beta.py | 24
-rw-r--r--  tensorflow/python/ops/distributions/dirichlet.py | 23
-rw-r--r--  tensorflow/python/ops/distributions/gamma.py | 24
-rw-r--r--  tensorflow/python/ops/distributions/student_t.py | 21
-rw-r--r--  tensorflow/python/ops/gradients_impl.py | 1
-rw-r--r--  tensorflow/python/ops/math_ops.py | 2
-rw-r--r--  tensorflow/python/ops/rnn_cell_impl.py | 3
-rw-r--r--  tensorflow/python/ops/script_ops.py | 2
-rw-r--r--  tensorflow/python/tools/saved_model_cli.py | 4
-rw-r--r--  tensorflow/python/training/checkpointable/BUILD | 17
-rw-r--r--  tensorflow/python/training/checkpointable/base.py | 58
-rw-r--r--  tensorflow/python/training/checkpointable/base_test.py | 32
-rw-r--r--  tensorflow/python/training/checkpointable/data_structures.py | 8
-rw-r--r--  tensorflow/python/training/checkpointable/data_structures_base.py | 27
-rw-r--r--  tensorflow/python/training/checkpointable/tracking.py | 103
-rw-r--r--  tensorflow/python/training/checkpointable/tracking_test.py | 49
-rw-r--r--  tensorflow/python/training/checkpointable/util.py | 25
-rw-r--r--  tensorflow/python/training/checkpointable/util_test.py | 105
-rw-r--r--  tensorflow/python/training/saver_test.py | 13
-rw-r--r--  tensorflow/tools/api/golden/tensorflow.train.-checkpoint.pbtxt | 2
-rw-r--r--  tensorflow/workspace.bzl | 8
-rw-r--r--  third_party/examples/eager/spinn/README.md | 2
-rw-r--r--  third_party/llvm/llvm.autogenerated.BUILD | 15
184 files changed, 3752 insertions(+), 1635 deletions(-)
diff --git a/README.md b/README.md
index 63853137cf..42d7bbc104 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ data flow graphs. The graph nodes represent mathematical operations, while
the graph edges represent the multidimensional data arrays (tensors) that flow
between them. This flexible architecture enables you to deploy computation to one
or more CPUs or GPUs in a desktop, server, or mobile device without rewriting
-code. TensorFlow also includes [TensorBoard](https://www.tensorflow.org/programmers_guide/summaries_and_tensorboard), a data visualization toolkit.
+code. TensorFlow also includes [TensorBoard](https://www.tensorflow.org/guide/summaries_and_tensorboard), a data visualization toolkit.
TensorFlow was originally developed by researchers and engineers
working on the Google Brain team within Google's Machine Intelligence Research
diff --git a/RELEASE.md b/RELEASE.md
index e09e9c6190..76c1401a01 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -465,7 +465,7 @@ answered questions, and were part of inspiring discussions.
## Major Features And Improvements
* `tf.keras` is now part of the core TensorFlow API.
-* [`tf.data`](http://tensorflow.org/programmers_guide/datasets) is now part of
+* [`tf.data`](http://tensorflow.org/guide/datasets) is now part of
the core TensorFlow API.
* The API is now subject to backwards compatibility guarantees.
* For a guide to migrating from the `tf.contrib.data` API, see the
@@ -485,7 +485,7 @@ answered questions, and were part of inspiring discussions.
* TensorFlow Debugger (tfdbg):
* Add `eval` command to allow evaluation of arbitrary Python/numpy expressions
in tfdbg command-line interface. See
- [Debugging TensorFlow Programs](https://www.tensorflow.org/programmers_guide/debugger)
+ [Debugging TensorFlow Programs](https://www.tensorflow.org/guide/debugger)
for more details.
* Usability improvement: The frequently used tensor filter `has_inf_or_nan` is
now added to `Session` wrappers and hooks by default. So there is no need
@@ -772,7 +772,7 @@ answered questions, and were part of inspiring discussions.
* Support client-provided ClusterSpec's and propagate them to all workers to enable the creation of dynamic TensorFlow clusters.
* TensorFlow C library now available for Windows.
* We released a new open-source version of TensorBoard.
-* [`SavedModel CLI`](https://www.tensorflow.org/versions/master/programmers_guide/saved_model_cli) tool available to inspect and execute MetaGraph in SavedModel
+* [`SavedModel CLI`](https://www.tensorflow.org/versions/master/guide/saved_model_cli) tool available to inspect and execute MetaGraph in SavedModel
* Android releases of TensorFlow are now pushed to jcenter for easier
integration into apps. See
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/android/README.md
diff --git a/tensorflow/c/c_api.cc b/tensorflow/c/c_api.cc
index 12f0d8bff4..9d5f98d4d6 100644
--- a/tensorflow/c/c_api.cc
+++ b/tensorflow/c/c_api.cc
@@ -45,6 +45,7 @@ limitations under the License.
#include "tensorflow/core/graph/graph.h"
#include "tensorflow/core/graph/graph_constructor.h"
#include "tensorflow/core/graph/node_builder.h"
+#include "tensorflow/core/graph/validate.h"
#include "tensorflow/core/lib/core/coding.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/status.h"
@@ -390,64 +391,6 @@ void TF_Reset_Helper(const TF_SessionOptions* opt, const char** containers,
status->status = Reset(opt->options, container_names);
}
-// This traverses the specified nodes in topological order to verify there are
-// no cycles. Starting with inputless nodes, it visits nodes whose inputs have
-// all been visited, and counts the total number of visited nodes. If there is a
-// cycle, nodes in the cycle will never be visited, and the visited count will
-// be less than the total node count.
-Status ValidateNoCycles(const Graph& g) {
- // TODO(nolivia): check this on a subset of the graph instead of all of it.
- // A node is ready when all of its inputs have been visited.
- std::vector<const Node*> ready;
- std::vector<int> pending_count(g.num_node_ids(), 0);
-
- for (int i = 0; i < g.num_node_ids(); ++i) {
- const Node* n = g.FindNodeId(i);
- if (n == nullptr) continue;
- pending_count[i] = n->in_edges().size();
- if (n->IsMerge()) {
- // While-loop cycles are legal cycles so we manually adjust the
- // pending_count to make sure that the loop is visited.
- for (const Edge* e : n->in_edges()) {
- if (!e->IsControlEdge() && e->src()->IsNextIteration()) {
- pending_count[i]--;
- }
- }
- }
- if (pending_count[i] == 0) {
- ready.push_back(n);
- }
- }
-
- int processed = 0;
- while (!ready.empty()) {
- const Node* node = ready.back();
- ready.pop_back();
- ++processed;
-
- for (const Edge* out : node->out_edges()) {
- const int output_id = out->dst()->id();
- pending_count[output_id]--;
- if (pending_count[output_id] == 0) {
- ready.push_back(out->dst());
- }
- }
- }
-
- if (processed < g.num_nodes()) {
- std::vector<string> nodes_in_cycle;
- for (int i = 0; i < pending_count.size() && nodes_in_cycle.size() < 3;
- ++i) {
- if (pending_count[i] != 0) {
- nodes_in_cycle.push_back(g.FindNodeId(i)->name());
- }
- }
- return errors::InvalidArgument(
- "Graph is invalid, contains a cycle with ", g.num_nodes() - processed,
- " nodes, including: ", str_util::Join(nodes_in_cycle, ", "));
- }
- return Status::OK();
-}
} // namespace
} // namespace tensorflow
@@ -746,7 +689,9 @@ bool ExtendSessionGraphHelper(TF_Session* session, TF_Status* status) {
const auto num_nodes = graph.num_node_ids();
if (session->last_num_graph_nodes < num_nodes) {
- status->status = tensorflow::ValidateNoCycles(session->graph->graph);
+ // TODO(nolivia): check this on a subset of the graph instead of all of
+ // it.
+ status->status = graph::ValidateGraphHasNoCycle(session->graph->graph);
if (!status->status.ok()) {
session->graph->mu.unlock();
return false;
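
The helper removed above implemented Kahn's algorithm: starting from inputless nodes, visit every node whose inputs have all been visited and count the visits; if the count falls short of the total node count, the unvisited remainder must contain a cycle. The patch replaces the local copy with `graph::ValidateGraphHasNoCycle` from `tensorflow/core/graph/validate.h`. For reference, here is a minimal standalone Python sketch of the same counting-based check (hypothetical names, ignoring the while-loop special case the C++ code handles for `NextIteration` edges):

```python
from collections import deque

def validate_no_cycles(graph):
  """Raises ValueError if `graph` (dict: node -> list of successors) has a cycle."""
  # A node is ready once all of its inputs have been visited.
  pending = {n: 0 for n in graph}          # in-degree of each node
  for succs in graph.values():
    for s in succs:
      pending[s] += 1
  ready = deque(n for n, c in pending.items() if c == 0)
  processed = 0
  while ready:
    node = ready.popleft()
    processed += 1
    for s in graph[node]:
      pending[s] -= 1
      if pending[s] == 0:
        ready.append(s)
  if processed < len(graph):
    # Nodes on a cycle are never visited, so their pending count stays > 0.
    cycle_nodes = [n for n, c in pending.items() if c != 0][:3]
    raise ValueError("Graph contains a cycle including: %s" % cycle_nodes)

validate_no_cycles({"a": ["b"], "b": ["c"], "c": []})   # passes
# validate_no_cycles({"a": ["b"], "b": ["a"]})          # raises ValueError
```
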
diff --git a/tensorflow/cc/BUILD b/tensorflow/cc/BUILD
index 079e063d3e..a98f0b00b2 100644
--- a/tensorflow/cc/BUILD
+++ b/tensorflow/cc/BUILD
@@ -530,7 +530,7 @@ cc_library_with_android_deps(
"//tensorflow/core/api_def:base_api_def",
],
deps = [
- "//tensorflow/core:framework",
+ "//tensorflow/core:framework_headers_lib",
"//tensorflow/core:lib",
"//tensorflow/core:lib_internal",
"//tensorflow/core:op_gen_lib",
diff --git a/tensorflow/compiler/tests/BUILD b/tensorflow/compiler/tests/BUILD
index 0426ab40e5..e72c409d65 100644
--- a/tensorflow/compiler/tests/BUILD
+++ b/tensorflow/compiler/tests/BUILD
@@ -51,6 +51,16 @@ py_library(
],
)
+py_library(
+ name = "test_utils",
+ testonly = 1,
+ srcs = ["test_utils.py"],
+ srcs_version = "PY2AND3",
+ deps = [
+ "//third_party/py/numpy",
+ ],
+)
+
py_test(
name = "xla_test_test",
size = "small",
@@ -247,6 +257,7 @@ tf_xla_py_test(
srcs = ["conv2d_test.py"],
shard_count = 10,
deps = [
+ ":test_utils",
":xla_test",
"//tensorflow/python:array_ops",
"//tensorflow/python:framework",
@@ -254,6 +265,7 @@ tf_xla_py_test(
"//tensorflow/python:nn_ops",
"//tensorflow/python:nn_ops_gen",
"//tensorflow/python:platform_test",
+ "@absl_py//absl/testing:parameterized",
],
)
@@ -758,6 +770,7 @@ tf_xla_py_test(
size = "medium",
srcs = ["fused_batchnorm_test.py"],
deps = [
+ ":test_utils",
":xla_test",
"//tensorflow/python:framework",
"//tensorflow/python:math_ops",
diff --git a/tensorflow/compiler/tests/conv2d_test.py b/tensorflow/compiler/tests/conv2d_test.py
index 62577b70ce..d12e1ff1e8 100644
--- a/tensorflow/compiler/tests/conv2d_test.py
+++ b/tensorflow/compiler/tests/conv2d_test.py
@@ -22,8 +22,10 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+from absl.testing import parameterized
import numpy as np
+from tensorflow.compiler.tests import test_utils
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
@@ -32,7 +34,15 @@ from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import googletest
-class Conv2DTest(XLATestCase):
+DATA_FORMATS = (
+ ("_data_format_NHWC", "NHWC"),
+ ("_data_format_NCHW", "NCHW"),
+ ("_data_format_HWNC", "HWNC"),
+ ("_data_format_HWCN", "HWCN"),
+)
+
+
+class Conv2DTest(XLATestCase, parameterized.TestCase):
def _VerifyValues(self,
input_sizes=None,
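
The `DATA_FORMATS` tuple above drives absl's `parameterized.named_parameters`, so every decorated test method runs once per data format, with the first tuple element appended to the generated test name. A hypothetical standalone sketch (not part of the patch) of the expansion mechanism:

```python
from absl.testing import parameterized

DATA_FORMATS = (
    ("_data_format_NHWC", "NHWC"),
    ("_data_format_NCHW", "NCHW"),
)

class FormatTest(parameterized.TestCase):

  @parameterized.named_parameters(*DATA_FORMATS)
  def testFormatLength(self, data_format):
    # Expands to testFormatLength_data_format_NHWC and
    # testFormatLength_data_format_NCHW.
    self.assertEqual(len(data_format), 4)
```
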
@@ -40,6 +50,8 @@ class Conv2DTest(XLATestCase):
strides=None,
dilations=None,
padding=None,
+ data_format_src="NHWC",
+ data_format_dst="NHWC",
expected=None):
"""Tests that tf.nn.conv2d produces the expected value.
@@ -51,8 +63,12 @@ class Conv2DTest(XLATestCase):
strides: Strides.
dilations: RHS dilations.
padding: Padding type.
+      data_format_src: Data format the input is in.
+      data_format_dst: Data format the verification runs in and the input is
+        converted to.
expected: Expected output.
"""
+
total_size_1 = np.prod(input_sizes)
total_size_2 = np.prod(filter_sizes)
x1 = np.arange(1, total_size_1 + 1, dtype=np.float32).reshape(input_sizes)
@@ -62,6 +78,18 @@ class Conv2DTest(XLATestCase):
dilations = [1, 1]
dilations = [1] + dilations + [1]
+ # Convert between data formats.
+ expected = test_utils.ConvertBetweenDataFormats(expected, data_format_src,
+ data_format_dst)
+ x1 = test_utils.ConvertBetweenDataFormats(x1, data_format_src,
+ data_format_dst)
+ input_sizes = test_utils.PermuteDimsBetweenDataFormats(
+ input_sizes, data_format_src, data_format_dst)
+ strides = test_utils.PermuteDimsBetweenDataFormats(strides, data_format_src,
+ data_format_dst)
+ dilations = test_utils.PermuteDimsBetweenDataFormats(
+ dilations, data_format_src, data_format_dst)
+
with self.test_session() as sess:
t1 = array_ops.placeholder(dtypes.float32, shape=input_sizes)
t2 = array_ops.placeholder(dtypes.float32, shape=filter_sizes)
@@ -71,12 +99,14 @@ class Conv2DTest(XLATestCase):
t2,
strides=strides,
padding=padding,
- data_format="NHWC",
+ data_format=data_format_dst,
dilations=dilations)
+
value = sess.run(out, {t1: x1, t2: x2})
self.assertAllClose(expected, value, 1e-3)
- def testConv2D1x1Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x1Filter(self, data_format):
expected_output = np.reshape([
30.0, 36.0, 42.0, 66.0, 81.0, 96.0, 102.0, 126.0, 150.0, 138.0, 171.0,
204.0, 174.0, 216.0, 258.0, 210.0, 261.0, 312.0
@@ -86,9 +116,12 @@ class Conv2DTest(XLATestCase):
filter_sizes=[1, 1, 3, 3],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Filter(self, data_format):
expected_output = np.reshape(
[2271.0, 2367.0, 2463.0, 2901.0, 3033.0, 3165.0], [1, 1, 2, 3])
self._VerifyValues(
@@ -96,9 +129,12 @@ class Conv2DTest(XLATestCase):
filter_sizes=[2, 2, 3, 3],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2Filter2x1Dilation(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Filter2x1Dilation(self, data_format):
expected_output = np.array([[[[72], [82], [92]], [[112], [122], [132]]]])
self._VerifyValues(
input_sizes=[1, 4, 4, 1],
@@ -106,9 +142,12 @@ class Conv2DTest(XLATestCase):
strides=[1, 1],
dilations=[2, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2Filter(self, data_format):
expected_output = np.reshape([
231.0, 252.0, 273.0, 384.0, 423.0, 462.0, 690.0, 765.0, 840.0, 843.0,
936.0, 1029.0
@@ -118,18 +157,24 @@ class Conv2DTest(XLATestCase):
filter_sizes=[1, 2, 3, 3],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterStride2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterStride2(self, data_format):
expected_output = np.reshape([2271.0, 2367.0, 2463.0], [1, 1, 1, 3])
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
strides=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterStride2Same(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterStride2Same(self, data_format):
expected_output = np.reshape(
[2271.0, 2367.0, 2463.0, 1230.0, 1305.0, 1380.0], [1, 1, 2, 3])
self._VerifyValues(
@@ -137,47 +182,61 @@ class Conv2DTest(XLATestCase):
filter_sizes=[2, 2, 3, 3],
strides=[2, 2],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2DEmptyDilation(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2DEmptyDilation(self, data_format):
self._VerifyValues(
input_sizes=[0, 2, 3, 3],
filter_sizes=[1, 1, 3, 3],
strides=[1, 1],
dilations=[2, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=np.zeros([0, 2, 3, 3]))
- def testConv2D2x2FilterDilation(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterDilation(self, data_format):
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
strides=[1, 1],
dilations=[1, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=np.reshape([2667, 2781, 2895], [1, 1, 1, 3]))
- def testConv2D1x2FilterDilation(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterDilation(self, data_format):
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[1, 2, 3, 3],
strides=[1, 1],
dilations=[2, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=np.array([[[[231, 252, 273], [384, 423, 462]],
[[690, 765, 840], [843, 936, 1029]]]]))
- def testConv2DKernelSizeMatchesInputSizeDilation(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2DKernelSizeMatchesInputSizeDilation(self, data_format):
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
filter_sizes=[2, 2, 1, 2],
strides=[1, 1],
dilations=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=np.reshape([108, 128], [1, 1, 1, 2]))
-class Conv2DBackpropInputTest(XLATestCase):
+class Conv2DBackpropInputTest(XLATestCase, parameterized.TestCase):
def _VerifyValues(self,
input_sizes=None,
@@ -186,6 +245,8 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=None,
dilations=None,
padding=None,
+ data_format_src="NHWC",
+ data_format_dst="NHWC",
expected=None):
"""Tests that gen_nn_ops.conv2d_backprop_input produces the expected output.
@@ -198,8 +259,12 @@ class Conv2DBackpropInputTest(XLATestCase):
strides: Strides.
dilations: Dilations.
padding: Padding type.
+      data_format_src: Data format the input is in.
+      data_format_dst: Data format the verification runs in and the input is
+        converted to.
expected: Expected output.
"""
+
total_size_1 = np.prod(filter_sizes)
total_size_2 = np.prod(out_backprop_sizes)
x1 = np.arange(1, total_size_1 + 1, dtype=np.float32).reshape(filter_sizes)
@@ -209,6 +274,23 @@ class Conv2DBackpropInputTest(XLATestCase):
if dilations is not None:
dilations = [1] + dilations + [1]
+ expected = np.reshape(expected, input_sizes)
+
+ # Convert between data formats.
+ expected = test_utils.ConvertBetweenDataFormats(expected, data_format_src,
+ data_format_dst)
+ x2 = test_utils.ConvertBetweenDataFormats(x2, data_format_src,
+ data_format_dst)
+ input_sizes = test_utils.PermuteDimsBetweenDataFormats(
+ input_sizes, data_format_src, data_format_dst)
+ out_backprop_sizes = test_utils.PermuteDimsBetweenDataFormats(
+ out_backprop_sizes, data_format_src, data_format_dst)
+ strides = test_utils.PermuteDimsBetweenDataFormats(strides, data_format_src,
+ data_format_dst)
+ if dilations is not None:
+ dilations = test_utils.PermuteDimsBetweenDataFormats(
+ dilations, data_format_src, data_format_dst)
+
with self.test_session() as sess:
t1 = array_ops.placeholder(dtypes.float32, shape=filter_sizes)
t2 = array_ops.placeholder(dtypes.float32, shape=out_backprop_sizes)
@@ -220,12 +302,14 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=strides,
dilations=dilations,
padding=padding,
- data_format="NHWC")
+ data_format=data_format_dst)
+
value = sess.run(out, {t1: x1, t2: x2})
self.assertAllEqual(input_sizes, value.shape)
- self.assertAllClose(expected, np.ravel(value), 1e-3)
+ self.assertAllClose(expected, value, 1e-3)
- def testConv2D1x1Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x1Filter(self, data_format):
expected_output = [
5, 11, 17, 11, 25, 39, 17, 39, 61, 23, 53, 83, 29, 67, 105, 35, 81, 127,
41, 95, 149, 47, 109, 171, 53, 123, 193, 59, 137, 215, 65, 151, 237, 71,
@@ -237,9 +321,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 4, 4, 2],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterStride3Width5(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterStride3Width5(self, data_format):
expected_output = [1, 2, 0, 2, 4]
self._VerifyValues(
input_sizes=[1, 1, 5, 1],
@@ -247,9 +334,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[3, 3],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterStride3Width6(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterStride3Width6(self, data_format):
expected_output = [1, 2, 0, 2, 4, 0]
self._VerifyValues(
input_sizes=[1, 1, 6, 1],
@@ -257,9 +347,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[3, 3],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterStride3Width7(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterStride3Width7(self, data_format):
expected_output = [1, 2, 0, 2, 4, 0, 0]
self._VerifyValues(
input_sizes=[1, 1, 7, 1],
@@ -267,9 +360,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[3, 3],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterC1Same(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterC1Same(self, data_format):
expected_output = [1, 4, 7, 7, 23, 33]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
@@ -277,9 +373,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 2, 3, 1],
strides=[1, 1],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Filter(self, data_format):
expected_output = [
14, 32, 50, 100, 163, 226, 167, 212, 257, 122, 140, 158, 478, 541, 604,
437, 482, 527
@@ -290,9 +389,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 3],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterSame(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterSame(self, data_format):
expected_output = [
14, 32, 50, 100, 163, 226, 217, 334, 451, 190, 307, 424, 929, 1217,
1505, 1487, 1883, 2279
@@ -303,9 +405,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 2, 3, 3],
strides=[1, 1],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2Filter(self, data_format):
expected_output = [1, 4, 4, 3, 10, 8, 5, 16, 12]
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
@@ -313,9 +418,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 3, 2, 1],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterSame(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterSame(self, data_format):
expected_output = [1, 4, 7, 4, 13, 16, 7, 22, 25]
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
@@ -323,9 +431,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 3, 3, 1],
strides=[1, 1],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterStride2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterStride2(self, data_format):
expected_output = [1, 2, 5, 4, 6, 0, 0, 0, 0, 0, 3, 6, 13, 8, 12]
self._VerifyValues(
input_sizes=[1, 3, 5, 1],
@@ -333,9 +444,12 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 2, 2, 1],
strides=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterStride2Same(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterStride2Same(self, data_format):
expected_output = [1, 2, 2, 3, 4, 6]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
@@ -343,9 +457,13 @@ class Conv2DBackpropInputTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[2, 2],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2Depth3ValidBackpropInputStride1x1Dilation2x1(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Depth3ValidBackpropInputStride1x1Dilation2x1(
+ self, data_format):
self._VerifyValues(
input_sizes=[1, 3, 6, 1],
filter_sizes=[2, 2, 1, 1],
@@ -353,9 +471,12 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=[1, 1],
dilations=[2, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[1, 4, 7, 10, 13, 10, 0, 0, 0, 0, 0, 0, 3, 10, 17, 24, 31, 20])
- def testConv2D2x2Depth1ValidBackpropInputDilation1x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Depth1ValidBackpropInputDilation1x2(self, data_format):
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
@@ -363,9 +484,12 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=[1, 1],
dilations=[1, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[1, 0, 2, 3, 0, 4])
- def testConv2DEmptyBackpropInputDilation1x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2DEmptyBackpropInputDilation1x2(self, data_format):
self._VerifyValues(
input_sizes=[0, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
@@ -373,9 +497,12 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=[1, 1],
dilations=[1, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=np.zeros([0]))
- def testConv2D2x2Depth3ValidBackpropInputDilation2x1(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Depth3ValidBackpropInputDilation2x1(self, data_format):
    # The GPU version of this test is not very stable, so we relax the
    # error threshold to 1e-4.
self._VerifyValues(
@@ -385,12 +512,16 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=[1, 1],
dilations=[2, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[
14, 32, 50, 68, 86, 104, 0, 0, 0, 0, 0, 0, 122, 140, 158, 176, 194,
212
])
- def testConv2DKernelSizeMatchesInputSizeBackpropInputDilation2x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2DKernelSizeMatchesInputSizeBackpropInputDilation2x2(
+ self, data_format):
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
filter_sizes=[2, 2, 1, 2],
@@ -398,10 +529,12 @@ class Conv2DBackpropInputTest(XLATestCase):
strides=[1, 1],
dilations=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[5, 0, 11, 0, 0, 0, 17, 0, 23])
-class Conv2DBackpropFilterTest(XLATestCase):
+class Conv2DBackpropFilterTest(XLATestCase, parameterized.TestCase):
def _VerifyValues(self,
input_sizes=None,
@@ -410,6 +543,8 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=None,
dilations=None,
padding=None,
+ data_format_src="NHWC",
+ data_format_dst="NHWC",
expected=None):
"""Tests that gen_nn_ops.conv2d_backprop_filter produces the right output.
@@ -422,6 +557,9 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides: Stride.
dilations: Dilations.
padding: Padding type.
+      data_format_src: Data format the input is in.
+      data_format_dst: Data format the verification runs in and the input is
+        converted to.
expected: Expected output.
"""
@@ -434,6 +572,23 @@ class Conv2DBackpropFilterTest(XLATestCase):
if dilations is not None:
dilations = [1] + dilations + [1]
+ expected = np.reshape(expected, filter_sizes)
+
+ # Convert between data formats.
+ x1 = test_utils.ConvertBetweenDataFormats(x1, data_format_src,
+ data_format_dst)
+ x2 = test_utils.ConvertBetweenDataFormats(x2, data_format_src,
+ data_format_dst)
+ input_sizes = test_utils.PermuteDimsBetweenDataFormats(
+ input_sizes, data_format_src, data_format_dst)
+ out_backprop_sizes = test_utils.PermuteDimsBetweenDataFormats(
+ out_backprop_sizes, data_format_src, data_format_dst)
+ strides = test_utils.PermuteDimsBetweenDataFormats(strides, data_format_src,
+ data_format_dst)
+ if dilations is not None:
+ dilations = test_utils.PermuteDimsBetweenDataFormats(
+ dilations, data_format_src, data_format_dst)
+
with self.test_session() as sess:
t1 = array_ops.placeholder(dtypes.float32, shape=input_sizes)
t2 = array_ops.placeholder(dtypes.float32, shape=out_backprop_sizes)
@@ -445,13 +600,14 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=strides,
dilations=dilations,
padding=padding,
- data_format="NHWC")
+ data_format=data_format_dst)
value = sess.run(tensor, {t1: x1, t2: x2})
self.assertAllEqual(filter_sizes, value.shape)
- self.assertAllClose(expected, np.ravel(value), 1e-3)
+ self.assertAllClose(expected, value, 1e-3)
- def testConv2D1x1Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x1Filter(self, data_format):
expected_output = [8056, 8432, 8312, 8704, 8568, 8976]
self._VerifyValues(
input_sizes=[1, 4, 4, 3],
@@ -459,9 +615,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 4, 4, 2],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2Filter(self, data_format):
expected_output = [120, 141]
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
@@ -469,9 +628,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 3, 2, 1],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterDepth1(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterDepth1(self, data_format):
expected_output = [5, 8, 14, 17]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
@@ -479,9 +641,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Filter(self, data_format):
expected_output = [
17, 22, 27, 22, 29, 36, 27, 36, 45, 32, 43, 54, 37, 50, 63, 42, 57, 72,
62, 85, 108, 67, 92, 117, 72, 99, 126, 77, 106, 135, 82, 113, 144, 87,
@@ -493,9 +658,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 3],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterStride3Width5(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterStride3Width5(self, data_format):
expected_output = [9, 12]
self._VerifyValues(
input_sizes=[1, 1, 5, 1],
@@ -503,9 +671,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[3, 3],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterStride3Width6(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterStride3Width6(self, data_format):
expected_output = [9, 12]
self._VerifyValues(
input_sizes=[1, 1, 6, 1],
@@ -513,9 +684,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[3, 3],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x2FilterStride3Width7(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x2FilterStride3Width7(self, data_format):
expected_output = [9, 12]
self._VerifyValues(
input_sizes=[1, 1, 7, 1],
@@ -523,9 +697,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[3, 3],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x3Filter(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x3Filter(self, data_format):
expected_output = [5, 8, 11]
self._VerifyValues(
input_sizes=[1, 1, 4, 1],
@@ -533,9 +710,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[1, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x3FilterSame(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x3FilterSame(self, data_format):
expected_output = [20, 30, 20]
self._VerifyValues(
input_sizes=[1, 1, 4, 1],
@@ -543,9 +723,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 4, 1],
strides=[1, 1],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D1x3FilterSameOutbackprop2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D1x3FilterSameOutbackprop2(self, data_format):
expected_output = [7, 10, 3]
self._VerifyValues(
input_sizes=[1, 1, 4, 1],
@@ -553,9 +736,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[2, 2],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterC1Same(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterC1Same(self, data_format):
expected_output = [91, 58, 32, 17]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
@@ -563,9 +749,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 2, 3, 1],
strides=[1, 1],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterStride2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterStride2(self, data_format):
expected_output = [92, 102, 112]
self._VerifyValues(
input_sizes=[1, 3, 5, 1],
@@ -573,9 +762,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 2, 2, 1],
strides=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2FilterStride2Same(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2FilterStride2Same(self, data_format):
expected_output = [7, 2, 16, 5]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
@@ -583,9 +775,13 @@ class Conv2DBackpropFilterTest(XLATestCase):
out_backprop_sizes=[1, 1, 2, 1],
strides=[2, 2],
padding="SAME",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=expected_output)
- def testConv2D2x2Depth3ValidBackpropFilterStride1x1Dilation2x1(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Depth3ValidBackpropFilterStride1x1Dilation2x1(
+ self, data_format):
self._VerifyValues(
input_sizes=[1, 3, 6, 1],
filter_sizes=[2, 2, 1, 1],
@@ -593,9 +789,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=[1, 1],
dilations=[2, 1],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[55, 70, 235, 250])
- def testConv2D2x2Depth1ValidBackpropFilterDilation1x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Depth1ValidBackpropFilterDilation1x2(self, data_format):
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
@@ -603,9 +802,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=[1, 1],
dilations=[1, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[1, 3, 4, 6])
- def testConv2DEmptyBackpropFilterDilation1x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2DEmptyBackpropFilterDilation1x2(self, data_format):
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 0],
@@ -613,9 +815,12 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=[1, 1],
dilations=[1, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=np.zeros([0]))
- def testConv2D2x2Depth3ValidBackpropFilterDilation2x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2D2x2Depth3ValidBackpropFilterDilation2x2(self, data_format):
self._VerifyValues(
input_sizes=[1, 3, 4, 3],
filter_sizes=[2, 2, 3, 3],
@@ -623,13 +828,17 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=[1, 1],
dilations=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[
17, 22, 27, 22, 29, 36, 27, 36, 45, 47, 64, 81, 52, 71, 90, 57, 78,
99, 137, 190, 243, 142, 197, 252, 147, 204, 261, 167, 232, 297, 172,
239, 306, 177, 246, 315
])
- def testConv2DKernelSizeMatchesInputSizeBackpropFilterDilation2x2(self):
+ @parameterized.named_parameters(*DATA_FORMATS)
+ def testConv2DKernelSizeMatchesInputSizeBackpropFilterDilation2x2(
+ self, data_format):
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
filter_sizes=[2, 2, 1, 2],
@@ -637,6 +846,8 @@ class Conv2DBackpropFilterTest(XLATestCase):
strides=[1, 1],
dilations=[2, 2],
padding="VALID",
+ data_format_src="NHWC",
+ data_format_dst=data_format,
expected=[1, 2, 3, 6, 7, 14, 9, 18])
diff --git a/tensorflow/compiler/tests/fused_batchnorm_test.py b/tensorflow/compiler/tests/fused_batchnorm_test.py
index df4a683d9b..34cca512d4 100644
--- a/tensorflow/compiler/tests/fused_batchnorm_test.py
+++ b/tensorflow/compiler/tests/fused_batchnorm_test.py
@@ -21,6 +21,7 @@ from __future__ import print_function
from absl.testing import parameterized
import numpy as np
+from tensorflow.compiler.tests import test_utils
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
@@ -31,24 +32,6 @@ from tensorflow.python.platform import test
class FusedBatchNormTest(XLATestCase, parameterized.TestCase):
- def _convertBetweenDataFormats(self, x, data_format_src, data_format_dst):
- valid_data_formats = ["NHWC", "NCHW", "HWNC", "HWCN"]
- if data_format_src not in valid_data_formats:
- raise ValueError("data_format_src must be of %s, got %s." %
- (valid_data_formats, data_format_src))
- if data_format_dst not in valid_data_formats:
- raise ValueError("data_format_dst must be of %s, got %s." %
- (valid_data_formats, data_format_dst))
- if len(x.shape) != 4:
- raise ValueError("x must be 4D, got shape %s." % x.shape)
-
- if data_format_src == data_format_dst:
- return x
-
- dim_map = {d: i for i, d in enumerate(data_format_src)}
- transpose_dims = [dim_map[d] for d in data_format_dst]
- return np.transpose(x, transpose_dims)
-
def _reference_training(self, x, scale, offset, epsilon, data_format):
if data_format != "NHWC":
raise ValueError("data_format must be NHWC, got %s." % data_format)
@@ -102,10 +85,10 @@ class FusedBatchNormTest(XLATestCase, parameterized.TestCase):
with self.test_session() as sess, self.test_scope():
# To avoid constant folding
- x_val_converted = self._convertBetweenDataFormats(x_val, data_format_src,
- data_format)
- y_ref_converted = self._convertBetweenDataFormats(y_ref, data_format_src,
- data_format)
+ x_val_converted = test_utils.ConvertBetweenDataFormats(
+ x_val, data_format_src, data_format)
+ y_ref_converted = test_utils.ConvertBetweenDataFormats(
+ y_ref, data_format_src, data_format)
t_val = array_ops.placeholder(
np.float32, shape=x_val_converted.shape, name="x")
@@ -149,10 +132,10 @@ class FusedBatchNormTest(XLATestCase, parameterized.TestCase):
with self.test_session() as sess, self.test_scope():
# To avoid constant folding
- x_val_converted = self._convertBetweenDataFormats(x_val, data_format_src,
- data_format)
- y_ref_converted = self._convertBetweenDataFormats(y_ref, data_format_src,
- data_format)
+ x_val_converted = test_utils.ConvertBetweenDataFormats(
+ x_val, data_format_src, data_format)
+ y_ref_converted = test_utils.ConvertBetweenDataFormats(
+ y_ref, data_format_src, data_format)
t_val = array_ops.placeholder(
np.float32, shape=x_val_converted.shape, name="x")
@@ -236,12 +219,13 @@ class FusedBatchNormTest(XLATestCase, parameterized.TestCase):
self.skipTest("GPU does not support data format HWCN.")
with self.test_session() as sess, self.test_scope():
- grad_val_converted = self._convertBetweenDataFormats(
+ grad_val_converted = test_utils.ConvertBetweenDataFormats(
grad_val, data_format_src, data_format)
- x_val_converted = self._convertBetweenDataFormats(x_val, data_format_src,
- data_format)
- grad_x_ref_converted = self._convertBetweenDataFormats(
+ x_val_converted = test_utils.ConvertBetweenDataFormats(
+ x_val, data_format_src, data_format)
+ grad_x_ref_converted = test_utils.ConvertBetweenDataFormats(
grad_x_ref, data_format_src, data_format)
+
grad = array_ops.placeholder(
np.float32, shape=x_val_converted.shape, name="grad")
x = array_ops.placeholder(
@@ -289,10 +273,10 @@ class FusedBatchNormTest(XLATestCase, parameterized.TestCase):
self.skipTest("GPU does not support data format HWCN.")
with self.test_session() as sess, self.test_scope():
- grad_val_converted = self._convertBetweenDataFormats(
+ grad_val_converted = test_utils.ConvertBetweenDataFormats(
grad_val, data_format_src, data_format)
- x_val_converted = self._convertBetweenDataFormats(x_val, data_format_src,
- data_format)
+ x_val_converted = test_utils.ConvertBetweenDataFormats(
+ x_val, data_format_src, data_format)
grad = array_ops.placeholder(
np.float32, shape=x_val_converted.shape, name="grad")
diff --git a/tensorflow/compiler/tests/pooling_ops_3d_test.py b/tensorflow/compiler/tests/pooling_ops_3d_test.py
index 4eed903963..d9285186ba 100644
--- a/tensorflow/compiler/tests/pooling_ops_3d_test.py
+++ b/tensorflow/compiler/tests/pooling_ops_3d_test.py
@@ -187,8 +187,14 @@ class Pooling3DTest(XLATestCase):
padding="VALID",
expected=[29.5, 32.5, 50.5, 53.5, 176.5, 179.5, 197.5, 200.5])
- def _VerifyGradient(self, pool_func, pool_grad_func, input_sizes, ksize,
- strides, padding):
+ def _VerifyGradient(self,
+ pool_func,
+ pool_grad_func,
+ input_sizes,
+ ksize,
+ strides,
+ padding,
+ pool_grad_grad_func=None):
"""Verifies the output values of the pooling gradient function.
Args:
@@ -198,6 +204,7 @@ class Pooling3DTest(XLATestCase):
ksize: The kernel size dimensions
strides: The stride dimensions
padding: Padding type.
+ pool_grad_grad_func: Second-order gradient function, if available.
"""
ksize = [1] + ksize + [1]
strides = [1] + strides + [1]
@@ -218,6 +225,8 @@ class Pooling3DTest(XLATestCase):
output_gradient_vals = np.arange(
1, output_vals.size + 1, dtype=np.float32)
output_gradient_vals = output_gradient_vals.reshape(output_vals.shape)
+ output_grad_grad_vals = np.arange(1, x.size + 1, dtype=np.float32)
+ output_grad_grad_vals = output_grad_grad_vals.reshape(x.shape)
# Use the TensorFlow CPU pooling gradient to compute the expected input
# gradients.
@@ -236,6 +245,22 @@ class Pooling3DTest(XLATestCase):
{inputs: x,
output_gradients: output_gradient_vals})
+ output_grad_gradients = array_ops.placeholder(
+ dtypes.float32, shape=expected_input_gradient_vals.shape)
+ if pool_grad_grad_func is not None:
+ expected_grad_gradients = pool_grad_grad_func(
+ inputs,
+ outputs,
+ output_grad_gradients,
+ ksize=ksize,
+ strides=strides,
+ padding=padding,
+ data_format="NDHWC")
+ expected_grad_gradients_vals = sess.run(expected_grad_gradients, {
+ inputs: x,
+ output_grad_gradients: output_grad_grad_vals
+ })
+
# Run the gradient op on the XLA device
with self.test_scope():
outputs = array_ops.placeholder(dtypes.float32, shape=output_vals.shape)
@@ -246,6 +271,16 @@ class Pooling3DTest(XLATestCase):
ksize=ksize,
strides=strides,
padding=padding)
+ if pool_grad_grad_func is not None:
+ actual_grad_gradients = pool_grad_grad_func(
+ inputs,
+ outputs,
+ output_grad_gradients,
+ ksize=ksize,
+ strides=strides,
+ padding=padding,
+ data_format="NDHWC")
+
actual = sess.run(actual_input_gradients, {
inputs: x,
outputs: output_vals,
@@ -260,6 +295,22 @@ class Pooling3DTest(XLATestCase):
atol=1e-6)
self.assertShapeEqual(actual, inputs)
+ if pool_grad_grad_func is not None:
+ actual_grad_gradients_vals = sess.run(
+ actual_grad_gradients, {
+ inputs: x,
+ outputs: output_vals,
+ output_grad_gradients: output_grad_grad_vals
+ })
+
+      # Compare the TensorFlow and XLA results.
+ self.assertAllClose(
+ expected_grad_gradients_vals,
+ actual_grad_gradients_vals,
+ rtol=1e-4,
+ atol=1e-6)
+ self.assertShapeEqual(actual_grad_gradients_vals, outputs)
+
def testMaxPoolGradValidPadding1_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
@@ -267,7 +318,8 @@ class Pooling3DTest(XLATestCase):
input_sizes=[1, 3, 3, 3, 1],
ksize=[1, 1, 1],
strides=[1, 1, 1],
- padding="VALID")
+ padding="VALID",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testMaxPoolGradValidPadding2_1_6_3d(self):
self._VerifyGradient(
@@ -276,9 +328,13 @@ class Pooling3DTest(XLATestCase):
input_sizes=[2, 3, 3, 6, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
- padding="VALID")
+ padding="VALID",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testMaxPoolGradValidPadding2_1_7_3d(self):
+ # TODO(b/73062247): the bfloat16 implementation of MaxPool3DGradGrad does
+ # not have enough precision for this test case to pass if
+ # pool_grad_grad_func is passed.
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops.max_pool3d_grad,
@@ -294,7 +350,8 @@ class Pooling3DTest(XLATestCase):
input_sizes=[2, 2, 2, 2, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
- padding="VALID")
+ padding="VALID",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testMaxPoolGradSamePadding1_1_3d(self):
self._VerifyGradient(
@@ -303,7 +360,8 @@ class Pooling3DTest(XLATestCase):
input_sizes=[2, 3, 2, 4, 1],
ksize=[1, 1, 1],
strides=[1, 1, 1],
- padding="SAME")
+ padding="SAME",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testMaxPoolGradSamePadding2_1_3d(self):
self._VerifyGradient(
@@ -312,7 +370,8 @@ class Pooling3DTest(XLATestCase):
input_sizes=[2, 3, 2, 4, 1],
ksize=[2, 2, 2],
strides=[1, 1, 1],
- padding="SAME")
+ padding="SAME",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testMaxPoolGradSamePadding2_2_3d(self):
self._VerifyGradient(
@@ -321,7 +380,8 @@ class Pooling3DTest(XLATestCase):
input_sizes=[2, 5, 2, 4, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
- padding="SAME")
+ padding="SAME",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testMaxPoolGradSamePadding3_1_3d(self):
self._VerifyGradient(
@@ -330,7 +390,8 @@ class Pooling3DTest(XLATestCase):
input_sizes=[1, 3, 3, 7, 1],
ksize=[3, 3, 3],
strides=[1, 1, 1],
- padding="SAME")
+ padding="SAME",
+ pool_grad_grad_func=gen_nn_ops.max_pool3d_grad_grad)
def testAvgPoolGradValidPadding1_1_3d(self):
self._VerifyGradient(
diff --git a/tensorflow/compiler/tests/test_utils.py b/tensorflow/compiler/tests/test_utils.py
new file mode 100644
index 0000000000..6abde18ea9
--- /dev/null
+++ b/tensorflow/compiler/tests/test_utils.py
@@ -0,0 +1,63 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utilities for helping test ops."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy as np
+
+
+def ConvertBetweenDataFormats(x, data_format_src, data_format_dst):
+ """Converts 4D tensor between data formats."""
+
+ valid_data_formats = ["NHWC", "NCHW", "HWNC", "HWCN"]
+ if data_format_src not in valid_data_formats:
+ raise ValueError("data_format_src must be of %s, got %s." %
+ (valid_data_formats, data_format_src))
+ if data_format_dst not in valid_data_formats:
+ raise ValueError("data_format_dst must be of %s, got %s." %
+ (valid_data_formats, data_format_dst))
+ if len(x.shape) != 4:
+ raise ValueError("x must be 4D, got shape %s." % x.shape)
+
+ if data_format_src == data_format_dst:
+ return x
+
+ dim_map = {d: i for i, d in enumerate(data_format_src)}
+ transpose_dims = [dim_map[d] for d in data_format_dst]
+ return np.transpose(x, transpose_dims)
+
+
+def PermuteDimsBetweenDataFormats(dims, data_format_src, data_format_dst):
+ """Get new shape for converting between data formats."""
+
+ valid_data_formats = ["NHWC", "NCHW", "HWNC", "HWCN"]
+ if data_format_src not in valid_data_formats:
+ raise ValueError("data_format_src must be of %s, got %s." %
+ (valid_data_formats, data_format_src))
+ if data_format_dst not in valid_data_formats:
+ raise ValueError("data_format_dst must be of %s, got %s." %
+ (valid_data_formats, data_format_dst))
+ if len(dims) != 4:
+ raise ValueError("dims must be of length 4, got %s." % dims)
+
+ if data_format_src == data_format_dst:
+ return dims
+
+ dim_map = {d: i for i, d in enumerate(data_format_src)}
+ permuted_dims = [dims[dim_map[d]] for d in data_format_dst]
+ return permuted_dims
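For intuition, here is a minimal NumPy sketch of the permutation logic both helpers above rely on; the standalone function name is illustrative, not part of the change:

    import numpy as np

    def convert_between_data_formats(x, data_format_src, data_format_dst):
        # Look up each destination axis label in the source format to build
        # the permutation, then transpose.
        dim_map = {d: i for i, d in enumerate(data_format_src)}
        return np.transpose(x, [dim_map[d] for d in data_format_dst])

    x = np.arange(2 * 3 * 4 * 5).reshape([2, 3, 4, 5])  # NHWC
    y = convert_between_data_formats(x, "NHWC", "NCHW")
    assert y.shape == (2, 5, 3, 4)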
diff --git a/tensorflow/compiler/tests/variable_ops_test.py b/tensorflow/compiler/tests/variable_ops_test.py
index 2c09b03d5a..bd616f2a20 100644
--- a/tensorflow/compiler/tests/variable_ops_test.py
+++ b/tensorflow/compiler/tests/variable_ops_test.py
@@ -26,6 +26,7 @@ from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
+from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
@@ -52,9 +53,7 @@ class VariableOpsTest(XLATestCase):
with ops.control_dependencies([x]):
y = v.read_value()
self.assertAllClose(
- np.array([[2, 1 + 2j], [4, 5]]).astype(dtype), sess.run(y, {
- p: 1
- }))
+ np.array([[2, 1 + 2j], [4, 5]]).astype(dtype), sess.run(y, {p: 1}))
def testSparseRead0DIndices(self):
for dtype in self.numeric_types:
@@ -103,9 +102,9 @@ class VariableOpsTest(XLATestCase):
x = v.sparse_read([[2, 1], [3, 0]])
self.assertAllClose(
np.array(
- [[[[20, 21, 22], [23, 24j, 25]], [[10, 11, 12], [13, 14, 15]]],
- [[[30, 31, 32], [33, 34, 35]], [[0, 1, 2], [3, 4, 5]]]],
- ).astype(dtype), sess.run(x))
+ [[[[20, 21, 22], [23, 24j, 25]], [[10, 11, 12], [13, 14, 15]]
+ ], [[[30, 31, 32], [33, 34, 35]], [[0, 1, 2], [3, 4, 5]]]
+ ],).astype(dtype), sess.run(x))
def testShape(self):
for dtype in self.numeric_types:
@@ -206,6 +205,206 @@ class VariableOpsTest(XLATestCase):
self.assertAllClose(update, result[1])
self.assertAllClose(update, result[2])
+ def testScatterAdd(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[2, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[1], [7]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_add(
+ handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertAllEqual(sess.run(read), [[3], [7]])
+
+ def testScatterSub(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[2, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[4], [1]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_sub(
+ handle, [1], constant_op.constant([[2]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertAllEqual(sess.run(read), [[4], [-1]])
+
+ def testScatterMul(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[1]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_mul(
+ handle, [0], constant_op.constant([[5]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[5]])
+
+ def testScatterDiv(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_div(
+ handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertAllEqual(sess.run(read), [[2]])
+
+ def testScatterMin(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_min(
+ handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[3]])
+
+ def testScatterMax(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_max(
+ handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[6]])
+
+ def testScatterUpdate(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_update(
+ handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[3]])
+
+ def testScatterAddScalar(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[1]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_add(
+ handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[3]])
+
+ def testScatterSubScalar(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[1]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_sub(
+ handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[-1]])
+
+ def testScatterMulScalar(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[1]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_mul(
+ handle, [0], constant_op.constant(5, dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[5]])
+
+ def testScatterDivScalar(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_div(
+ handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[2]])
+
+ def testScatterMinScalar(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_min(
+ handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[3]])
+
+ def testScatterMaxScalar(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[1, 1])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([[6]], dtype=dtypes.int32)))
+ sess.run(
+ resource_variable_ops.resource_scatter_max(
+ handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
+ read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
+ self.assertEqual(sess.run(read), [[6]])
+
+ def testScatterNdAddOps(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.float32, shape=[8])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([1] * 8, dtype=dtypes.float32)))
+ indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
+ updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
+ expected = np.array([1, 12, 1, 11, 10, 1, 1, 13])
+ sess.run(gen_state_ops.resource_scatter_nd_add(handle, indices, updates))
+ read = resource_variable_ops.read_variable_op(
+ handle, dtype=dtypes.float32)
+ self.assertAllClose(expected, sess.run(read))
+
+ def testScatterNdUpdateAddOps(self):
+ with self.test_session() as sess, self.test_scope():
+ handle = resource_variable_ops.var_handle_op(
+ dtype=dtypes.float32, shape=[8])
+ sess.run(
+ resource_variable_ops.assign_variable_op(
+ handle, constant_op.constant([1] * 8, dtype=dtypes.float32)))
+ indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
+ updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
+ expected = np.array([1, 11, 1, 10, 9, 1, 1, 12])
+ sess.run(
+ gen_state_ops.resource_scatter_nd_update(handle, indices, updates))
+ read = resource_variable_ops.read_variable_op(
+ handle, dtype=dtypes.float32)
+ self.assertAllClose(expected, sess.run(read))
+
class StridedSliceAssignChecker(object):
"""Compares the results of a slice assignment using Tensorflow and numpy."""
@@ -240,8 +439,8 @@ class SliceAssignTest(XLATestCase):
def testSliceAssign(self):
for dtype in self.numeric_types:
- checker = StridedSliceAssignChecker(self, [[1, 2, 3], [4, 5, 6]],
- dtype=dtype)
+ checker = StridedSliceAssignChecker(
+ self, [[1, 2, 3], [4, 5, 6]], dtype=dtype)
# No-op assignment
checker[:] = [[10, 20, 30], [40, 50, 60]]
# Checks trivial (1,1) shape tensor
diff --git a/tensorflow/compiler/tf2xla/kernels/pooling_ops.cc b/tensorflow/compiler/tf2xla/kernels/pooling_ops.cc
index f8e7b48a0f..eb8b5b130f 100644
--- a/tensorflow/compiler/tf2xla/kernels/pooling_ops.cc
+++ b/tensorflow/compiler/tf2xla/kernels/pooling_ops.cc
@@ -694,5 +694,18 @@ REGISTER_XLA_OP(Name("MaxPoolGradGradV2")
.CompileTimeConstInput("strides"),
MaxPool2DGradGradOp);
+class MaxPool3DGradGradOp : public MaxPoolGradGradOp {
+ public:
+ explicit MaxPool3DGradGradOp(OpKernelConstruction* ctx)
+ : MaxPoolGradGradOp(ctx, /*num_spatial_dims=*/3) {
+ string data_format;
+ OP_REQUIRES_OK(ctx, ctx->GetAttr("data_format", &data_format));
+ OP_REQUIRES(ctx, FormatFromString(data_format, &data_format_),
+ errors::InvalidArgument("Invalid data format"));
+ }
+};
+REGISTER_XLA_OP(Name("MaxPool3DGradGrad").TypeConstraint("T", DT_FLOAT),
+ MaxPool3DGradGradOp);
+
} // anonymous namespace
} // namespace tensorflow
diff --git a/tensorflow/compiler/tf2xla/kernels/stateless_random_ops.cc b/tensorflow/compiler/tf2xla/kernels/stateless_random_ops.cc
index 3799d491bb..0367501433 100644
--- a/tensorflow/compiler/tf2xla/kernels/stateless_random_ops.cc
+++ b/tensorflow/compiler/tf2xla/kernels/stateless_random_ops.cc
@@ -207,7 +207,7 @@ class StatelessRandomNormalOp : public XlaOpKernel {
RandomUniform(builder, seed, shape, std::nextafter(-1.0f, 0.0f), 1.0);
// Convert uniform distribution to normal distribution by computing
// sqrt(2) * erfinv(x)
- auto erfinv_or_status = ErfInv(builder, uniform);
+ auto erfinv_or_status = ErfInv(uniform);
OP_REQUIRES_OK(ctx, erfinv_or_status.status());
auto normal = builder->Mul(builder->ConstantR0<float>(std::sqrt(2.0)),
erfinv_or_status.ValueOrDie());
diff --git a/tensorflow/compiler/tf2xla/kernels/unary_ops.cc b/tensorflow/compiler/tf2xla/kernels/unary_ops.cc
index 1d078de211..a39e5dcfc5 100644
--- a/tensorflow/compiler/tf2xla/kernels/unary_ops.cc
+++ b/tensorflow/compiler/tf2xla/kernels/unary_ops.cc
@@ -202,9 +202,8 @@ class ErfOp : public XlaOpKernel {
OP_REQUIRES_OK(ctx,
DataTypeToPrimitiveType(input_type(0), &primitive_type));
- auto y =
- b->Select(b->Gt(abs_x, one), b->Sub(one, Erfc(b, x, primitive_type)),
- Erf(b, x, primitive_type));
+ auto y = b->Select(b->Gt(abs_x, one), b->Sub(one, Erfc(x, primitive_type)),
+ Erf(x, primitive_type));
ctx->SetOutput(0, y);
}
};
@@ -223,9 +222,8 @@ class ErfcOp : public XlaOpKernel {
OP_REQUIRES_OK(ctx,
DataTypeToPrimitiveType(input_type(0), &primitive_type));
- auto y =
- b->Select(b->Lt(abs_x, one), b->Sub(one, Erf(b, x, primitive_type)),
- Erfc(b, x, primitive_type));
+ auto y = b->Select(b->Lt(abs_x, one), b->Sub(one, Erf(x, primitive_type)),
+ Erfc(x, primitive_type));
ctx->SetOutput(0, y);
}
};
diff --git a/tensorflow/compiler/tf2xla/kernels/variable_ops.cc b/tensorflow/compiler/tf2xla/kernels/variable_ops.cc
index a163fa0a5b..ad51396bdf 100644
--- a/tensorflow/compiler/tf2xla/kernels/variable_ops.cc
+++ b/tensorflow/compiler/tf2xla/kernels/variable_ops.cc
@@ -13,9 +13,9 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-#include "tensorflow/compiler/tf2xla/kernels/cwise_ops.h"
#include "tensorflow/compiler/tf2xla/kernels/gather_op_helpers.h"
#include "tensorflow/compiler/tf2xla/kernels/shape_util.h"
+#include "tensorflow/compiler/tf2xla/lib/scatter.h"
#include "tensorflow/compiler/tf2xla/shape_util.h"
#include "tensorflow/compiler/tf2xla/xla_helpers.h"
#include "tensorflow/compiler/tf2xla/xla_op_registry.h"
@@ -23,8 +23,6 @@ limitations under the License.
#include "tensorflow/compiler/xla/literal_util.h"
#include "tensorflow/core/framework/kernel_def_builder.h"
#include "tensorflow/core/framework/types.h"
-#include "tensorflow/core/kernels/bounds_check.h"
-#include "tensorflow/core/kernels/no_op.h"
namespace tensorflow {
namespace {
@@ -41,6 +39,27 @@ class VarIsInitializedOp : public XlaOpKernel {
};
REGISTER_XLA_OP(Name("VarIsInitializedOp"), VarIsInitializedOp);
+class VariableShapeOp : public XlaOpKernel {
+ public:
+ explicit VariableShapeOp(OpKernelConstruction* ctx) : XlaOpKernel(ctx) {
+ OP_REQUIRES_OK(ctx, ctx->GetAttr("out_type", &out_dtype_));
+ }
+
+ void Compile(XlaOpKernelContext* ctx) override {
+ DataType variable_dtype;
+ TensorShape shape;
+ OP_REQUIRES_OK(ctx,
+ ctx->GetVariableTypeAndShape(0, &variable_dtype, &shape));
+ Tensor shape_constant(out_dtype_, TensorShape({shape.dims()}));
+ OP_REQUIRES_OK(ctx, TensorShapeToConstant(shape, &shape_constant));
+ ctx->SetConstantOutput(0, shape_constant);
+ }
+
+ private:
+ DataType out_dtype_;
+};
+REGISTER_XLA_OP(Name("VariableShape"), VariableShapeOp);
+
class ReadVariableOp : public XlaOpKernel {
public:
explicit ReadVariableOp(OpKernelConstruction* ctx) : XlaOpKernel(ctx) {
@@ -125,29 +144,152 @@ class ResourceGatherOp : public XlaOpKernel {
ctx->SetOutput(0, gather);
}
};
-REGISTER_XLA_OP(Name("ResourceGather").TypeConstraint("dtype", kNumericTypes),
- ResourceGatherOp);
+REGISTER_XLA_OP(Name("ResourceGather"), ResourceGatherOp);
-class VariableShapeOp : public XlaOpKernel {
+class ResourceScatterOp : public XlaOpKernel {
public:
- explicit VariableShapeOp(OpKernelConstruction* ctx) : XlaOpKernel(ctx) {
- OP_REQUIRES_OK(ctx, ctx->GetAttr("out_type", &out_dtype_));
+ explicit ResourceScatterOp(
+ OpKernelConstruction* context, bool indices_are_vectors,
+ std::function<xla::XlaOp(const xla::XlaOp&, const xla::XlaOp&,
+ xla::XlaBuilder*)>
+ combiner)
+ : XlaOpKernel(context),
+ indices_are_vectors_(indices_are_vectors),
+ combiner_(std::move(combiner)) {}
+
+ void Compile(XlaOpKernelContext* context) override {
+ xla::XlaBuilder* builder = context->builder();
+
+ DataType dtype = context->input_type(2);
+ TensorShape var_shape;
+ xla::XlaOp var_value;
+ OP_REQUIRES_OK(
+ context, context->ReadVariableInput(0, dtype, &var_shape, &var_value));
+
+ const xla::XlaOp indices = context->Input(1);
+ const xla::XlaOp updates = context->Input(2);
+
+ auto result = XlaScatter(var_value, updates, indices, indices_are_vectors_,
+ combiner_, builder);
+ OP_REQUIRES_OK(context, result.status());
+ OP_REQUIRES_OK(context,
+ context->AssignVariable(0, dtype, result.ValueOrDie()));
}
- void Compile(XlaOpKernelContext* ctx) override {
- DataType variable_dtype;
- TensorShape shape;
- OP_REQUIRES_OK(ctx,
- ctx->GetVariableTypeAndShape(0, &variable_dtype, &shape));
- Tensor shape_constant(out_dtype_, TensorShape({shape.dims()}));
- OP_REQUIRES_OK(ctx, TensorShapeToConstant(shape, &shape_constant));
- ctx->SetConstantOutput(0, shape_constant);
+ private:
+ const bool indices_are_vectors_;
+ const std::function<xla::XlaOp(const xla::XlaOp&, const xla::XlaOp&,
+ xla::XlaBuilder*)>
+ combiner_;
+};
+
+class ResourceScatterAddOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterAddOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false, Combine) {}
+
+ private:
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Add(x, y);
}
+};
+REGISTER_XLA_OP(Name("ResourceScatterAdd"), ResourceScatterAddOp);
+
+class ResourceScatterSubOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterSubOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false, Combine) {}
private:
- DataType out_dtype_;
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Sub(x, y);
+ }
};
+REGISTER_XLA_OP(Name("ResourceScatterSub"), ResourceScatterSubOp);
+
+class ResourceScatterMulOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterMulOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false, Combine) {}
+
+ private:
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Mul(x, y);
+ }
+};
+REGISTER_XLA_OP(Name("ResourceScatterMul"), ResourceScatterMulOp);
+
+class ResourceScatterDivOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterDivOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false, Combine) {}
+
+ private:
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Div(x, y);
+ }
+};
+REGISTER_XLA_OP(Name("ResourceScatterDiv"), ResourceScatterDivOp);
+
+class ResourceScatterMinOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterMinOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false, Combine) {}
+
+ private:
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Min(x, y);
+ }
+};
+REGISTER_XLA_OP(Name("ResourceScatterMin"), ResourceScatterMinOp);
+
+class ResourceScatterMaxOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterMaxOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false, Combine) {}
+
+ private:
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Max(x, y);
+ }
+};
+REGISTER_XLA_OP(Name("ResourceScatterMax"), ResourceScatterMaxOp);
+
+class ResourceScatterUpdateOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterUpdateOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/false,
+ /*combiner=*/{}) {}
+};
+REGISTER_XLA_OP(Name("ResourceScatterUpdate"), ResourceScatterUpdateOp);
+
+class ResourceScatterNdUpdateOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterNdUpdateOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/true,
+ /*combiner=*/{}) {}
+};
+REGISTER_XLA_OP(Name("ResourceScatterNdUpdate"), ResourceScatterNdUpdateOp);
+
+class ResourceScatterNdAddOp : public ResourceScatterOp {
+ public:
+ explicit ResourceScatterNdAddOp(OpKernelConstruction* context)
+ : ResourceScatterOp(context, /*indices_are_vectors=*/true,
+ /*combiner=*/Combine) {}
+
+ private:
+ static xla::XlaOp Combine(const xla::XlaOp& x, const xla::XlaOp& y,
+ xla::XlaBuilder* builder) {
+ return builder->Add(x, y);
+ }
+};
+REGISTER_XLA_OP(Name("ResourceScatterNdAdd"), ResourceScatterNdAddOp);
-REGISTER_XLA_OP(Name("VariableShape"), VariableShapeOp);
} // namespace
} // namespace tensorflow
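Each ResourceScatter* kernel above is the same scatter with a different combiner (add, sub, mul, div, min, max, or none for plain update). As a reference for the intended semantics — a sketch only, assuming scalar indices into the first dimension and in-order application of duplicates:

    import numpy as np

    def scatter_apply(var, indices, updates, combiner):
        # Combine each update row with the current value at the indexed row.
        out = np.array(var)
        for i, idx in enumerate(indices):
            out[idx] = combiner(out[idx], updates[i])
        return out

    # Mirrors testScatterAdd above: [[1], [7]] with +[[2]] at row 0 -> [[3], [7]]
    print(scatter_apply([[1], [7]], [0], np.array([[2]]), lambda x, y: x + y))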
diff --git a/tensorflow/compiler/tf2xla/lib/random.cc b/tensorflow/compiler/tf2xla/lib/random.cc
index 360c3e40e3..4a2516244a 100644
--- a/tensorflow/compiler/tf2xla/lib/random.cc
+++ b/tensorflow/compiler/tf2xla/lib/random.cc
@@ -51,7 +51,7 @@ xla::StatusOr<xla::XlaOp> TruncatedNormal(const DataType dtype,
// probit(p) = sqrt(2) * erfinv(2*p-1)
auto p = builder->Add(alpha_normal_cdf, builder->Mul(z, uniform));
auto erfinv_input = builder->Sub(builder->Mul(p, two), one);
- TF_ASSIGN_OR_RETURN(auto erfinv_or_status, ErfInv(builder, erfinv_input));
+ TF_ASSIGN_OR_RETURN(auto erfinv_or_status, ErfInv(erfinv_input));
return builder->Mul(sqrt_2, erfinv_or_status);
}
} // namespace tensorflow
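The call-site change above only drops the explicit builder argument; the math is unchanged. A sketch of the inverse-CDF sampling described in the comment, assuming SciPy's erfinv (not TensorFlow code):

    import math
    from scipy.special import erfinv

    def truncated_standard_normal(u, alpha=-2.0, beta=2.0):
        # Squeeze a U(0,1) sample into [Phi(alpha), Phi(beta)], then apply
        # probit(p) = sqrt(2) * erfinv(2*p - 1), as in the comment above.
        phi = lambda x: 0.5 * (1.0 + math.erf(x / math.sqrt(2.0)))
        p = phi(alpha) + u * (phi(beta) - phi(alpha))
        return math.sqrt(2.0) * erfinv(2.0 * p - 1.0)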
diff --git a/tensorflow/compiler/tf2xla/xla_op_kernel.cc b/tensorflow/compiler/tf2xla/xla_op_kernel.cc
index c6ddbcc6e1..b58959bd6c 100644
--- a/tensorflow/compiler/tf2xla/xla_op_kernel.cc
+++ b/tensorflow/compiler/tf2xla/xla_op_kernel.cc
@@ -39,8 +39,7 @@ xla::XlaBuilder* XlaOpKernelContext::builder() const {
static const XlaExpression* CastExpressionFromTensor(const Tensor& tensor) {
const XlaExpression* expression =
reinterpret_cast<const XlaExpression*>(tensor.tensor_data().data());
- CHECK(expression->handle().builder() != nullptr ||
- expression->resource() != nullptr);
+ CHECK(expression->handle().valid() || expression->resource() != nullptr);
VLOG(1) << "Fetched T" << expression->handle();
return expression;
}
@@ -49,7 +48,7 @@ static const XlaExpression* CastExpressionFromTensor(const Tensor& tensor) {
static XlaExpression* CastExpressionFromUninitializedTensor(Tensor* tensor) {
const XlaExpression* expression =
reinterpret_cast<const XlaExpression*>(tensor->tensor_data().data());
- CHECK_EQ(expression->handle().builder(), nullptr);
+ CHECK(!expression->handle().valid());
return const_cast<XlaExpression*>(expression);
}
@@ -396,7 +395,7 @@ void XlaOpKernelContext::SetConstantOutput(int index, const Tensor& constant) {
OP_REQUIRES_OK(context_, HostTensorToBorrowingLiteral(constant, &literal));
xla::XlaOp handle = builder()->ConstantLiteral(literal);
- CHECK_NE(handle.builder(), nullptr);
+ CHECK(handle.valid());
// Make the Tensor that will refer to the expression.
Tensor* output = nullptr;
@@ -441,7 +440,7 @@ Status XlaOpKernelContext::GetResourceInput(int index, XlaResource** resource) {
Status XlaOpKernelContext::AssignVariable(int input_index, DataType type,
xla::XlaOp handle) {
- TF_RET_CHECK(handle.builder() != nullptr);
+ TF_RET_CHECK(handle.valid());
const XlaExpression* expression =
CastExpressionFromTensor(context_->input(input_index));
diff --git a/tensorflow/compiler/tf2xla/xla_op_registry_test.cc b/tensorflow/compiler/tf2xla/xla_op_registry_test.cc
index a2ec8dc730..266cbc4395 100644
--- a/tensorflow/compiler/tf2xla/xla_op_registry_test.cc
+++ b/tensorflow/compiler/tf2xla/xla_op_registry_test.cc
@@ -66,7 +66,7 @@ REGISTER_XLA_OP(Name("DummyDuplicateOp").TypeConstraint("T", DT_FLOAT),
// should have type INT32 while all other kernels should have type FLOAT.
TEST(XlaOpRegistryTest, XlaOpRegistrationWithOverride) {
XlaOpRegistry::RegisterCompilationKernels();
- auto registered_kernels = GetAllRegisteredKernels();
+ auto registered_kernels = GetAllRegisteredKernels().kernel();
for (const auto& kernels : registered_kernels) {
if (kernels.op() == "DummyDuplicateOp") {
EXPECT_EQ(kernels.constraint_size(), 1);
diff --git a/tensorflow/compiler/tf2xla/xla_resource.h b/tensorflow/compiler/tf2xla/xla_resource.h
index 9ce36d1aa7..4de18a7788 100644
--- a/tensorflow/compiler/tf2xla/xla_resource.h
+++ b/tensorflow/compiler/tf2xla/xla_resource.h
@@ -75,7 +75,7 @@ class XlaResource {
const xla::XlaOp& initial_value() const { return initial_value_; }
// A variable is initialized if it has a value.
- bool initialized() const { return value_.builder() != nullptr; }
+ bool initialized() const { return value_.valid(); }
// Sets the type and shape of the resource. The type and shape of a resource
// must not change once the variable has been initialized.
diff --git a/tensorflow/compiler/xla/client/lib/arithmetic.cc b/tensorflow/compiler/xla/client/lib/arithmetic.cc
index f095ec9213..8e875bf352 100644
--- a/tensorflow/compiler/xla/client/lib/arithmetic.cc
+++ b/tensorflow/compiler/xla/client/lib/arithmetic.cc
@@ -122,8 +122,7 @@ StatusOr<XlaOp> Any(const XlaOp& predicates, XlaBuilder* builder) {
}
namespace {
-xla::XlaOp FloatLiteral(xla::XlaBuilder* b, PrimitiveType data_type,
- float value) {
+XlaOp FloatLiteral(XlaBuilder* b, PrimitiveType data_type, float value) {
return b->ConvertElementType(b->ConstantR0(value), data_type);
}
@@ -165,10 +164,11 @@ std::array<float, 6> kErfUCoefficient = {
// Evaluate the polynomial given coefficients and `x`.
// N.B. Coefficients should be supplied in decreasing order.
-xla::XlaOp EvaluatePolynomial(xla::XlaBuilder* b, const xla::XlaOp& x,
- tensorflow::gtl::ArraySlice<float> coefficients,
- PrimitiveType data_type) {
- xla::XlaOp poly = FloatLiteral(b, data_type, 0.0);
+XlaOp EvaluatePolynomial(const XlaOp& x,
+ tensorflow::gtl::ArraySlice<float> coefficients,
+ PrimitiveType data_type) {
+ XlaBuilder* b = x.builder();
+ XlaOp poly = FloatLiteral(b, data_type, 0.0);
for (float c : coefficients) {
poly = b->Add(b->Mul(poly, x), FloatLiteral(b, data_type, c));
}
@@ -176,32 +176,32 @@ xla::XlaOp EvaluatePolynomial(xla::XlaBuilder* b, const xla::XlaOp& x,
}
// Compute an approximation of the error function complement (1 - erf(x)).
-xla::XlaOp Erfc(xla::XlaBuilder* b, const xla::XlaOp& x,
- PrimitiveType data_type) {
- xla::XlaOp zero = FloatLiteral(b, data_type, 0.0);
- xla::XlaOp two = FloatLiteral(b, data_type, 2.0);
- xla::XlaOp eight = FloatLiteral(b, data_type, 8.0);
+XlaOp Erfc(const XlaOp& x, PrimitiveType data_type) {
+ XlaBuilder* b = x.builder();
+ XlaOp zero = FloatLiteral(b, data_type, 0.0);
+ XlaOp two = FloatLiteral(b, data_type, 2.0);
+ XlaOp eight = FloatLiteral(b, data_type, 8.0);
- xla::XlaOp abs_x = b->Abs(x);
- xla::XlaOp z = b->Exp(b->Mul(b->Neg(x), x));
+ XlaOp abs_x = b->Abs(x);
+ XlaOp z = b->Exp(b->Mul(b->Neg(x), x));
- xla::XlaOp pp = EvaluatePolynomial(b, abs_x, kErfcPCoefficient, data_type);
- xla::XlaOp pq = EvaluatePolynomial(b, abs_x, kErfcQCoefficient, data_type);
- xla::XlaOp pr = EvaluatePolynomial(b, abs_x, kErfcRCoefficient, data_type);
- xla::XlaOp ps = EvaluatePolynomial(b, abs_x, kErfcSCoefficient, data_type);
+ XlaOp pp = EvaluatePolynomial(abs_x, kErfcPCoefficient, data_type);
+ XlaOp pq = EvaluatePolynomial(abs_x, kErfcQCoefficient, data_type);
+ XlaOp pr = EvaluatePolynomial(abs_x, kErfcRCoefficient, data_type);
+ XlaOp ps = EvaluatePolynomial(abs_x, kErfcSCoefficient, data_type);
- xla::XlaOp y = b->Select(b->Lt(abs_x, eight), b->Div(b->Mul(z, pp), pq),
- b->Div(b->Mul(z, pr), ps));
+ XlaOp y = b->Select(b->Lt(abs_x, eight), b->Div(b->Mul(z, pp), pq),
+ b->Div(b->Mul(z, pr), ps));
return b->Select(b->Lt(x, zero), b->Sub(two, y), y);
}
// Compute a polynomial approximation of the error function.
-xla::XlaOp Erf(xla::XlaBuilder* b, const xla::XlaOp& x,
- PrimitiveType data_type) {
- xla::XlaOp z = b->Mul(x, x);
- xla::XlaOp pt = EvaluatePolynomial(b, z, kErfTCoefficient, data_type);
- xla::XlaOp pu = EvaluatePolynomial(b, z, kErfUCoefficient, data_type);
+XlaOp Erf(const XlaOp& x, PrimitiveType data_type) {
+ XlaBuilder* b = x.builder();
+ XlaOp z = b->Mul(x, x);
+ XlaOp pt = EvaluatePolynomial(z, kErfTCoefficient, data_type);
+ XlaOp pu = EvaluatePolynomial(z, kErfUCoefficient, data_type);
return b->Div(b->Mul(x, pt), pu);
}
@@ -217,7 +217,8 @@ xla::XlaOp Erf(xla::XlaBuilder* b, const xla::XlaOp& x,
// p = sum_{i=1}^n gq[i]*w^i
// }
// return p*x
-StatusOr<XlaOp> ErfInv(xla::XlaBuilder* b, const xla::XlaOp& x) {
+StatusOr<XlaOp> ErfInv(const XlaOp& x) {
+ XlaBuilder* b = x.builder();
TF_ASSIGN_OR_RETURN(Shape shape, b->GetShape(x));
constexpr int kDegree = 9;
constexpr std::array<float, 9> w_less_than_5_constants = {
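EvaluatePolynomial is Horner's rule: with coefficients supplied in decreasing order of degree, each step folds one coefficient into the running value. A plain-Python sketch:

    def evaluate_polynomial(x, coefficients):
        # Horner's rule: p = (...(0*x + c_n)*x + c_{n-1})*x + ... + c_0,
        # with coefficients given from highest degree down.
        poly = 0.0
        for c in coefficients:
            poly = poly * x + c
        return poly

    assert evaluate_polynomial(2.0, [3.0, 0.0, 1.0]) == 13.0  # 3x^2 + 1 at x=2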
diff --git a/tensorflow/compiler/xla/client/lib/arithmetic.h b/tensorflow/compiler/xla/client/lib/arithmetic.h
index efdcc7e198..33a8254274 100644
--- a/tensorflow/compiler/xla/client/lib/arithmetic.h
+++ b/tensorflow/compiler/xla/client/lib/arithmetic.h
@@ -57,20 +57,18 @@ StatusOr<XlaOp> Any(const XlaOp& predicates, XlaBuilder* builder);
// Evaluate the polynomial given coefficients and `x`.
// N.B. Coefficients should be supplied in decreasing order.
-xla::XlaOp EvaluatePolynomial(xla::XlaBuilder* b, const xla::XlaOp& x,
- tensorflow::gtl::ArraySlice<double> coefficients,
- PrimitiveType data_type);
+XlaOp EvaluatePolynomial(const XlaOp& x,
+ tensorflow::gtl::ArraySlice<float> coefficients,
+ PrimitiveType data_type);
// Compute an approximation of the error function complement (1 - erf(x)).
-xla::XlaOp Erfc(xla::XlaBuilder* b, const xla::XlaOp& x,
- PrimitiveType data_type);
+XlaOp Erfc(const XlaOp& x, PrimitiveType data_type);
// Compute an approximation of the error function.
-xla::XlaOp Erf(xla::XlaBuilder* b, const xla::XlaOp& x,
- PrimitiveType data_type);
+XlaOp Erf(const XlaOp& x, PrimitiveType data_type);
// Compute an approximation of the inverse of the error function.
-StatusOr<XlaOp> ErfInv(xla::XlaBuilder* b, const xla::XlaOp& x);
+StatusOr<XlaOp> ErfInv(const XlaOp& x);
} // namespace xla
diff --git a/tensorflow/compiler/xla/client/xla_client/xla_builder.cc b/tensorflow/compiler/xla/client/xla_client/xla_builder.cc
index 805223dcc4..256667cbe0 100644
--- a/tensorflow/compiler/xla/client/xla_client/xla_builder.cc
+++ b/tensorflow/compiler/xla/client/xla_client/xla_builder.cc
@@ -96,12 +96,12 @@ void XlaBuilder::NoteError(const Status& error) {
XlaOp XlaBuilder::NoteErrorOrReturn(
const std::function<StatusOr<XlaOp>()>& op_creator) {
if (!first_error_.ok()) {
- return {};
+ return XlaOp(this);
}
auto op = op_creator();
if (!op.ok()) {
NoteError(op.status());
- return {};
+ return XlaOp(this);
}
return op.ConsumeValueOrDie();
}
@@ -1990,9 +1990,4 @@ StatusOr<const HloInstructionProto*> XlaBuilder::LookUpInstruction(
return &instructions_[op.handle()];
}
-XlaOp XlaBuilder::UnimplementedOp() {
- NoteError(Unimplemented("Op not implemented"));
- return {};
-}
-
} // namespace xla
diff --git a/tensorflow/compiler/xla/client/xla_client/xla_builder.h b/tensorflow/compiler/xla/client/xla_client/xla_builder.h
index 0329e42ed1..f18306fff0 100644
--- a/tensorflow/compiler/xla/client/xla_client/xla_builder.h
+++ b/tensorflow/compiler/xla/client/xla_client/xla_builder.h
@@ -46,10 +46,10 @@ class XlaBuilder;
// instruction as an operand.
class XlaOp {
public:
- XlaOp() : handle_(0), builder_(nullptr) {}
+ XlaOp() : handle_(-1), builder_(nullptr) {}
~XlaOp() {}
- const XlaBuilder* builder() const { return builder_; }
+ XlaBuilder* builder() const { return builder_; }
bool operator==(const XlaOp& rhs) const {
return handle_ == rhs.handle_ && builder_ == rhs.builder_;
@@ -59,12 +59,16 @@ class XlaOp {
return handle_ != rhs.handle_ || builder_ != rhs.builder_;
}
+ // Returns true if the XlaOp represents a valid, non-erroneous value.
+ bool valid() const { return handle_ >= 0; }
+
friend std::ostream& operator<<(std::ostream& out, const XlaOp& op) {
out << op.handle();
return out;
}
private:
+ explicit XlaOp(XlaBuilder* builder) : handle_(-1), builder_(builder) {}
XlaOp(int64 handle, XlaBuilder* builder)
: handle_(handle), builder_(builder) {}
@@ -72,8 +76,12 @@ class XlaOp {
friend class XlaBuilder;
+ // < 0 means "invalid handle".
int64 handle_;
- XlaBuilder* builder_; // Not owned.
+
+ // Not owned. Non-null for any handle returned by XlaBuilder, even if the
+ // handle is invalid.
+ XlaBuilder* builder_;
};
// A convenient interface for building up computations.
@@ -830,9 +838,6 @@ class XlaBuilder {
XlaOp NoteErrorOrReturn(const std::function<StatusOr<XlaOp>()>& op_creator);
- // Helper method that creates an empty op and notes error.
- XlaOp UnimplementedOp();
-
StatusOr<const HloInstructionProto*> LookUpInstruction(const XlaOp& op) const;
// Internal helper method that does the building for an arbitrary unary op.
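The sentinel convention above, restated as a tiny Python model (purely illustrative): a default-constructed op has neither handle nor builder; an op produced by a failed builder call keeps its builder but carries a negative handle; only a non-negative handle is valid.

    class FakeXlaOp:
        # Illustrative model of the new XlaOp convention: handle < 0 marks an
        # invalid (erroneous) op, while the builder reference is retained so
        # errors can still be attributed to the builder that produced them.
        def __init__(self, handle=-1, builder=None):
            self.handle = handle
            self.builder = builder

        def valid(self):
            return self.handle >= 0

    assert not FakeXlaOp().valid()                   # default: invalid
    assert FakeXlaOp(handle=3, builder="b").valid()  # real handle: valid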
diff --git a/tensorflow/compiler/xla/python/xla_client.py b/tensorflow/compiler/xla/python/xla_client.py
index c025127c3c..a1fc25303c 100644
--- a/tensorflow/compiler/xla/python/xla_client.py
+++ b/tensorflow/compiler/xla/python/xla_client.py
@@ -257,9 +257,12 @@ class Shape(object):
self._dimensions == other._dimensions and
self._minor_to_major == other._minor_to_major)
+ def __ne__(self, other):
+ return not self == other
+
def __repr__(self):
return ('xla_client.Shape(_dtype={!r}, _dimensions={!r}, '
- '_is_tuple={!r}), _minor_to_major={!r}').format(
+ '_is_tuple={!r}, _minor_to_major={!r})').format(
self._dtype, self._dimensions, self._is_tuple,
self._minor_to_major)
diff --git a/tensorflow/compiler/xla/service/algebraic_simplifier.cc b/tensorflow/compiler/xla/service/algebraic_simplifier.cc
index 1fc8fb9b69..d8a9aba834 100644
--- a/tensorflow/compiler/xla/service/algebraic_simplifier.cc
+++ b/tensorflow/compiler/xla/service/algebraic_simplifier.cc
@@ -75,21 +75,22 @@ bool TransposeIsBitcast(const HloInstruction* transpose) {
transpose->dimensions());
}
-// Returns true if the given reshape produces a result which is bit-wise
+// Returns true if the given reshape/copy produces a result which is bit-wise
// identical to its operand and thus may be replaced with a bitcast.
//
// This function is conservative -- even if this function returns false, the
// reshape may still be a bitcast. For example, a reshape from [28x28] to [784].
-bool ReshapeIsBitcast(
- const HloInstruction* reshape,
+bool ReshapeOrCopyIsBitcast(
+ const HloInstruction* instr,
const AlgebraicSimplifier::ValidBitcastCallback& valid_bitcast_callback) {
- CHECK_EQ(HloOpcode::kReshape, reshape->opcode());
+ CHECK(HloOpcode::kReshape == instr->opcode() ||
+ HloOpcode::kCopy == instr->opcode());
- const HloInstruction* operand = reshape->operand(0);
+ const HloInstruction* operand = instr->operand(0);
// Can't insert bitcasts if the compiler used a memory layout which isn't
// compatible.
- return ShapeUtil::ReshapeIsBitcast(operand->shape(), reshape->shape()) &&
- valid_bitcast_callback(operand->shape(), reshape->shape());
+ return ShapeUtil::ReshapeIsBitcast(operand->shape(), instr->shape()) &&
+ valid_bitcast_callback(operand->shape(), instr->shape());
}
// AlgebraicSimplifierVisitor traverses the HLO computation and reduces certain
@@ -433,7 +434,15 @@ Status AlgebraicSimplifierVisitor::HandleCopy(HloInstruction* copy) {
copy, HloInstruction::CreateUnary(copy->shape(), HloOpcode::kCopy, op));
}
// All copies can be eliminated (assuming layout constraints are satisfied).
- ReplaceInstructionIfSameShape(copy, copy->mutable_operand(0));
+ if (ReplaceInstructionIfSameShape(copy, copy->mutable_operand(0))) {
+ return Status::OK();
+ }
+
+ if (is_layout_sensitive_ &&
+ ReshapeOrCopyIsBitcast(copy, valid_bitcast_callback_)) {
+ ReplaceWithBitcast(copy);
+ }
+
return Status::OK();
}
@@ -1672,7 +1681,7 @@ Status AlgebraicSimplifierVisitor::HandleReshape(HloInstruction* reshape) {
// Make this a bitcast if possible.
if (is_layout_sensitive_ &&
- ReshapeIsBitcast(reshape, valid_bitcast_callback_)) {
+ ReshapeOrCopyIsBitcast(reshape, valid_bitcast_callback_)) {
ReplaceWithBitcast(reshape);
return Status::OK();
}
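The bitcast condition has a familiar NumPy analogue: a reshape (or, after this change, a copy) can become a bitcast exactly when the physical element order is already what the consumer expects, so no bytes move. For instance:

    import numpy as np

    a = np.arange(28 * 28, dtype=np.float32).reshape(28, 28)
    b = a.reshape(784)  # the [28x28] -> [784] example from the comment above
    # NumPy returns a view over the same buffer -- the analogue of
    # replacing the reshape with a bitcast.
    assert np.shares_memory(a, b)
    assert a.tobytes() == b.tobytes()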
diff --git a/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc b/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc
index 2605b0488c..49cc0b808b 100644
--- a/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc
+++ b/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc
@@ -1159,6 +1159,33 @@ TEST_F(AlgebraicSimplifierTest, RemoveCopy) {
EXPECT_THAT(computation->root_instruction(), param0);
}
+TEST_F(AlgebraicSimplifierTest, CopyEqualsBitcast) {
+ HloComputation::Builder builder(TestName());
+ HloInstruction* param =
+ builder.AddInstruction(HloInstruction::CreateParameter(
+ 0, ShapeUtil::MakeShape(F32, {1, 14, 14, 64}), "param"));
+ *param->mutable_shape()->mutable_layout() =
+ LayoutUtil::MakeLayout({0, 1, 2, 3});
+ HloInstruction* copy = builder.AddInstruction(HloInstruction::CreateUnary(
+ ShapeUtil::MakeShape(F32, {1, 14, 14, 64}), HloOpcode::kCopy, param));
+ *copy->mutable_shape()->mutable_layout() =
+ LayoutUtil::MakeLayout({1, 2, 0, 3});
+ auto computation = module().AddEntryComputation(builder.Build());
+ EXPECT_THAT(computation->root_instruction(), op::Copy(param));
+
+ AlgebraicSimplifier simplifier1(/*is_layout_sensitive=*/true,
+ non_bitcasting_callback());
+ ASSERT_FALSE(simplifier1.Run(&module()).ValueOrDie());
+ // Verify that the copy is not replaced.
+ EXPECT_THAT(computation->root_instruction(), op::Copy(param));
+
+ AlgebraicSimplifier simplifier2(/*is_layout_sensitive=*/true,
+ bitcasting_callback());
+ ASSERT_TRUE(simplifier2.Run(&module()).ValueOrDie());
+ // Verify that the copy is replaced.
+ EXPECT_THAT(computation->root_instruction(), op::Bitcast(param));
+}
+
// Test that unary concatenates are removed.
TEST_F(AlgebraicSimplifierTest, RemoveUnaryConcatenate) {
Shape r1f32 = ShapeUtil::MakeShape(F32, {100});
diff --git a/tensorflow/contrib/autograph/README.md b/tensorflow/contrib/autograph/README.md
index 829a57d8e6..7e26f47118 100644
--- a/tensorflow/contrib/autograph/README.md
+++ b/tensorflow/contrib/autograph/README.md
@@ -4,7 +4,7 @@ IMPORTANT: AutoGraph is alpha software, and under active development. Expect rou
AutoGraph is a Python to TensorFlow compiler.
-With AutoGraph, you can write [Eager style](https://www.tensorflow.org/programmers_guide/eager) code in a concise manner, and run it as a TensorFlow graph. AutoGraph uses source code transformation and partial evaluation to generate Python code that builds an equivalent TensorFlow subgraph. The result is code that behaves like ops and can be freely combined with other TensorFlow ops.
+With AutoGraph, you can write [Eager style](https://www.tensorflow.org/guide/eager) code in a concise manner, and run it as a TensorFlow graph. AutoGraph uses source code transformation and partial evaluation to generate Python code that builds an equivalent TensorFlow subgraph. The result is code that behaves like ops and can be freely combined with other TensorFlow ops.
For example, this Python function:
diff --git a/tensorflow/contrib/bayesflow/python/kernel_tests/monte_carlo_test.py b/tensorflow/contrib/bayesflow/python/kernel_tests/monte_carlo_test.py
index d9e23646d8..9e6a146f67 100644
--- a/tensorflow/contrib/bayesflow/python/kernel_tests/monte_carlo_test.py
+++ b/tensorflow/contrib/bayesflow/python/kernel_tests/monte_carlo_test.py
@@ -29,7 +29,6 @@ from tensorflow.python.framework import dtypes
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import distribution as distribution_lib
-from tensorflow.python.ops.distributions import gamma as gamma_lib
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.platform import test
@@ -256,50 +255,6 @@ class ExpectationTest(test.TestCase):
gradq_approx_kl_normal_normal_,
rtol=0.01, atol=0.)
- def test_docstring_example_gamma(self):
- with self.test_session() as sess:
- num_draws = int(1e5)
- concentration_p = constant_op.constant(1.)
- concentration_q = constant_op.constant(2.)
- p = gamma_lib.Gamma(concentration=concentration_p, rate=1.)
- q = gamma_lib.Gamma(concentration=concentration_q, rate=3.)
- approx_kl_gamma_gamma = monte_carlo_lib.expectation(
- f=lambda x: p.log_prob(x) - q.log_prob(x),
- samples=p.sample(num_draws, seed=42),
- log_prob=p.log_prob,
- use_reparametrization=(p.reparameterization_type
- == distribution_lib.FULLY_REPARAMETERIZED))
- exact_kl_gamma_gamma = kullback_leibler.kl_divergence(p, q)
- [exact_kl_gamma_gamma_, approx_kl_gamma_gamma_] = sess.run([
- exact_kl_gamma_gamma, approx_kl_gamma_gamma])
- self.assertEqual(
- False,
- p.reparameterization_type == distribution_lib.FULLY_REPARAMETERIZED)
- self.assertAllClose(exact_kl_gamma_gamma_, approx_kl_gamma_gamma_,
- rtol=0.01, atol=0.)
-
- # Compare gradients. (Not present in `docstring`.)
- gradp = lambda fp: gradients_impl.gradients(fp, concentration_p)[0]
- gradq = lambda fq: gradients_impl.gradients(fq, concentration_q)[0]
- [
- gradp_exact_kl_gamma_gamma_,
- gradq_exact_kl_gamma_gamma_,
- gradp_approx_kl_gamma_gamma_,
- gradq_approx_kl_gamma_gamma_,
- ] = sess.run([
- gradp(exact_kl_gamma_gamma),
- gradq(exact_kl_gamma_gamma),
- gradp(approx_kl_gamma_gamma),
- gradq(approx_kl_gamma_gamma),
- ])
- # Notice that variance (i.e., `rtol`) is higher when using score-trick.
- self.assertAllClose(gradp_exact_kl_gamma_gamma_,
- gradp_approx_kl_gamma_gamma_,
- rtol=0.05, atol=0.)
- self.assertAllClose(gradq_exact_kl_gamma_gamma_,
- gradq_approx_kl_gamma_gamma_,
- rtol=0.03, atol=0.)
-
if __name__ == '__main__':
test.main()
diff --git a/tensorflow/contrib/checkpoint/__init__.py b/tensorflow/contrib/checkpoint/__init__.py
index 38856417c0..8c1ce5c2a2 100644
--- a/tensorflow/contrib/checkpoint/__init__.py
+++ b/tensorflow/contrib/checkpoint/__init__.py
@@ -41,11 +41,11 @@ from tensorflow.contrib.checkpoint.python.containers import UniqueNameTracker
from tensorflow.contrib.checkpoint.python.split_dependency import split_dependency
from tensorflow.contrib.checkpoint.python.visualize import dot_graph_from_checkpoint
from tensorflow.core.protobuf.checkpointable_object_graph_pb2 import CheckpointableObjectGraph
-from tensorflow.python.training.checkpointable.base import Checkpointable
from tensorflow.python.training.checkpointable.base import CheckpointableBase
-from tensorflow.python.training.checkpointable.base import NoDependency
from tensorflow.python.training.checkpointable.data_structures import List
from tensorflow.python.training.checkpointable.data_structures import Mapping
+from tensorflow.python.training.checkpointable.tracking import Checkpointable
+from tensorflow.python.training.checkpointable.tracking import NoDependency
from tensorflow.python.training.checkpointable.util import capture_dependencies
from tensorflow.python.training.checkpointable.util import list_objects
from tensorflow.python.training.checkpointable.util import object_metadata
diff --git a/tensorflow/contrib/checkpoint/python/containers_test.py b/tensorflow/contrib/checkpoint/python/containers_test.py
index 12b99d3e22..64d056bd68 100644
--- a/tensorflow/contrib/checkpoint/python/containers_test.py
+++ b/tensorflow/contrib/checkpoint/python/containers_test.py
@@ -26,8 +26,8 @@ from tensorflow.python.keras import layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.platform import test
-from tensorflow.python.training.checkpointable import base as checkpointable
-from tensorflow.python.training.checkpointable import util as checkpointable_utils
+from tensorflow.python.training.checkpointable import tracking
+from tensorflow.python.training.checkpointable import util
class UniqueNameTrackerTests(test.TestCase):
@@ -48,11 +48,11 @@ class UniqueNameTrackerTests(test.TestCase):
slots.track(y, "y")
self.evaluate((x1.initializer, x2.initializer, x3.initializer,
y.initializer))
- save_root = checkpointable_utils.Checkpoint(slots=slots)
+ save_root = util.Checkpoint(slots=slots)
save_path = save_root.save(checkpoint_prefix)
- restore_slots = checkpointable.Checkpointable()
- restore_root = checkpointable_utils.Checkpoint(
+ restore_slots = tracking.Checkpointable()
+ restore_root = util.Checkpoint(
slots=restore_slots)
status = restore_root.restore(save_path)
restore_slots.x = resource_variable_ops.ResourceVariable(0.)
@@ -67,7 +67,7 @@ class UniqueNameTrackerTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testExample(self):
- class SlotManager(checkpointable.Checkpointable):
+ class SlotManager(tracking.Checkpointable):
def __init__(self):
self.slotdeps = containers.UniqueNameTracker()
@@ -83,11 +83,11 @@ class UniqueNameTrackerTests(test.TestCase):
manager = SlotManager()
self.evaluate([v.initializer for v in manager.slots])
- checkpoint = checkpointable_utils.Checkpoint(slot_manager=manager)
+ checkpoint = util.Checkpoint(slot_manager=manager)
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = checkpoint.save(checkpoint_prefix)
- metadata = checkpointable_utils.object_metadata(save_path)
+ metadata = util.object_metadata(save_path)
dependency_names = []
for node in metadata.nodes:
for child in node.children:
diff --git a/tensorflow/contrib/checkpoint/python/split_dependency_test.py b/tensorflow/contrib/checkpoint/python/split_dependency_test.py
index 43c5d6515b..00a805af25 100644
--- a/tensorflow/contrib/checkpoint/python/split_dependency_test.py
+++ b/tensorflow/contrib/checkpoint/python/split_dependency_test.py
@@ -23,8 +23,9 @@ from tensorflow.python.eager import test
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
-from tensorflow.python.training.checkpointable import base as checkpointable
-from tensorflow.python.training.checkpointable import util as checkpointable_utils
+from tensorflow.python.training.checkpointable import base
+from tensorflow.python.training.checkpointable import tracking
+from tensorflow.python.training.checkpointable import util
def _split_variable_closure(variable):
@@ -43,7 +44,7 @@ def _combine_variable_closure(variable):
return _consume_restore_buffer_fn
-class SaveTensorSlicesAsDeps(checkpointable.CheckpointableBase):
+class SaveTensorSlicesAsDeps(base.CheckpointableBase):
def __init__(self):
self.combined = resource_variable_ops.ResourceVariable([0., 0., 0., 0.])
@@ -58,14 +59,14 @@ class SaveTensorSlicesAsDeps(checkpointable.CheckpointableBase):
self._track_checkpointable(dep, name=name)
-class HasRegularDeps(checkpointable.Checkpointable):
+class HasRegularDeps(tracking.Checkpointable):
def __init__(self):
self.first_half = resource_variable_ops.ResourceVariable([0., 0.])
self.second_half = resource_variable_ops.ResourceVariable([0., 0.])
-class OnlyOneDep(checkpointable.Checkpointable):
+class OnlyOneDep(tracking.Checkpointable):
def __init__(self):
self.first_half = resource_variable_ops.ResourceVariable([0., 0.])
@@ -75,7 +76,7 @@ class SplitTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testSaveRestoreSplitDep(self):
- save_checkpoint = checkpointable_utils.Checkpoint(
+ save_checkpoint = util.Checkpoint(
dep=SaveTensorSlicesAsDeps())
self.evaluate(save_checkpoint.dep.combined.assign([1., 2., 3., 4.]))
checkpoint_directory = self.get_temp_dir()
@@ -83,7 +84,7 @@ class SplitTests(test.TestCase):
save_path = save_checkpoint.save(checkpoint_prefix)
regular_deps = HasRegularDeps()
- regular_restore_checkpoint = checkpointable_utils.Checkpoint(
+ regular_restore_checkpoint = util.Checkpoint(
dep=regular_deps)
regular_restore_checkpoint.restore(
save_path).assert_consumed().run_restore_ops()
@@ -91,7 +92,7 @@ class SplitTests(test.TestCase):
self.assertAllEqual([3., 4.], self.evaluate(regular_deps.second_half))
one_dep = OnlyOneDep()
- one_dep_restore_checkpoint = checkpointable_utils.Checkpoint(dep=one_dep)
+ one_dep_restore_checkpoint = util.Checkpoint(dep=one_dep)
status = one_dep_restore_checkpoint.restore(save_path)
with self.assertRaises(AssertionError):
# Missing the second dependency.
@@ -99,7 +100,7 @@ class SplitTests(test.TestCase):
status.run_restore_ops()
self.assertAllEqual([1., 2.], self.evaluate(one_dep.first_half))
- restore_checkpoint = checkpointable_utils.Checkpoint()
+ restore_checkpoint = util.Checkpoint()
status = restore_checkpoint.restore(save_path)
restore_checkpoint.dep = SaveTensorSlicesAsDeps()
status.assert_consumed().run_restore_ops()
diff --git a/tensorflow/contrib/coder/python/layers/entropybottleneck.py b/tensorflow/contrib/coder/python/layers/entropybottleneck.py
index 0fbe3081af..0c997bd4fd 100644
--- a/tensorflow/contrib/coder/python/layers/entropybottleneck.py
+++ b/tensorflow/contrib/coder/python/layers/entropybottleneck.py
@@ -28,7 +28,7 @@ from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
-from tensorflow.python.keras import engine
+from tensorflow.python.keras.engine import base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import init_ops
@@ -40,7 +40,7 @@ from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
-class EntropyBottleneck(engine.Layer):
+class EntropyBottleneck(base_layer.Layer):
"""Entropy bottleneck layer.
This layer can be used to model the entropy (the amount of information
@@ -262,7 +262,7 @@ class EntropyBottleneck(engine.Layer):
self._range_coder_precision = int(range_coder_precision)
self._data_format = data_format
self._channel_axis(2) # trigger ValueError early
- self.input_spec = engine.InputSpec(min_ndim=2)
+ self.input_spec = base_layer.InputSpec(min_ndim=2)
@property
def init_scale(self):
@@ -357,7 +357,7 @@ class EntropyBottleneck(engine.Layer):
channels = input_shape[channel_axis].value
if channels is None:
raise ValueError("The channel dimension of the inputs must be defined.")
- self.input_spec = engine.InputSpec(
+ self.input_spec = base_layer.InputSpec(
ndim=input_shape.ndims, axes={channel_axis: channels})
filters = (1,) + self.filters + (1,)
scale = self.init_scale ** (1 / (len(self.filters) + 1))
diff --git a/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py b/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py
index 8822a7523f..748d7cd011 100644
--- a/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py
+++ b/tensorflow/contrib/cudnn_rnn/python/ops/cudnn_rnn_ops.py
@@ -33,7 +33,7 @@ from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.training import saver
-from tensorflow.python.training.checkpointable import base as checkpointable_lib
+from tensorflow.python.training.checkpointable import tracking as checkpointable_lib
CUDNN_RNN_UNIDIRECTION = "unidirectional"
CUDNN_RNN_BIDIRECTION = "bidirectional"
diff --git a/tensorflow/contrib/data/__init__.py b/tensorflow/contrib/data/__init__.py
index 2a4cf877f0..156538b4e0 100644
--- a/tensorflow/contrib/data/__init__.py
+++ b/tensorflow/contrib/data/__init__.py
@@ -20,7 +20,7 @@ be used in conjunction with the @{tf.data.Dataset} API. Note that the
guarantees as `tf.data`, but we will provide deprecation advice in advance of
removing existing functionality.
-See the @{$datasets$Importing Data} Programmer's Guide for an overview.
+See @{$guide/datasets$Importing Data} for an overview.
@@Counter
@@CheckpointInputPipelineHook
diff --git a/tensorflow/contrib/data/python/kernel_tests/batch_dataset_op_test.py b/tensorflow/contrib/data/python/kernel_tests/batch_dataset_op_test.py
index 4c60232308..af97fbf87a 100644
--- a/tensorflow/contrib/data/python/kernel_tests/batch_dataset_op_test.py
+++ b/tensorflow/contrib/data/python/kernel_tests/batch_dataset_op_test.py
@@ -641,6 +641,80 @@ class BatchDatasetTest(test.TestCase, parameterized.TestCase):
"number of elements does not match"):
sess.run(get_next)
+ def testMapAndBatchImplicitDispose(self):
+ # Tests whether a map-and-batch dataset is cleaned up correctly when
+ # the pipeline is not run to exhaustion.
+ # The pipeline is TensorSliceDataset -> RepeatDataset(1000) ->
+ # MapAndBatchDataset(f=square_3, batch_size=100).
+ components = (np.arange(1000),
+ np.array([[1, 2, 3]]) * np.arange(1000)[:, np.newaxis],
+ np.array(37.0) * np.arange(1000))
+
+ def _map_fn(x, y, z):
+ return math_ops.square(x), math_ops.square(y), math_ops.square(z)
+
+ dataset = dataset_ops.Dataset.from_tensor_slices(components).repeat(
+ 1000).apply(batching.map_and_batch(_map_fn, batch_size=100))
+ dataset = dataset.prefetch(5)
+ iterator = dataset.make_one_shot_iterator()
+ get_next = iterator.get_next()
+
+ with self.test_session() as sess:
+ for _ in range(3):
+ sess.run(get_next)
+
+ @parameterized.parameters(0, 5, 10, 90, 95, 99)
+ def testMapAndBatchOutOfRangeError(self, threshold):
+
+ def raising_py_fn(i):
+ if i >= threshold:
+ raise StopIteration()
+ else:
+ return i
+
+ iterator = (
+ dataset_ops.Dataset.range(100).apply(
+ batching.map_and_batch(
+ lambda x: script_ops.py_func(raising_py_fn, [x], dtypes.int64),
+ batch_size=10)).make_one_shot_iterator())
+ get_next = iterator.get_next()
+
+ with self.test_session() as sess:
+ for i in range(threshold // 10):
+ self.assertAllEqual([i * 10 + j for j in range(10)], sess.run(get_next))
+ if threshold % 10 != 0:
+ self.assertAllEqual(
+ [threshold // 10 * 10 + j for j in range(threshold % 10)],
+ sess.run(get_next))
+ with self.assertRaises(errors.OutOfRangeError):
+ sess.run(get_next)
+
+ @parameterized.parameters(
+ (False, dtypes.bool),
+ (-42, dtypes.int8),
+ (-42, dtypes.int16),
+ (-42, dtypes.int32),
+ (-42, dtypes.int64),
+ (42, dtypes.uint8),
+ (42, dtypes.uint16),
+ (42.0, dtypes.float16),
+ (42.0, dtypes.float32),
+ (42.0, dtypes.float64),
+ (b"hello", dtypes.string),
+ )
+ def testMapAndBatchTypes(self, element, dtype):
+ def gen():
+ yield element
+
+ dataset = dataset_ops.Dataset.from_generator(gen, dtype).repeat(100).apply(
+ batching.map_and_batch(lambda x: x, batch_size=10))
+
+ get_next = dataset.make_one_shot_iterator().get_next()
+
+ with self.test_session() as sess:
+ for _ in range(10):
+ self.assertAllEqual([element for _ in range(10)], sess.run(get_next))
+
class RestructuredDatasetTest(test.TestCase):
diff --git a/tensorflow/contrib/data/python/kernel_tests/bucketing_test.py b/tensorflow/contrib/data/python/kernel_tests/bucketing_test.py
index c5d2edbbc6..5fc7e51d81 100644
--- a/tensorflow/contrib/data/python/kernel_tests/bucketing_test.py
+++ b/tensorflow/contrib/data/python/kernel_tests/bucketing_test.py
@@ -175,6 +175,28 @@ class GroupByReducerTest(test.TestCase):
dataset.apply(
grouping.group_by_reducer(lambda _: "wrong", reducer))
+ def testTuple(self):
+ def init_fn(_):
+ return np.array([], dtype=np.int64), np.int64(0)
+
+ def reduce_fn(state, value):
+ s1, s2 = state
+ v1, v2 = value
+ return array_ops.concat([s1, [v1]], 0), s2 + v2
+
+ def finalize_fn(s1, s2):
+ return s1, s2
+
+ reducer = grouping.Reducer(init_fn, reduce_fn, finalize_fn)
+ dataset = dataset_ops.Dataset.zip(
+ (dataset_ops.Dataset.range(10), dataset_ops.Dataset.range(10))).apply(
+ grouping.group_by_reducer(lambda x, y: np.int64(0), reducer))
+ get_next = dataset.make_one_shot_iterator().get_next()
+ with self.test_session() as sess:
+ x, y = sess.run(get_next)
+ self.assertAllEqual(x, np.asarray([x for x in range(10)]))
+ self.assertEqual(y, 45)
+
class GroupByWindowTest(test.TestCase):
diff --git a/tensorflow/contrib/eager/README.md b/tensorflow/contrib/eager/README.md
index 4384431e7b..86d203452e 100644
--- a/tensorflow/contrib/eager/README.md
+++ b/tensorflow/contrib/eager/README.md
@@ -44,7 +44,7 @@ Installation instructions at https://www.tensorflow.org/install/
For an introduction to eager execution in TensorFlow, see:
-- [User Guide](https://www.tensorflow.org/programmers_guide/eager) ([source](../../docs_src/programmers_guide/eager.md))
+- [User Guide](https://www.tensorflow.org/guide/eager) ([source](../../docs_src/guide/eager.md))
- Notebook: [Basic Usage](python/examples/notebooks/1_basics.ipynb)
- Notebook: [Gradients](python/examples/notebooks/2_gradients.ipynb)
- Notebook: [Importing Data](python/examples/notebooks/3_datasets.ipynb)
diff --git a/tensorflow/contrib/eager/python/examples/notebooks/3_datasets.ipynb b/tensorflow/contrib/eager/python/examples/notebooks/3_datasets.ipynb
index bfcc7feb07..d268cbcd91 100644
--- a/tensorflow/contrib/eager/python/examples/notebooks/3_datasets.ipynb
+++ b/tensorflow/contrib/eager/python/examples/notebooks/3_datasets.ipynb
@@ -9,7 +9,7 @@
"source": [
"# Eager Execution Tutorial: Importing Data\n",
"\n",
- "This notebook demonstrates the use of the [`tf.data.Dataset` API](https://www.tensorflow.org/programmers_guide/datasets) to build pipelines to feed data to your program. It covers:\n",
+ "This notebook demonstrates the use of the [`tf.data.Dataset` API](https://www.tensorflow.org/guide/datasets) to build pipelines to feed data to your program. It covers:\n",
"\n",
"* Creating a `Dataset`.\n",
"* Iteration over a `Dataset` with eager execution enabled.\n",
@@ -18,7 +18,7 @@
"\n",
"If you're familiar with TensorFlow graphs, the API for constructing the `Dataset` object remains exactly the same when eager execution is enabled, but the process of iterating over elements of the dataset is slightly simpler.\n",
"You can use Python iteration over the `tf.data.Dataset` object and do not need to explicitly create an `tf.data.Iterator` object.\n",
- "As a result, the discussion on iterators in the [Programmer's Guide](https://www.tensorflow.org/programmers_guide/datasets) is not relevant when eager execution is enabled."
+ "As a result, the discussion on iterators in the [TensorFlow Guide](https://www.tensorflow.org/guide/datasets) is not relevant when eager execution is enabled."
]
},
{
@@ -63,7 +63,7 @@
"source": [
"# Step 1: Create a source `Dataset`\n",
"\n",
- "Create a _source_ dataset using one of the factory functions like [`Dataset.from_tensors`](https://www.tensorflow.org/api_docs/python/tf/data/Dataset#from_tensors), [`Dataset.from_tensor_slices`](https://www.tensorflow.org/api_docs/python/tf/data/Dataset#from_tensor_slices) or using objects that read from files like [`TextLineDataset`](https://www.tensorflow.org/api_docs/python/tf/data/TextLineDataset) or [`TFRecordDataset`](https://www.tensorflow.org/api_docs/python/tf/data/TFRecordDataset). See the [Programmer's Guide](https://www.google.com/url?sa=D\u0026q=https%3A%2F%2Fwww.tensorflow.org%2Fprogrammers_guide%2Fdatasets%23reading_input_data) for more information."
+ "Create a _source_ dataset using one of the factory functions like [`Dataset.from_tensors`](https://www.tensorflow.org/api_docs/python/tf/data/Dataset#from_tensors), [`Dataset.from_tensor_slices`](https://www.tensorflow.org/api_docs/python/tf/data/Dataset#from_tensor_slices) or using objects that read from files like [`TextLineDataset`](https://www.tensorflow.org/api_docs/python/tf/data/TextLineDataset) or [`TFRecordDataset`](https://www.tensorflow.org/api_docs/python/tf/data/TFRecordDataset). See the [TensorFlow Guide](https://www.tensorflow.org/guide/datasets#reading_input_data) for more information."
]
},
{
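
A minimal sketch of the iteration pattern the notebook text describes, assuming the TF 1.9-era eager APIs (the values are illustrative):

```python
import tensorflow as tf

tf.enable_eager_execution()

dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3, 4])

# With eager execution enabled, a Dataset is directly iterable; no explicit
# tf.data.Iterator (or get_next()/Session.run boilerplate) is required.
for x in dataset:
    print(x.numpy())  # 1, 2, 3, 4
```
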
diff --git a/tensorflow/contrib/eager/python/g3doc/guide.md b/tensorflow/contrib/eager/python/g3doc/guide.md
index 2d2aba6908..23f33d0230 100644
--- a/tensorflow/contrib/eager/python/g3doc/guide.md
+++ b/tensorflow/contrib/eager/python/g3doc/guide.md
@@ -4,8 +4,8 @@ Eager execution is a feature that makes TensorFlow execute operations
immediately: concrete values are returned, instead of creating a computational
graph that is executed later.
-A user guide is available: https://www.tensorflow.org/programmers_guide/eager
-([source file](../../../../docs_src/programmers_guide/eager.md))
+A user guide is available: https://www.tensorflow.org/guide/eager
+([source file](../../../../docs_src/guide/eager.md))
We welcome feedback through [GitHub issues](https://github.com/tensorflow/tensorflow/labels/comp:eager).
diff --git a/tensorflow/contrib/eager/python/tfe.py b/tensorflow/contrib/eager/python/tfe.py
index 113aa7967c..ca6430253b 100644
--- a/tensorflow/contrib/eager/python/tfe.py
+++ b/tensorflow/contrib/eager/python/tfe.py
@@ -122,7 +122,7 @@ from tensorflow.python.ops.resource_variable_ops import ResourceVariable as Vari
from tensorflow.python.ops.variable_scope import EagerVariableStore
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import template
-from tensorflow.python.training.checkpointable.base import Checkpointable
+from tensorflow.python.training.checkpointable.tracking import Checkpointable
from tensorflow.python.training.checkpointable.util import CheckpointableSaver
from tensorflow.python.training.checkpointable.util import Checkpoint
from tensorflow.python.util.all_util import remove_undocumented
diff --git a/tensorflow/contrib/keras/api/keras/layers/__init__.py b/tensorflow/contrib/keras/api/keras/layers/__init__.py
index 938c881fcb..3327a9f9a6 100644
--- a/tensorflow/contrib/keras/api/keras/layers/__init__.py
+++ b/tensorflow/contrib/keras/api/keras/layers/__init__.py
@@ -20,10 +20,10 @@ from __future__ import print_function
# Generic layers.
# pylint: disable=g-bad-import-order
-from tensorflow.python.keras.engine import Input
-from tensorflow.python.keras.engine import InputLayer
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
+from tensorflow.python.keras.engine.input_layer import Input
+from tensorflow.python.keras.engine.input_layer import InputLayer
# Advanced activations.
from tensorflow.python.keras.layers.advanced_activations import LeakyReLU
diff --git a/tensorflow/contrib/layers/python/layers/layers.py b/tensorflow/contrib/layers/python/layers/layers.py
index b6d63c9640..beeabd6b65 100644
--- a/tensorflow/contrib/layers/python/layers/layers.py
+++ b/tensorflow/contrib/layers/python/layers/layers.py
@@ -2664,6 +2664,7 @@ def separable_convolution2d(
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
+ pointwise_initializer=None,
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
@@ -2705,7 +2706,9 @@ def separable_convolution2d(
`biases_regularizer` are ignored and `biases` are not created nor added.
default set to None for no normalizer function
normalizer_params: Normalization function parameters.
- weights_initializer: An initializer for the weights.
+ weights_initializer: An initializer for the depthwise weights.
+ pointwise_initializer: An initializer for the pointwise weights.
+      Default is None, which means `weights_initializer` is used.
weights_regularizer: Optional regularizer for the weights.
biases_initializer: An initializer for the biases. If None skip biases.
biases_regularizer: Optional regularizer for the biases.
@@ -2737,6 +2740,9 @@ def separable_convolution2d(
custom_getter=layer_variable_getter) as sc:
inputs = ops.convert_to_tensor(inputs)
+ if pointwise_initializer is None:
+ pointwise_initializer = weights_initializer
+
df = ('channels_first'
if data_format and data_format.startswith('NC') else 'channels_last')
if num_outputs is not None:
@@ -2752,7 +2758,7 @@ def separable_convolution2d(
depth_multiplier=depth_multiplier,
use_bias=not normalizer_fn and biases_initializer,
depthwise_initializer=weights_initializer,
- pointwise_initializer=weights_initializer,
+ pointwise_initializer=pointwise_initializer,
bias_initializer=biases_initializer,
depthwise_regularizer=weights_regularizer,
pointwise_regularizer=weights_regularizer,
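
A minimal sketch of the new argument in use, assuming the public `tf.contrib.layers.separable_conv2d` alias (shapes and initializers are illustrative). When `pointwise_initializer` is left as `None`, it falls back to `weights_initializer`, as the code above shows:

```python
import tensorflow as tf

inputs = tf.placeholder(tf.float32, [1, 64, 64, 3])
net = tf.contrib.layers.separable_conv2d(
    inputs,
    num_outputs=32,
    kernel_size=3,
    depth_multiplier=1,
    # Depthwise and pointwise kernels can now be initialized differently.
    weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
    pointwise_initializer=tf.zeros_initializer())
```
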
diff --git a/tensorflow/contrib/lite/g3doc/apis.md b/tensorflow/contrib/lite/g3doc/apis.md
index 50cc146a87..a591a353dd 100644
--- a/tensorflow/contrib/lite/g3doc/apis.md
+++ b/tensorflow/contrib/lite/g3doc/apis.md
@@ -7,6 +7,9 @@ no surprise that the APIs try to avoid unnecessary copies at the expense of
convenience. Similarly, consistency with TensorFlow APIs was not an explicit
goal and some variance is to be expected.
+There is also a Python API for TensorFlow Lite described
+[here](../toco/g3doc/python_api.md#interpreter).
+
## C++
In order to run the inference model in TensorFlow Lite, one has to load the
diff --git a/tensorflow/contrib/lite/java/aar_with_jni.bzl b/tensorflow/contrib/lite/java/aar_with_jni.bzl
index 4450bc9085..db837cf29e 100644
--- a/tensorflow/contrib/lite/java/aar_with_jni.bzl
+++ b/tensorflow/contrib/lite/java/aar_with_jni.bzl
@@ -1,5 +1,7 @@
"""Generate zipped aar file including different variants of .so in jni folder."""
+load("@build_bazel_rules_android//android:rules.bzl", "android_binary")
+
def aar_with_jni(name, android_library):
# Generate dummy AndroidManifest.xml for dummy apk usage
# (dummy apk is generated by <name>_dummy_app_for_so target below)
@@ -19,7 +21,7 @@ EOF
# Generate dummy apk including .so files and later we extract out
# .so files and throw away the apk.
- native.android_binary(
+ android_binary(
name = name + "_dummy_app_for_so",
manifest = name + "_generated_AndroidManifest.xml",
custom_package = "dummy.package.for.so",
diff --git a/tensorflow/contrib/lite/kernels/lstm.cc b/tensorflow/contrib/lite/kernels/lstm.cc
index 1dda97c101..3577ae6caa 100644
--- a/tensorflow/contrib/lite/kernels/lstm.cc
+++ b/tensorflow/contrib/lite/kernels/lstm.cc
@@ -24,6 +24,7 @@ limitations under the License.
#include "tensorflow/contrib/lite/builtin_op_data.h"
#include "tensorflow/contrib/lite/context.h"
#include "tensorflow/contrib/lite/kernels/activation_functor.h"
+#include "tensorflow/contrib/lite/kernels/gemm_support.h"
#include "tensorflow/contrib/lite/kernels/internal/kernel_utils.h"
#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
#include "tensorflow/contrib/lite/kernels/internal/tensor.h"
@@ -856,13 +857,6 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
TF_LITE_ENSURE(context, node->inputs->size == kInputNum);
TF_LITE_ENSURE(context, node->outputs->size == kOutputNum);
- // Only Float32 is supported currently.
- // TODO(ycling): Implement quantize uint8 support.
- for (int index = 0; index < node->inputs->size; ++index) {
- TfLiteTensor* tensor = &context->tensors[node->inputs->data[index]];
- TF_LITE_ENSURE_EQ(context, tensor->type, kTfLiteFloat32);
- }
-
const TfLiteTensor* input = GetInput(context, node, kInputData);
const TfLiteTensor* prev_activation =
GetInput(context, node, kInputPrevActivation);
@@ -872,15 +866,23 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
TF_LITE_ENSURE_EQ(context, input->dims->size, 2);
const int num_batches = input->dims->data[0];
+ const int input_depth = input->dims->data[1];
TF_LITE_ENSURE_EQ(context, prev_activation->dims->size, 2);
TF_LITE_ENSURE_EQ(context, prev_activation->dims->data[0], num_batches);
+ const int activation_depth = prev_activation->dims->data[1];
+ const int total_depth = input_depth + activation_depth;
TF_LITE_ENSURE_EQ(context, weights->dims->size, 2);
+ TF_LITE_ENSURE_EQ(context, weights->dims->data[0], 4 * activation_depth);
+ TF_LITE_ENSURE_EQ(context, weights->dims->data[1], total_depth);
+
TF_LITE_ENSURE_EQ(context, bias->dims->size, 1);
+ TF_LITE_ENSURE_EQ(context, bias->dims->data[0], 4 * activation_depth);
TF_LITE_ENSURE_EQ(context, prev_state->dims->size, 2);
TF_LITE_ENSURE_EQ(context, prev_state->dims->data[0], num_batches);
+ TF_LITE_ENSURE_EQ(context, prev_state->dims->data[1], activation_depth);
TfLiteTensor* activation_out = GetOutput(context, node, kOutputActivation);
TfLiteTensor* state_out = GetOutput(context, node, kOutputState);
@@ -894,14 +896,15 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
TF_LITE_ENSURE_OK(
context, context->ResizeTensor(context, state_out,
TfLiteIntArrayCopy(prev_state->dims)));
+
TfLiteIntArray* concat_temp_size = TfLiteIntArrayCreate(2);
concat_temp_size->data[0] = num_batches;
- concat_temp_size->data[1] = weights->dims->data[1];
+ concat_temp_size->data[1] = total_depth;
TF_LITE_ENSURE_OK(
context, context->ResizeTensor(context, concat_temp, concat_temp_size));
TfLiteIntArray* activation_temp_size = TfLiteIntArrayCreate(2);
activation_temp_size->data[0] = num_batches;
- activation_temp_size->data[1] = weights->dims->data[0];
+ activation_temp_size->data[1] = 4 * activation_depth;
TF_LITE_ENSURE_OK(context, context->ResizeTensor(context, activation_temp,
activation_temp_size));
@@ -927,18 +930,73 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
TfLiteTensor* activation_temp =
GetOutput(context, node, kOutputActivationTemp);
- optimized_ops::LstmCell(
- // Inputs.
- GetTensorData<float>(input), GetTensorDims(input),
- GetTensorData<float>(prev_activation), GetTensorDims(prev_activation),
- GetTensorData<float>(weights), GetTensorDims(weights),
- GetTensorData<float>(bias), GetTensorDims(bias),
- GetTensorData<float>(prev_state), GetTensorDims(prev_state),
- // Outputs.
- GetTensorData<float>(state_out), GetTensorDims(state_out),
- GetTensorData<float>(activation_out), GetTensorDims(activation_out),
- GetTensorData<float>(concat_temp), GetTensorDims(concat_temp),
- GetTensorData<float>(activation_temp), GetTensorDims(activation_temp));
+ if (input->type == kTfLiteFloat32 &&
+ prev_activation->type == kTfLiteFloat32 &&
+ weights->type == kTfLiteFloat32 && bias->type == kTfLiteFloat32 &&
+ prev_state->type == kTfLiteFloat32 && state_out->type == kTfLiteFloat32 &&
+ activation_out->type == kTfLiteFloat32 &&
+ concat_temp->type == kTfLiteFloat32 &&
+ activation_temp->type == kTfLiteFloat32) {
+ optimized_ops::LstmCell(
+ // Inputs.
+ GetTensorData<float>(input), GetTensorDims(input),
+ GetTensorData<float>(prev_activation), GetTensorDims(prev_activation),
+ GetTensorData<float>(weights), GetTensorDims(weights),
+ GetTensorData<float>(bias), GetTensorDims(bias),
+ GetTensorData<float>(prev_state), GetTensorDims(prev_state),
+ // Outputs.
+ GetTensorData<float>(state_out), GetTensorDims(state_out),
+ GetTensorData<float>(activation_out), GetTensorDims(activation_out),
+ GetTensorData<float>(concat_temp), GetTensorDims(concat_temp),
+ GetTensorData<float>(activation_temp), GetTensorDims(activation_temp));
+ } else if (input->type == kTfLiteUInt8 &&
+ prev_activation->type == kTfLiteUInt8 &&
+ weights->type == kTfLiteUInt8 && bias->type == kTfLiteInt32 &&
+ prev_state->type == kTfLiteInt16 &&
+ state_out->type == kTfLiteInt16 &&
+ activation_out->type == kTfLiteUInt8 &&
+ concat_temp->type == kTfLiteUInt8 &&
+ activation_temp->type == kTfLiteInt16) {
+ gemmlowp::GemmContext* gemm_context = gemm_support::GetFromContext(context);
+ int state_scale_log2_rounded;
+ if (!CheckedLog2(state_out->params.scale, &state_scale_log2_rounded)) {
+ context->ReportError(
+ context,
+ "The internal state of a LSTM cell must have a power-of-two scale.");
+ return kTfLiteError;
+ }
+ const int state_integer_bits = 15 + state_scale_log2_rounded;
+ if (state_integer_bits != 4) {
+ context->ReportError(context,
+ "The only case of quantized LstmCell currently "
+ "supported is with StateIntegerBits==4");
+ return kTfLiteError;
+ }
+
+ double real_accum_multiplier = 4096 * bias->params.scale;
+ int32 accum_multiplier;
+ int accum_shift;
+ tflite::QuantizeMultiplier(real_accum_multiplier, &accum_multiplier,
+ &accum_shift);
+ optimized_ops::LstmCell<4>(
+ // Inputs.
+ GetTensorData<uint8_t>(input), GetTensorDims(input),
+ GetTensorData<uint8_t>(prev_activation), GetTensorDims(prev_activation),
+ GetTensorData<uint8_t>(weights), GetTensorDims(weights),
+ GetTensorData<int32_t>(bias), GetTensorDims(bias),
+ GetTensorData<int16_t>(prev_state), GetTensorDims(prev_state),
+ // Outputs.
+ GetTensorData<int16_t>(state_out), GetTensorDims(state_out),
+ GetTensorData<uint8_t>(activation_out), GetTensorDims(activation_out),
+ GetTensorData<uint8_t>(concat_temp), GetTensorDims(concat_temp),
+ GetTensorData<int16_t>(activation_temp), GetTensorDims(activation_temp),
+ weights->params.zero_point, accum_multiplier, accum_shift,
+ gemm_context);
+ } else {
+ context->ReportError(context,
+ "Unsupported combination of data types for LstmCell");
+ return kTfLiteError;
+ }
// TODO(ycling): Investigate if this copy can be avoided with the 5-inputs
// LSTM kernel.
@@ -952,6 +1010,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
} // namespace basic
void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+ gemm_support::IncrementUsageCounter(context);
+
const auto* params = reinterpret_cast<const TfLiteLSTMParams*>(buffer);
switch (params->kernel_type) {
case kTfLiteLSTMFullKernel:
@@ -961,6 +1021,8 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) {
}
}
void Free(TfLiteContext* context, void* buffer) {
+ gemm_support::DecrementUsageCounter(context);
+
delete reinterpret_cast<OpData*>(buffer);
}
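
To make the quantized path's precondition concrete, here is a hedged Python rendition of the power-of-two state-scale check above (mirroring the `CheckedLog2` and `StateIntegerBits == 4` logic; this is a sketch, not the C++ implementation):

```python
import math

def checked_log2(scale, tolerance=1e-6):
    """Return round(log2(scale)) if scale is (nearly) a power of two, else None."""
    log2_scale = math.log(scale, 2)
    rounded = int(round(log2_scale))
    return rounded if abs(log2_scale - rounded) < tolerance else None

state_scale = 1.0 / 2048.0  # example: int16 state quantized with scale 2^-11
log2_rounded = checked_log2(state_scale)
assert log2_rounded is not None, "state must have a power-of-two scale"

state_integer_bits = 15 + log2_rounded  # int16 leaves 15 bits after the sign
assert state_integer_bits == 4  # the only configuration the kernel accepts
```
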
diff --git a/tensorflow/contrib/lite/python/convert.py b/tensorflow/contrib/lite/python/convert.py
index c038c88945..0ea2630f71 100644
--- a/tensorflow/contrib/lite/python/convert.py
+++ b/tensorflow/contrib/lite/python/convert.py
@@ -25,7 +25,6 @@ import tempfile as _tempfile
from tensorflow.contrib.lite.python import lite_constants
from tensorflow.contrib.lite.toco import model_flags_pb2 as _model_flags_pb2
from tensorflow.contrib.lite.toco import toco_flags_pb2 as _toco_flags_pb2
-from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.platform import resource_loader as _resource_loader
from tensorflow.python.util.lazy_loader import LazyLoader
@@ -135,11 +134,11 @@ def build_toco_convert_protos(input_tensors,
input_tensors: List of input tensors. Type and shape are computed using
`foo.get_shape()` and `foo.dtype`.
output_tensors: List of output tensors (only .name is used from this).
- inference_type: Target data type of arrays in the output file. Currently
- must be `{FLOAT, QUANTIZED_UINT8}`. (default FLOAT)
- inference_input_type: Target data type of input arrays. Allows for a
- different type for input arrays in the case of quantization. Currently
- must be `{FLOAT, QUANTIZED_UINT8}`. (default `inference_type`)
+ inference_type: Target data type of real-number arrays in the output file.
+ Must be `{FLOAT, QUANTIZED_UINT8}`. (default FLOAT)
+ inference_input_type: Target data type of real-number input arrays. Allows
+ for a different type for input arrays in the case of quantization.
+ Must be `{FLOAT, QUANTIZED_UINT8}`. (default `inference_type`)
input_format: Type of data to read. Currently must be
`{TENSORFLOW_GRAPHDEF}`. (default TENSORFLOW_GRAPHDEF)
output_format: Output file format. Currently must be `{TFLITE,
@@ -202,29 +201,13 @@ def build_toco_convert_protos(input_tensors,
if dump_graphviz_dir:
toco.dump_graphviz_dir = dump_graphviz_dir
toco.dump_graphviz_include_video = dump_graphviz_video
+
model = _model_flags_pb2.ModelFlags()
model.change_concat_input_ranges = change_concat_input_ranges
for idx, input_tensor in enumerate(input_tensors):
- if input_tensor.dtype == _dtypes.float32:
- tflite_input_type = lite_constants.FLOAT
- elif input_tensor.dtype == _dtypes.int32:
- tflite_input_type = lite_constants.INT32
- elif input_tensor.dtype == _dtypes.int64:
- tflite_input_type = lite_constants.INT64
- elif input_tensor.dtype == _dtypes.uint8:
- tflite_input_type = lite_constants.QUANTIZED_UINT8
- # TODO(aselle): Insert strings when they are available
- else:
- raise ValueError("Tensors %s not known type %r" % (input_tensor.name,
- input_tensor.dtype))
-
input_array = model.input_arrays.add()
-
if inference_type == lite_constants.QUANTIZED_UINT8:
- if tflite_input_type == lite_constants.FLOAT:
- tflite_input_type = lite_constants.QUANTIZED_UINT8
input_array.mean_value, input_array.std_value = quantized_input_stats[idx]
-
input_array.name = tensor_name(input_tensor)
input_array.shape.dims.extend(map(int, input_tensor.get_shape()))
diff --git a/tensorflow/contrib/lite/python/lite.py b/tensorflow/contrib/lite/python/lite.py
index 88dda7290b..69a2f638af 100644
--- a/tensorflow/contrib/lite/python/lite.py
+++ b/tensorflow/contrib/lite/python/lite.py
@@ -67,11 +67,11 @@ class TocoConverter(object):
Attributes:
- inference_type: Target data type of arrays in the output file. Currently
- must be `{FLOAT, QUANTIZED_UINT8}`. (default FLOAT)
- inference_input_type: Target data type of input arrays. Allows for a
- different type for input arrays in the case of quantization. Currently
- must be `{FLOAT, QUANTIZED_UINT8}`. (default `inference_type`)
+ inference_type: Target data type of real-number arrays in the output file.
+ Must be `{FLOAT, QUANTIZED_UINT8}`. (default FLOAT)
+ inference_input_type: Target data type of real-number input arrays. Allows
+ for a different type for input arrays in the case of quantization.
+ Must be `{FLOAT, QUANTIZED_UINT8}`. (default `inference_type`)
output_format: Output file format. Currently must be `{TFLITE,
GRAPHVIZ_DOT}`. (default TFLITE)
quantized_input_stats: Dict of strings representing input tensor names
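
A hedged usage sketch of the attributes documented above, assuming the TF 1.9 `TocoConverter.from_session` entry point (the graph and file path are illustrative):

```python
import tensorflow as tf

img = tf.placeholder(tf.float32, [1, 128, 128, 3], name="input")
out = img + 1.0

with tf.Session() as sess:
    converter = tf.contrib.lite.TocoConverter.from_session(sess, [img], [out])
    tflite_model = converter.convert()  # inference_type defaults to FLOAT
    open("/tmp/foo.tflite", "wb").write(tflite_model)

# For quantized inference one would additionally set, e.g.:
#   converter.inference_type = tf.contrib.lite.constants.QUANTIZED_UINT8
#   converter.quantized_input_stats = {"input": (128, 127)}  # (mean, std dev)
```
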
diff --git a/tensorflow/contrib/lite/python/tflite_convert.py b/tensorflow/contrib/lite/python/tflite_convert.py
index f497533bed..d18a29834b 100644
--- a/tensorflow/contrib/lite/python/tflite_convert.py
+++ b/tensorflow/contrib/lite/python/tflite_convert.py
@@ -23,19 +23,15 @@ import os
import sys
from tensorflow.contrib.lite.python import lite
+from tensorflow.contrib.lite.python import lite_constants
from tensorflow.contrib.lite.toco import toco_flags_pb2 as _toco_flags_pb2
from tensorflow.contrib.lite.toco import types_pb2 as _types_pb2
from tensorflow.python.platform import app
-def _parse_array(values):
+def _parse_array(values, type_fn=str):
if values:
- return values.split(",")
-
-
-def _parse_int_array(values):
- if values:
- return [int(val) for val in values.split(",")]
+ return [type_fn(val) for val in values.split(",") if val]
def _parse_set(values):
@@ -57,7 +53,8 @@ def _get_toco_converter(flags):
input_shapes = None
if flags.input_shapes:
input_shapes_list = [
- _parse_int_array(shape) for shape in flags.input_shapes.split(":")
+ _parse_array(shape, type_fn=int)
+ for shape in flags.input_shapes.split(":")
]
input_shapes = dict(zip(input_arrays, input_shapes_list))
output_arrays = _parse_array(flags.output_arrays)
@@ -103,8 +100,8 @@ def _convert_model(flags):
if flags.mean_values and flags.std_dev_values:
input_arrays = converter.get_input_arrays()
- std_dev_values = _parse_int_array(flags.std_dev_values)
- mean_values = _parse_int_array(flags.mean_values)
+ std_dev_values = _parse_array(flags.std_dev_values, type_fn=int)
+ mean_values = _parse_array(flags.mean_values, type_fn=int)
quant_stats = zip(mean_values, std_dev_values)
if ((not flags.input_arrays and len(input_arrays) > 1) or
(len(input_arrays) != len(quant_stats))):
@@ -130,6 +127,9 @@ def _convert_model(flags):
if flags.allow_custom_ops:
converter.allow_custom_ops = flags.allow_custom_ops
if flags.quantize_weights:
+ if flags.inference_type == lite_constants.QUANTIZED_UINT8:
+ raise ValueError("--quantized_weights is not supported with "
+ "--inference_type=QUANTIZED_UINT8")
converter.quantize_weights = flags.quantize_weights
if flags.dump_graphviz_dir:
converter.dump_graphviz_dir = flags.dump_graphviz_dir
@@ -200,6 +200,9 @@ def _check_flags(flags, unparsed):
raise ValueError("--default_ranges_min and --default_ranges_max must be "
"used together")
+  if flags.dump_graphviz_video and not flags.dump_graphviz_dir:
+    raise ValueError("--dump_graphviz_video must be used with "
+                     "--dump_graphviz_dir")
+
def run_main(_):
"""Main in toco_convert.py."""
@@ -235,13 +238,13 @@ def run_main(_):
"--inference_type",
type=str.upper,
choices=["FLOAT", "QUANTIZED_UINT8"],
- help="Target data type of arrays in the output file.")
+ help="Target data type of real-number arrays in the output file.")
parser.add_argument(
"--inference_input_type",
type=str.upper,
choices=["FLOAT", "QUANTIZED_UINT8"],
- help=("Target data type of input arrays. Allows for a different type for "
- "input arrays in the case of quantization."))
+ help=("Target data type of real-number input arrays. Allows for a "
+ "different type for input arrays in the case of quantization."))
# Input and output arrays flags.
parser.add_argument(
@@ -275,12 +278,12 @@ def run_main(_):
"--std_dev_values",
type=str,
help=("Standard deviation of training data for each input tensor, "
- "comma-separated. Used for quantization. (default None)"))
+ "comma-separated integers. Used for quantization. (default None)"))
parser.add_argument(
"--mean_values",
type=str,
- help=("Mean of training data for each input tensor, comma-separated. "
- "Used for quantization. (default None)"))
+ help=("Mean of training data for each input tensor, comma-separated "
+ "integers. Used for quantization. (default None)"))
parser.add_argument(
"--default_ranges_min",
type=int,
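
A quick doctest-style sketch of what the consolidated `_parse_array` helper accepts, with its behavior inferred from the implementation above:

```python
def _parse_array(values, type_fn=str):
    if values:
        return [type_fn(val) for val in values.split(",") if val]

assert _parse_array("input,output") == ["input", "output"]
assert _parse_array("1,128,128,3", type_fn=int) == [1, 128, 128, 3]
assert _parse_array("1,,3", type_fn=int) == [1, 3]  # empty entries are skipped
assert _parse_array("") is None                     # falsy input yields None
```
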
diff --git a/tensorflow/contrib/lite/toco/README.md b/tensorflow/contrib/lite/toco/README.md
index 522e260ad2..ee83c7a6e3 100644
--- a/tensorflow/contrib/lite/toco/README.md
+++ b/tensorflow/contrib/lite/toco/README.md
@@ -17,7 +17,7 @@ Usage information is given in these documents:
Once an application developer has a trained TensorFlow model, TOCO will accept
that model and generate a TensorFlow Lite
[FlatBuffer](https://google.github.io/flatbuffers/) file. TOCO currently supports
-[SavedModels](https://www.tensorflow.org/programmers_guide/saved_model#using_savedmodel_with_estimators)
+[SavedModels](https://www.tensorflow.org/guide/saved_model#using_savedmodel_with_estimators)
and frozen graphs (models generated via
[freeze_graph.py](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/tools/freeze_graph.py)).
The TensorFlow Lite FlatBuffer file can be shipped to client devices, generally
diff --git a/tensorflow/contrib/lite/toco/g3doc/cmdline_examples.md b/tensorflow/contrib/lite/toco/g3doc/cmdline_examples.md
index a4b4e14ba6..0ab024c618 100644
--- a/tensorflow/contrib/lite/toco/g3doc/cmdline_examples.md
+++ b/tensorflow/contrib/lite/toco/g3doc/cmdline_examples.md
@@ -9,57 +9,49 @@ complemented by the following documents:
Table of contents:
-* [Convert a TensorFlow SavedModel to TensorFlow Lite](#savedmodel)
-* [Convert a TensorFlow GraphDef to TensorFlow Lite for float
- inference](#graphdef-float)
+* [Command-line tools](#tools)
+    * [Converting models prior to TensorFlow 1.9](#pre-tensorflow-1.9)
+* [Convert a TensorFlow GraphDef](#graphdef)
+* [Convert a TensorFlow SavedModel](#savedmodel)
* [Quantization](#quantization)
- * [Convert a TensorFlow GraphDef to TensorFlow Lite for quantized
- inference](#graphdef-quant)
+ * [Convert a TensorFlow GraphDef for quantized inference](#graphdef-quant)
* [Use "dummy-quantization" to try out quantized inference on a float
graph](#dummy-quant)
* [Specifying input and output arrays](#specifying-input-and-output-arrays)
- * [Multiple output arrays](#multiple-output-arrays)
* [Multiple input arrays](#multiple-input-arrays)
+ * [Multiple output arrays](#multiple-output-arrays)
* [Specifying subgraphs](#specifying-subgraphs)
-* [Other conversions supported by TOCO](#other-conversions)
- * [Optimize a TensorFlow GraphDef](#optimize-graphdef)
- * [Convert a TensorFlow Lite FlatBuffer back into TensorFlow GraphDef
- format](#to-graphdef)
-* [Logging](#logging)
- * [Graph "video" logging](#graph-video-logging)
* [Graph visualizations](#graph-visualizations)
* [Using --output_format=GRAPHVIZ_DOT](#using-output-formatgraphviz-dot)
* [Using --dump_graphviz_dir](#using-dump-graphviz)
+ * [Graph "video" logging](#graph-video-logging)
* [Legend for the graph visualizations](#graphviz-legend)
-## Convert a TensorFlow SavedModel to TensorFlow Lite <a name="savedmodel"></a>
+## Command-line tools <a name="tools"></a>
-The follow example converts a basic TensorFlow SavedModel into a Tensorflow Lite
-FlatBuffer to perform floating-point inference.
+There are two approaches to running TOCO via command line.
-```
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --savedmodel_directory=/tmp/saved_model \
- --output_file=/tmp/foo.tflite
-```
+* `tflite_convert`: Starting from TensorFlow 1.9, the command-line tool
+ `tflite_convert` will be installed as part of the Python package. All of the
+ examples below use `tflite_convert` for simplicity.
+    * Example: `tflite_convert --output_file=...`
+* `bazel`: In order to run the latest version of TOCO, [clone the TensorFlow
+ repository](https://www.tensorflow.org/install/install_sources#clone_the_tensorflow_repository)
+ and use `bazel`. This is the recommended approach for converting models that
+ utilize new features that were not supported by TOCO in TensorFlow 1.9.
+ * Example: `bazel run
+ //tensorflow/contrib/lite/python:tflite_convert --
+ --output_file=...`
-[SavedModel](https://www.tensorflow.org/programmers_guide/saved_model#using_savedmodel_with_estimators)
-has fewer required flags than frozen graphs (described [below](#graphdef-float))
-due to access to additional data contained within the SavedModel. The values for
-`--input_arrays` and `--output_arrays` are an aggregated, alphabetized list of
-the inputs and outputs in the
-[SignatureDefs](https://www.tensorflow.org/serving/signature_defs) within the
-[MetaGraphDef](https://www.tensorflow.org/programmers_guide/saved_model#apis_to_build_and_load_a_savedmodel)
-specified by `--savedmodel_tagset`. The value for `input_shapes` is
-automatically determined from the MetaGraphDef whenever possible. The default
-value for `--inference_type` for SavedModels is `FLOAT`.
+### Converting models prior to TensorFlow 1.9 <a name="pre-tensorflow-1.9"></a>
-There is currently no support for MetaGraphDefs without a SignatureDef or for
-MetaGraphDefs that use the [`assets/`
-directory](https://www.tensorflow.org/programmers_guide/saved_model#structure_of_a_savedmodel_directory).
+The recommended approach for using TOCO prior to TensorFlow 1.9 is the [Python
+API](python_api.md#pre-tensorflow-1.9). If a command-line tool is desired, the
+`toco` command-line tool was available in TensorFlow 1.7. Run `toco --help` in a
+terminal for details on the available command-line flags. There were no
+command-line tools in TensorFlow 1.8.
-## Convert a TensorFlow GraphDef to TensorFlow Lite for float inference <a name="graphdef-float"></a>
+## Convert a TensorFlow GraphDef <a name="graphdef"></a>
The following example converts a basic TensorFlow GraphDef (frozen by
[freeze_graph.py](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/tools/freeze_graph.py))
@@ -69,19 +61,43 @@ graphs contain the variables stored in Checkpoint files as Const ops.
```
curl https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_0.50_128_frozen.tgz \
| tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
+tflite_convert \
--output_file=/tmp/foo.tflite \
- --inference_type=FLOAT \
- --input_shape=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1
+ --graph_def_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
+ --input_arrays=input \
+ --output_arrays=MobilenetV1/Predictions/Reshape_1
```
+The value for `input_shapes` is automatically determined whenever possible.
+
+## Convert a TensorFlow SavedModel <a name="savedmodel"></a>
+
+The following example converts a basic TensorFlow SavedModel into a TensorFlow Lite
+FlatBuffer to perform floating-point inference.
+
+```
+tflite_convert \
+ --output_file=/tmp/foo.tflite \
+ --saved_model_dir=/tmp/saved_model
+```
+
+[SavedModel](https://www.tensorflow.org/guide/saved_model#using_savedmodel_with_estimators)
+has fewer required flags than frozen graphs due to access to additional data
+contained within the SavedModel. The values for `--input_arrays` and
+`--output_arrays` are an aggregated, alphabetized list of the inputs and outputs
+in the [SignatureDefs](https://www.tensorflow.org/serving/signature_defs) within
+the
+[MetaGraphDef](https://www.tensorflow.org/guide/saved_model#apis_to_build_and_load_a_savedmodel)
+specified by `--saved_model_tag_set`. As with the GraphDef, the value for
+`input_shapes` is automatically determined whenever possible.
+
+There is currently no support for MetaGraphDefs without a SignatureDef or for
+MetaGraphDefs that use the [`assets/`
+directory](https://www.tensorflow.org/guide/saved_model#structure_of_a_savedmodel_directory).
+
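+
To see which tensor names would be aggregated into `--input_arrays` and `--output_arrays`, the SignatureDefs can be inspected directly. A minimal sketch using the TF 1.x SavedModel loader (the path and tag set are placeholders):

```python
import tensorflow as tf

with tf.Session(graph=tf.Graph()) as sess:
    meta_graph = tf.saved_model.loader.load(sess, ["serve"], "/tmp/saved_model")
    for sig_name, sig in meta_graph.signature_def.items():
        print(sig_name,
              sorted(t.name for t in sig.inputs.values()),   # -> input arrays
              sorted(t.name for t in sig.outputs.values()))  # -> output arrays
```
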
## Quantization
-### Convert a TensorFlow GraphDef to TensorFlow Lite for quantized inference <a name="graphdef-quant"></a>
+### Convert a TensorFlow GraphDef for quantized inference <a name="graphdef-quant"></a>
TOCO is compatible with fixed point quantization models described
[here](https://www.tensorflow.org/performance/quantization). These are float
@@ -95,18 +111,14 @@ The following command generates a quantized TensorFlow Lite FlatBuffer from a
"quantized" TensorFlow GraphDef.
```
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/some_quantized_graph.pb \
+tflite_convert \
--output_file=/tmp/foo.tflite \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TFLITE \
+ --graph_def_file=/tmp/some_quantized_graph.pb \
--inference_type=QUANTIZED_UINT8 \
- --input_shape=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1 \
- --mean_value=128 \
- --std_value=127
+ --input_arrays=input \
+ --output_arrays=MobilenetV1/Predictions/Reshape_1 \
+ --mean_values=128 \
+ --std_dev_values=127
```
### Use \"dummy-quantization\" to try out quantized inference on a float graph <a name="dummy-quant"></a>
@@ -124,45 +136,20 @@ a reasonable guess is that most activation ranges should be contained in [0, 6].
```
curl https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_0.50_128_frozen.tgz \
| tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
+tflite_convert \
--output_file=/tmp/foo.cc \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TFLITE \
+ --graph_def_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
--inference_type=QUANTIZED_UINT8 \
- --input_shape=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1 \
+ --input_arrays=input \
+ --output_arrays=MobilenetV1/Predictions/Reshape_1 \
--default_ranges_min=0 \
--default_ranges_max=6 \
- --mean_value=127.5 \
- --std_value=127.5
+ --mean_values=128 \
+ --std_dev_values=127
```
## Specifying input and output arrays
-### Multiple output arrays
-
-The flag `output_arrays` takes in a comma-separated list of output arrays as
-seen in the example below. This is useful for models or subgraphs with multiple
-outputs.
-
-```
-curl https://storage.googleapis.com/download.tensorflow.org/models/inception_v1_2016_08_28_frozen.pb.tar.gz \
- | tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/inception_v1_2016_08_28_frozen.pb \
- --output_file=/tmp/foo.tflite \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TFLITE \
- --inference_type=FLOAT \
- --input_shape=1,224,224,3 \
- --input_array=input \
- --output_arrays=InceptionV1/InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/Relu
-```
-
### Multiple input arrays
The flag `input_arrays` takes in a comma-separated list of input arrays as seen
@@ -172,21 +159,33 @@ inputs.
```
curl https://storage.googleapis.com/download.tensorflow.org/models/inception_v1_2016_08_28_frozen.pb.tar.gz \
| tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/inception_v1_2016_08_28_frozen.pb \
+tflite_convert \
+ --graph_def_file=/tmp/inception_v1_2016_08_28_frozen.pb \
--output_file=/tmp/foo.tflite \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TFLITE \
- --inference_type=FLOAT \
--input_shapes=1,28,28,96:1,28,28,16:1,28,28,192:1,28,28,64 \
--input_arrays=InceptionV1/InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_3/MaxPool_0a_3x3/MaxPool,InceptionV1/InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/Relu \
- --output_array=InceptionV1/Logits/Predictions/Reshape_1
+ --output_arrays=InceptionV1/Logits/Predictions/Reshape_1
```
Note that `input_shapes` is provided as a colon-separated list. Each input shape
corresponds to the input array at the same position in the respective list.
+### Multiple output arrays
+
+The flag `output_arrays` takes in a comma-separated list of output arrays as
+seen in the example below. This is useful for models or subgraphs with multiple
+outputs.
+
+```
+curl https://storage.googleapis.com/download.tensorflow.org/models/inception_v1_2016_08_28_frozen.pb.tar.gz \
+ | tar xzv -C /tmp
+tflite_convert \
+ --graph_def_file=/tmp/inception_v1_2016_08_28_frozen.pb \
+ --output_file=/tmp/foo.tflite \
+ --input_arrays=input \
+ --output_arrays=InceptionV1/InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/Relu
+```
+
### Specifying subgraphs
Any array in the input file can be specified as an input or output array in
@@ -201,115 +200,57 @@ GraphDef.
```
curl https://storage.googleapis.com/download.tensorflow.org/models/inception_v1_2016_08_28_frozen.pb.tar.gz \
| tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/inception_v1_2016_08_28_frozen.pb \
+tflite_convert \
+ --graph_def_file=/tmp/inception_v1_2016_08_28_frozen.pb \
--output_file=/tmp/foo.pb \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TENSORFLOW_GRAPHDEF \
--input_shapes=1,28,28,96:1,28,28,16:1,28,28,192:1,28,28,64 \
--input_arrays=InceptionV1/InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_3/MaxPool_0a_3x3/MaxPool,InceptionV1/InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/Relu \
- --output_array=InceptionV1/InceptionV1/Mixed_3b/concat_v2
+ --output_arrays=InceptionV1/InceptionV1/Mixed_3b/concat_v2
```
-Note that the final representation of an on-device inference workload (say, in
-TensorFlow Lite FlatBuffers format) tends to have coarser granularity than the
-very fine granularity of the TensorFlow GraphDef representation. For example,
-while a fully-connected layer is typically represented as at least four separate
-ops in TensorFlow GraphDef (Reshape, MatMul, BiasAdd, Relu...), it is typically
-represented as a single "fused" op (FullyConnected) in the converter's optimized
-representation and in the final on-device representation (e.g. in TensorFlow
-Lite FlatBuffer format). As the level of granularity gets coarser, some
+Note that the final representation in TensorFlow Lite FlatBuffers tends to have
+coarser granularity than the very fine granularity of the TensorFlow GraphDef
+representation. For example, while a fully-connected layer is typically
+represented as at least four separate ops in TensorFlow GraphDef (Reshape,
+MatMul, BiasAdd, Relu...), it is typically represented as a single "fused" op
+(FullyConnected) in the converter's optimized representation and in the final
+on-device representation. As the level of granularity gets coarser, some
intermediate arrays (say, the array between the MatMul and the BiasAdd in the
-TensorFlow GraphDef) are dropped. When specifying intermediate arrays as
-`--input_arrays` / `--output_arrays`, it is desirable (and often required) to
-specify arrays that are meant to survive in the final form of the graph, after
-fusing. These are typically the outputs of activation functions (since
-everything in each layer until the activation function tends to get fused).
-
-## Other conversions supported by TOCO <a name="other-conversions"></a>
-
-The converter accepts both TENSORFLOW_GRAPHDEF and TFLITE file formats as both
-`--input_format` and `--output_format`. This means that conversion to and from
-any supported format is possible.
-
-### Optimize a TensorFlow GraphDef <a name="optimize-graphdef"></a>
-
-Same-format "conversions" can be used to optimize and simplify a graph or be
-used to [get a subgraph](#specifying-subgraphs) of a graph. The flag
-`--inference_type` is not required because TensorFlow graphs, including those
-containing the
-[`FakeQuant*`](https://www.tensorflow.org/api_guides/python/array_ops#Fake_quantization)
-ops are always float graphs.
-
-```
-curl https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_0.50_128_frozen.tgz \
- | tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
- --output_file=/tmp/foo.pb \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TENSORFLOW_GRAPHDEF \
- --input_shape=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1
-```
-
-### Convert a TensorFlow Lite FlatBuffer back into TensorFlow GraphDef format <a name="to-graphdef"></a>
+TensorFlow GraphDef) are dropped.
-The converter supports file format conversions from TensorFlow Lite, back into
-TensorFlow GraphDef format.
-
-```
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/foo.tflite \
- --output_file=/tmp/foo.pb \
- --input_format=TFLITE \
- --output_format=TENSORFLOW_GRAPHDEF \
- --input_shape=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1
-```
+When specifying intermediate arrays as `--input_arrays` and `--output_arrays`,
+it is desirable (and often required) to specify arrays that are meant to survive
+in the final form of the graph, after fusing. These are typically the outputs of
+activation functions (since everything in each layer until the activation
+function tends to get fused).
## Logging
-### Graph "video" logging
-
-When `--dump_graphviz=` is used (see the section on [graph
-visualizations](#graph-visualizations)), one may additionally pass
-`--dump_graphviz_video`, which causes a graph visualization to be dumped after
-each individual graph transformation. This results in thousands of files.
-Typically, one would then bisect into these files to understand when a given
-change was introduced in the graph.
## Graph visualizations
TOCO can export a graph to the GraphViz Dot format for easy visualization via
-either the `--output_format` flag or the `--dump_graphviz` flag. The subsections
-below outline the use cases for each.
+either the `--output_format` flag or the `--dump_graphviz_dir` flag. The
+subsections below outline the use cases for each.
### Using `--output_format=GRAPHVIZ_DOT`
The first way to get a graphviz rendering is to pass `GRAPHVIZ_DOT` into
`--output_format`. This results in a plausible visualization of the graph. This
-reduces the requirements that normally exist during conversion between other
-input and output formats. For example, this may be useful if conversion from
-TENSORFLOW_GRAPHDEF to TFLITE is failing.
+reduces the requirements that exist during conversion between other input and
+output formats. This may be useful if conversion from TENSORFLOW_GRAPHDEF to
+TFLITE is failing.
```
curl https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_0.50_128_frozen.tgz \
| tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
+tflite_convert \
+ --graph_def_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
--output_file=/tmp/foo.dot \
- --input_format=TENSORFLOW_GRAPHDEF \
--output_format=GRAPHVIZ_DOT \
--input_shapes=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1
+ --input_arrays=input \
+ --output_arrays=MobilenetV1/Predictions/Reshape_1
```
The resulting `.dot` file can be rendered into a PDF as follows:
@@ -330,49 +271,35 @@ Example PDF files are viewable online in the next section.
### Using `--dump_graphviz_dir` <a name="using-dump-graphviz"></a>
-The second way to get a graphviz rendering is to pass the `--dump_graphviz=`
+The second way to get a graphviz rendering is to pass the `--dump_graphviz_dir`
flag, specifying a destination directory to dump GraphViz rendering to. Unlike
-the previous approach, this one allows you to keep your real command-line (with
-your real `--output_format` and other flags) unchanged, just appending a
-`--dump_graphviz=` flag to it. This provides a visualization of the actual graph
-during a specific conversion process.
+the previous approach, this one retains the original output format. This
+provides a visualization of the actual graph resulting from a specific
+conversion process.
```
curl https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_0.50_128_frozen.tgz \
| tar xzv -C /tmp
-bazel run --config=opt \
- //tensorflow/contrib/lite/toco:toco -- \
- --input_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
+tflite_convert \
+ --graph_def_file=/tmp/mobilenet_v1_0.50_128/frozen_graph.pb \
--output_file=/tmp/foo.tflite \
- --input_format=TENSORFLOW_GRAPHDEF \
- --output_format=TFLITE \
- --inference_type=FLOAT \
- --input_shape=1,128,128,3 \
- --input_array=input \
- --output_array=MobilenetV1/Predictions/Reshape_1 \
- --dump_graphviz=/tmp
-```
-
-This generates a few files in the destination directory, here `/tmp`. The two
-most important files are:
-
-```
-/tmp/toco_AT_IMPORT.dot
-/tmp/toco_AFTER_TRANSFORMATIONS.dot
+ --input_arrays=input \
+ --output_arrays=MobilenetV1/Predictions/Reshape_1 \
+ --dump_graphviz_dir=/tmp
```
-`toco_AT_IMPORT.dot` represents the graph as it was imported from
-`--input_file`, before any transformation was applied to it (besides some
-transformations that are applied immediately while importing). This tends to be
-a complex visualization with limited information, but is useful especially in
-situations where a conversion command fails (this file is generated even if the
-conversion subsequently fails).
+This generates a few files in the destination directory. The two most important
+files are `toco_AT_IMPORT.dot` and `toco_AFTER_TRANSFORMATIONS.dot`.
+`toco_AT_IMPORT.dot` represents the graph as it was imported, with only the
+transformations applied at import time. This tends to be a complex
+visualization with limited information about each node, but it is useful in
+situations where a conversion command fails, since this file is generated even
+if the conversion subsequently fails.
`toco_AFTER_TRANSFORMATIONS.dot` represents the graph after all transformations
-were applied to it, just before it was exported to the `--output_file`.
-Typically, this is a much smaller graph with more information about each node.
+were applied to it, just before it is exported. Typically, this is a much
+smaller graph with more information about each node.
-Again, these can be rendered to PDFs:
+As before, these can be rendered to PDFs:
```
dot -Tpdf -O /tmp/toco_*.dot
@@ -383,6 +310,14 @@ Sample output files can be seen here:
* [toco_AT_IMPORT.dot.pdf](https://storage.googleapis.com/download.tensorflow.org/example_images/toco_AT_IMPORT.dot.pdf)
* [toco_AFTER_TRANSFORMATIONS.dot.pdf](https://storage.googleapis.com/download.tensorflow.org/example_images/toco_AFTER_TRANSFORMATIONS.dot.pdf).
+### Graph "video" logging
+
+When `--dump_graphviz_dir` is used, one may additionally pass
+`--dump_graphviz_video`. This causes a graph visualization to be dumped after
+each individual graph transformation, resulting in thousands of files.
+Typically, one would then bisect into these files to understand when a given
+change was introduced in the graph.
+
### Legend for the graph visualizations <a name="graphviz-legend"></a>
* Operators are red square boxes with the following hues of red:
diff --git a/tensorflow/contrib/lite/toco/g3doc/cmdline_reference.md b/tensorflow/contrib/lite/toco/g3doc/cmdline_reference.md
index 8085ae0748..2d44b871c6 100644
--- a/tensorflow/contrib/lite/toco/g3doc/cmdline_reference.md
+++ b/tensorflow/contrib/lite/toco/g3doc/cmdline_reference.md
@@ -1,7 +1,8 @@
# TensorFlow Lite Optimizing Converter command-line glossary
-This page is complete reference of command-line flags. It is complemented by the
-following other documents:
+This page is a complete reference of the command-line flags used by TOCO's
+command-line tool, from TensorFlow 1.9 up to the most recent build of
+TensorFlow. It is complemented by the following other documents:
* [README](../README.md)
* [Command-line examples](cmdline_examples.md)
@@ -16,116 +17,79 @@ Table of contents:
## High-level flags
-The following high level flags specify the location of the input and output
+The following high-level flags specify the details of the input and output
files. The flag `--output_file` is always required. Additionally, either
-`--input_file` or `--savedmodel_directory` is required.
-
-* `--savedmodel_directory`. Type: string. Specifies the full path to the
- directory containing the SavedModel.
-* `--savedmodel_tagset`. Type: string. Default:
+`--graph_def_file` or `--saved_model_dir` is required.
+
+* `--output_file`. Type: string. Specifies the full path of the output file.
+* `--graph_def_file`. Type: string. Specifies the full path of the input
+ GraphDef file frozen using
+ [freeze_graph.py](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/tools/freeze_graph.py).
+* `--saved_model_dir`. Type: string. Specifies the full path to the directory
+ containing the SavedModel.
+* `--output_format`. Type: string. Default: `TFLITE`. Specifies the format of
+ the output file. Allowed values:
+ * `TFLITE`: TensorFlow Lite FlatBuffer format.
+    * `GRAPHVIZ_DOT`: GraphViz `.dot` format containing a visualization of the
+      graph after graph transformations.
+        * Note that passing `GRAPHVIZ_DOT` to `--output_format` leads to loss
+          of TFLite-specific transformations. Therefore, the resulting
+          visualization may not reflect the final set of graph
+          transformations. To get a final visualization with all graph
+          transformations, use `--dump_graphviz_dir` instead.
+
+The following flags specify optional parameters when using SavedModels.
+
+* `--saved_model_tag_set`. Type: string. Default:
[kSavedModelTagServe](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/cc/saved_model/tag_constants.h).
Specifies a comma-separated set of tags identifying the MetaGraphDef within
the SavedModel to analyze. All tags in the tag set must be specified.
-* `--input_file`. Type: string. Specifies the path of the input file. This may
- be either an absolute or a relative path.
-* `--output_file`. Type: string. Specifies the path of the output file.
-
-The following high level flags specify the types of the input and output files:
-
-* `--input_format`. Type: string. Default: `TENSORFLOW_GRAPHDEF`. Specifies
- the format of the input file. Allowed values:
- * `TENSORFLOW_GRAPHDEF` &mdash; The TensorFlow GraphDef format. Both
- binary and text proto formats are allowed.
- * `TFLITE` &mdash; The TensorFlow Lite FlatBuffers format.
-* `--output_format`. Type: string. Default: `TFLITE`. Specifies the format of
- the output file. Allowed values:
- * `TENSORFLOW_GRAPHDEF` &mdash; The TensorFlow GraphDef format. Always
- produces a file in binary (not text) proto format.
- * `TFLITE` &mdash; The TensorFlow Lite FlatBuffers format.
- * Whether a float or quantized TensorFlow Lite file will be produced
- depends on the `--inference_type` flag.
- * `GRAPHVIZ_DOT` &mdash; The GraphViz `.dot` format. This asks the
- converter to generate a reasonable graphical representation of the graph
- after simplification by a generic set of transformation.
- * A typical `dot` command line to view the resulting graph might look
- like: `dot -Tpdf -O file.dot`.
- * Note that since passing this `--output_format` means losing the
- information of which output format you actually care about, and
- since the converter's transformations depend on the specific output
- format, the resulting visualization may not fully reflect what you
- would get on the actual output format that you are using. To avoid
- that concern, and generally to get a visualization of exactly what
- you get in your actual output format as opposed to just a merely
- plausible visualization of a model, consider using `--dump_graphviz`
- instead and keeping your true `--output_format`.
+* `--saved_model_signature_key`. Type: string. Default:
+ [DEFAULT_SERVING_SIGNATURE_DEF_KEY](https://www.tensorflow.org/api_docs/python/tf/saved_model/signature_constants).
+ Specifies the key identifying the SignatureDef containing inputs and
+ outputs.
## Model flags
*Model flags* provide additional information about the model stored in the input
file.
-* `--output_array`. Type: string. Specifies a single array as the output
- activations. Incompatible with `--output_arrays`.
-* `--output_arrays`. Type: comma-separated list of strings. Specifies a list
- of arrays as the output activations, for models with multiple outputs.
- Incompatible with `--output_array`.
-* `--input_array`. Type: string. Specifies a single array as the input
- activations. Incompatible with `--input_arrays`.
-* `--input_arrays`. Type: comma-separated list of strings. Specifies a list of
- arrays as the input activations, for models with multiple inputs.
- Incompatible with `--input_array`.
-* `--batch_size`. Type: integer. Default: 1. Specifies the batch size for the
- model. Replaces the first dimension of an input size array if undefined. Use
- only with SavedModels when neither `--input_shape` nor `input_shapes` flags
- are specified. Incompatible with GraphDefs.
-
-When `--input_array` is used, the following flags are available to provide
-additional information about the single input array:
-
-* `--input_shape`. Type: comma-separated list of integers. Specifies the shape
- of the input array, in TensorFlow convention: starting with the outer-most
- dimension (the dimension corresponding to the largest offset stride in the
- array layout), ending with the inner-most dimension (the dimension along
- which array entries are typically laid out contiguously in memory).
- * For example, a typical vision model might pass
- `--input_shape=1,60,80,3`, meaning a batch size of 1 (no batching), an
- input image height of 60, an input image width of 80, and an input image
- depth of 3, for the typical case where the input image is a RGB bitmap
- (3 channels, depth=3) stored by horizontal scanlines (so 'width' is the
- next innermost dimension after 'depth').
-* `--mean_value` and `--std_value`. Type: floating-point. The decimal point
- character is always the dot (`.`) regardless of the locale. These specify
- the (de-)quantization parameters of the input array, when it is quantized.
- * The meaning of mean_value and std_value is as follows: each quantized
- value in the quantized input array will be interpreted as a mathematical
- real number (i.e. as an input activation value) according to the
- following formula:
+* `--input_arrays`. Type: comma-separated list of strings. Specifies the list
+ of names of input activation tensors.
+* `--output_arrays`. Type: comma-separated list of strings. Specifies the list
+ of names of output activation tensors.
+
+The following flags define properties of the input tensors. Items in each of
+the following flags correspond, by index, to the items in `--input_arrays`.
+
+* `--input_shapes`. Type: colon-separated list of comma-separated lists of
+  integers. Each comma-separated list of integers gives the shape of one of
+  the input arrays, in [TensorFlow
+  convention](https://www.tensorflow.org/versions/r1.2/programmers_guide/dims_types#shape).
+ * Example: `--input_shapes=1,60,80,3` for a typical vision model means a
+ batch size of 1, an input image height of 60, an input image width of
+ 80, and an input image depth of 3 (representing RGB channels).
+ * Example: `--input_arrays=foo,bar --input_shapes=2,3:4,5,6` means "foo"
+ has a shape of [2, 3] and "bar" has a shape of [4, 5, 6].
+* `--std_dev_values`, `--mean_values`. Type: comma-separated list of integers.
+  These specify the (de-)quantization parameters of the input array, when it
+  is quantized. (See the worked sketch after this list.)
+  * The meaning of `mean_values` and `std_dev_values` is as follows: each
+    quantized value in the quantized input array will be interpreted as a
+    mathematical real number (i.e. as an input activation value) according
+    to the following formula:
      * `real_value = (quantized_input_value - mean_value) / std_dev_value`.
* When performing float inference (`--inference_type=FLOAT`) on a
quantized input, the quantized input would be immediately dequantized by
the inference code according to the above formula, before proceeding
with float inference.
* When performing quantized inference
- (`--inference_type=QUANTIZED_UINT8`), no dequantization is ever to be
- performed by the inference code; however, the quantization parameters of
- all arrays, including those of the input arrays as specified by
- mean_value and std_value, all participate in the determination of the
- fixed-point multipliers used in the quantized inference code.
-
-When `--input_arrays` is used, the following flags are available to provide
-additional information about the multiple input arrays:
-
-* `--input_shapes`. Type: colon-separated list of comma-separated lists of
- integers. Each comma-separated list of integer gives the shape of one of the
- input arrays specified in `--input_arrays`, in the same order. See
- `--input_shape` for details.
- * Example: `--input_arrays=foo,bar --input_shapes=2,3:4,5,6` means that
- there are two input arrays. The first one, "foo", has shape [2,3]. The
- second one, "bar", has shape [4,5,6].
-* `--mean_values`, `--std_values`. Type: comma-separated lists of
- floating-point numbers. Each number gives the corresponding value for one of
- the input arrays specified in `--input_arrays`, in the same order. See
- `--mean_value`, `--std_value` for details.
+ (`--inference_type=QUANTIZED_UINT8`), no dequantization is performed by
+ the inference code. However, the quantization parameters of all arrays,
+ including those of the input arrays as specified by `mean_value` and
+ `std_dev_value`, determine the fixed-point multipliers used in the
+ quantized inference code.
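+
+As a worked illustration of the formula above (a standalone sketch with
+hypothetical values, not converter code):
+
+```python
+mean_value, std_dev_value = 128.0, 127.0   # hypothetical per-array parameters
+quantized_input_value = 255                # a uint8 sample
+real_value = (quantized_input_value - mean_value) / std_dev_value
+print(real_value)                          # 1.0
+```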
## Transformation flags
@@ -133,21 +97,13 @@ additional information about the multiple input arrays:
the graph, i.e. they specify requested properties that the output file should
have.
-* `--inference_type`. Type: string. Sets the type of real-number arrays in the
- output file, that is, controls the representation (quantization) of real
- numbers in the output file, except for input arrays, which are controlled by
- `--inference_input_type`.
-
- This flag only impacts real-number arrays. By "real-number" we mean float
- arrays, and quantized arrays. This excludes plain integer arrays, strings
- arrays, and every other data type.
+* `--inference_type`. Type: string. Default: `FLOAT`. Data type of all
+ real-number arrays in the output file except for input arrays (defined by
+ `--inference_input_type`). Must be `{FLOAT, QUANTIZED_UINT8}`.
- For real-number arrays, the impact of this flag is to allow the output file
- to choose a different real-numbers representation (quantization) from what
- the input file used. For any other types of arrays, changing the data type
- would not make sense.
-
- Specifically:
+  This flag only impacts real-number arrays, i.e. float and quantized
+  arrays. It does not affect other data types, such as plain integer
+  arrays and string arrays. Specifically:
    * If `FLOAT`, then real-number arrays will be of type float in the output
      file. If they were quantized in the input file, then they get
@@ -155,66 +111,54 @@ have.
    * If `QUANTIZED_UINT8`, then real-number arrays will be quantized as
      uint8 in the output file. If they were float in the input file, then
      they get quantized.
- * If not set, then all real-numbers arrays retain the same type in the
- output file as they have in the input file.
-
-* `--inference_input_type`. Type: string. Similar to inference_type, but
- allows to control specifically the quantization of input arrays, separately
- from other arrays.
-
- If not set, then the value of `--inference_type` is implicitly used, i.e. by
- default input arrays are quantized like other arrays.
-
- Like `--inference_type`, this only affects real-number arrays. By
- "real-number" we mean float arrays, and quantized arrays. This excludes
- plain integer arrays, strings arrays, and every other data type.
-
- The typical use for this flag is for vision models taking a bitmap as input,
- typically with uint8 channels, yet still requiring floating-point inference.
- For such image models, the uint8 input is quantized, i.e. the uint8 values
- are interpreted as real numbers, and the quantization parameters used for
- such input arrays are their `mean_value`, `std_value` parameters.
-
-* `--default_ranges_min`, `--default_ranges_max`. Type: floating-point. The
- decimal point character is always the dot (`.`) regardless of the locale.
- These flags enable what is called "dummy quantization". If defined, their
- effect is to define fallback (min, max) range values for all arrays that do
- not have a properly specified (min, max) range in the input file, thus
- allowing to proceed with quantization of non-quantized or
- incorrectly-quantized input files. This enables easy performance prototyping
- ("how fast would my model run if I quantized it?") but should never be used
- in production as the resulting quantized arithmetic is inaccurate.
-
-* `--drop_fake_quant`. Type: boolean. Default: false. Causes fake-quantization
- nodes to be dropped from the graph. This may be used to recover a plain
- float graph from a fake-quantized graph.
-
-* `--reorder_across_fake_quant`. Type: boolean. Default: false. Normally,
- fake-quantization nodes must be strict boundaries for graph transformations,
- in order to ensure that quantized inference has the exact same arithmetic
- behavior as quantized training --- which is the whole point of quantized
- training and of FakeQuant nodes in the first place. However, that entails
- subtle requirements on where exactly FakeQuant nodes must be placed in the
- graph. Some quantized graphs have FakeQuant nodes at unexpected locations,
- that prevent graph transformations that are necessary in order to generate a
- well-formed quantized representation of these graphs. Such graphs should be
- fixed, but as a temporary work-around, setting this
- reorder_across_fake_quant flag allows the converter to perform necessary
- graph transformations on them, at the cost of no longer faithfully matching
- inference and training arithmetic.
-
-* `--quantize_weights`. Type: boolean. Default: false. Store weights as
- quantized weights followed by dequantize operations. Computation is still
- done in float, but reduces model size (at the cost of accuracy and latency).
+
+* `--inference_input_type`. Type: string. Data type of a real-number input
+  array in the output file. By default, `--inference_type` is used as the
+  type of all input arrays. This flag is primarily intended for generating a
+  floating-point graph with a quantized input array; a Dequantize operator is
+  added immediately after the input array. Must be `{FLOAT, QUANTIZED_UINT8}`.
+
+ The flag is typically used for vision models taking a bitmap as input but
+ requiring floating-point inference. For such image models, the uint8 input
+ is quantized and the quantization parameters used for such input arrays are
+ their `mean_value` and `std_dev_value` parameters.
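+
+  For example, a sketch of generating a floating-point graph that accepts a
+  quantized input via the Python API (the attribute names and the
+  `tf.contrib.lite.constants` values are assumed to mirror these flags; the
+  graph path and array names are hypothetical):
+
+  ```python
+  import tensorflow as tf
+
+  converter = tf.contrib.lite.TocoConverter.from_frozen_graph(
+      "/tmp/frozen.pb", input_arrays=["input"], output_arrays=["output"])
+  converter.inference_type = tf.contrib.lite.constants.FLOAT
+  converter.inference_input_type = tf.contrib.lite.constants.QUANTIZED_UINT8
+  # (mean_value, std_dev_value) per input array, as defined under Model flags.
+  converter.quantized_input_stats = {"input": (128.0, 127.0)}
+  tflite_model = converter.convert()
+  ```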
+
+* `--default_ranges_min`, `--default_ranges_max`. Type: floating-point.
+  Fallback (min, max) range values used for all arrays without a specified
+  range. Allows the user to proceed with quantization of non-quantized or
+  incorrectly-quantized input files. These flags produce models with low
+  accuracy. They are intended for easy experimentation with quantization via
+  "dummy quantization", as sketched below.
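+
+  A sketch of such "dummy quantization" from Python (the
+  `default_ranges_stats` attribute is assumed to correspond to these flags;
+  the `(0.0, 6.0)` range is an arbitrary illustration):
+
+  ```python
+  import tensorflow as tf
+
+  converter = tf.contrib.lite.TocoConverter.from_frozen_graph(
+      "/tmp/frozen.pb", input_arrays=["input"], output_arrays=["output"])
+  converter.inference_type = tf.contrib.lite.constants.QUANTIZED_UINT8
+  converter.quantized_input_stats = {"input": (128.0, 127.0)}
+  # Fallback (min, max) for arrays without a recorded range; accuracy suffers.
+  converter.default_ranges_stats = (0.0, 6.0)
+  tflite_model = converter.convert()
+  ```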
+
+* `--drop_control_dependency`. Type: boolean. Default: True. Indicates whether
+  to drop control dependencies silently, since TensorFlow Lite does not
+  support control dependencies.
+
+* `--reorder_across_fake_quant`. Type: boolean. Default: False. Indicates
+ whether to reorder FakeQuant nodes in unexpected locations. Used when the
+ location of the FakeQuant nodes is preventing graph transformations
+ necessary to convert the graph. Results in a graph that differs from the
+ quantized training graph, potentially causing differing arithmetic behavior.
+
+* `--allow_custom_ops`. Type: boolean. Default: False. Indicates whether to
+  allow custom operations. When false, any unknown operation is an error. When
+  true, custom ops are created for any op that is unknown. The developer will
+  need to provide these to the TensorFlow Lite runtime with a custom resolver.
+  See the sketch following this list.
+
+* `--quantize_weights`. Type: boolean. Default: False. Indicates whether to
+  store weights as quantized weights followed by dequantize operations.
+  Computation is still done in float, but this reduces model size (at the
+  cost of accuracy and latency).
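+
+A sketch of toggling the last two flags above from Python (the
+`allow_custom_ops` and `quantize_weights` attributes are assumed to mirror
+the flags; the path and array names are hypothetical):
+
+```python
+import tensorflow as tf
+
+converter = tf.contrib.lite.TocoConverter.from_frozen_graph(
+    "/tmp/frozen.pb", input_arrays=["input"], output_arrays=["output"])
+converter.allow_custom_ops = True    # unknown ops become custom ops
+converter.quantize_weights = True    # weight-only quantization, float compute
+tflite_model = converter.convert()
+```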
## Logging flags
-The following flags allow to generate graph visualizations of the actual graph
-at various points during transformations:
+The following flags generate [GraphViz](https://www.graphviz.org/) `.dot`
+visualizations of the graph at various points during graph transformations:
-* `--dump_graphviz=/path` enables dumping of the graphs at various stages of
- processing as GraphViz `.dot` files. Generally preferred over
- `--output_format=GRAPHVIZ_DOT` as this allows you to keep your actually
- relevant `--output_format`.
-* `--dump_graphviz_video` enables dumping of the graph after every single
- graph transformation (for debugging purposes).
+* `--dump_graphviz_dir`. Type: string. Specifies the full path of the
+  directory in which to write GraphViz `.dot` files. The graph is dumped
+  immediately after it is read in and again after all transformations have
+  completed.
+* `--dump_graphviz_video`. Type: boolean. Outputs GraphViz after every graph
+  transformation. Requires `--dump_graphviz_dir` to be specified.
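+
+For example, a sketch of producing the `.dot` dumps with the command-line tool
+(invoked from Python purely for illustration; the binary name and all paths
+are assumptions):
+
+```python
+import subprocess
+
+subprocess.check_call([
+    "tflite_convert",                   # assumed to be on PATH
+    "--graph_def_file=/tmp/frozen.pb",  # hypothetical input graph
+    "--output_file=/tmp/model.tflite",
+    "--input_arrays=input",
+    "--output_arrays=output",
+    "--dump_graphviz_dir=/tmp/dots",    # .dot files are written here
+])
+```
+
+The resulting `.dot` files can then be rendered with GraphViz, e.g.
+`dot -Tpng`.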
diff --git a/tensorflow/contrib/lite/toco/g3doc/python_api.md b/tensorflow/contrib/lite/toco/g3doc/python_api.md
index a7841a6855..afa6fd6957 100644
--- a/tensorflow/contrib/lite/toco/g3doc/python_api.md
+++ b/tensorflow/contrib/lite/toco/g3doc/python_api.md
@@ -20,6 +20,9 @@ Table of contents:
* [TensorFlow Lite Python interpreter](#interpreter)
* [Using the interpreter from a model file](#interpreter-file)
* [Using the interpreter from model data](#interpreter-data)
+* [Additional instructions](#additional-instructions)
+ * [Build from source code](#latest-package)
+    * [Converting models prior to TensorFlow 1.9](#pre-tensorflow-1.9)
## High-level overview
@@ -31,8 +34,8 @@ designing a model that can be targeted to devices with mobile.
## API
-The API for converting TensorFlow models to TensorFlow Lite is
-`tf.contrib.lite.TocoConverter`. The API for calling the Python intepreter is
+The API for converting TensorFlow models to TensorFlow Lite as of TensorFlow 1.9
+is `tf.contrib.lite.TocoConverter`. The API for calling the Python interpreter is
`tf.contrib.lite.Interpreter`.
`TocoConverter` provides class methods based on the original format of the
@@ -200,3 +203,18 @@ with tf.Session() as sess:
interpreter = tf.contrib.lite.Interpreter(model_content=tflite_model)
interpreter.allocate_tensors()
```
+
+## Additional instructions
+
+### Build from source code <a name="latest-package"></a>
+
+In order to run the latest version of the TOCO Python API, clone the TensorFlow
+repository, configure the installation, and build and install the pip package.
+Detailed instructions are available
+[here](https://www.tensorflow.org/install/install_sources).
+
+### Converting models prior to TensorFlow 1.9 <a name="pre-tensorflow-1.9"></a>
+
+To use TOCO in TensorFlow 1.7 and TensorFlow 1.8, use the `toco_convert`
+function. Run `help(tf.contrib.lite.toco_convert)` to get details about accepted
+parameters.
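+
+A minimal sketch for those releases (tensor names and shapes are
+hypothetical):
+
+```python
+import tensorflow as tf
+
+img = tf.placeholder(name="img", dtype=tf.float32, shape=(1, 64, 64, 3))
+out = tf.identity(img, name="out")
+
+with tf.Session() as sess:
+  tflite_model = tf.contrib.lite.toco_convert(
+      sess.graph_def, input_tensors=[img], output_tensors=[out])
+```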
diff --git a/tensorflow/contrib/lite/toco/g3doc/toco_landscape.svg b/tensorflow/contrib/lite/toco/g3doc/toco_landscape.svg
index a47c088991..262e13a591 100644
--- a/tensorflow/contrib/lite/toco/g3doc/toco_landscape.svg
+++ b/tensorflow/contrib/lite/toco/g3doc/toco_landscape.svg
@@ -1 +1 @@
-<svg version="1.1" viewBox="0.0 0.0 720.0 540.0" …> [TOCO landscape diagram: full-file single-line SVG markup replaced in this commit; the vector path data is not human-readable and is omitted]
-1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.7927856 -0.734375q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m371.61902 334.89435l41.417297 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m371.61902 334.89435l37.990234 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m409.60925 334.89435l-1.1245728 1.1246033l3.0897522 -1.1246033l-3.0897522 -1.1245728z" fill-rule="evenodd"/><path fill="#c9daf8" d="m548.5407 277.52954l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m548.5407 277.52954l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path fill="#000000" d="m587.0588 293.13934q0.1875 0 0.296875 0.109375q0.109375 0.109375 0.109375 0.296875l0 2.984375q0 0.296875 -0.09375 0.4375q-0.078125 0.140625 -0.328125 0.234375q-0.46875 0.203125 -1.15625 0.328125q-0.6875 0.109375 -1.375 0.109375q-1.25 0 -2.171875 -0.515625q-0.90625 -0.515625 -1.390625 -1.484375q-0.484375 -0.96875 -0.484375 -2.328125q0 -1.328125 0.46875 -2.296875q0.484375 -0.984375 1.375 -1.5q0.90625 -0.53125 2.125 -0.53125q0.84375 0 1.5625 0.265625q0.71875 0.25 1.203125 0.734375q0.21875 0.203125 0.21875 0.421875q0 0.171875 -0.109375 0.296875q-0.09375 0.125 -0.234375 0.125q-0.140625 0 -0.328125 -0.140625q-0.625 -0.484375 -1.140625 -0.671875q-0.5 -0.1875 -1.15625 -0.1875q-1.4375 0 -2.203125 0.90625q-0.75 0.890625 -0.75 2.578125q0 1.71875 0.765625 2.609375q0.78125 0.890625 2.28125 0.890625q1.109375 0 2.03125 -0.328125l0 -2.578125l-1.75 0q-0.203125 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.265625q0 -0.1875 0.125 -0.28125q0.125 -0.109375 0.328125 -0.109375l2.234375 0zm2.8911743 4.46875q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.84375 0q1.328125 0 2.0625 0.65625q0.75 0.640625 0.75 1.828125q0 1.1875 -0.75 1.84375q-0.734375 0.65625 -2.0625 0.65625l-2.359375 0l0 3.03125q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.359375 0.140625zm2.765625 -4.34375q1.9375 0 1.9375 -1.6875q0 -1.671875 -1.9375 -1.671875l-2.265625 0l0 3.359375l2.265625 0zm7.7869263 4.375q-1.65625 0 -2.515625 -0.859375q-0.84375 -0.859375 -0.84375 -2.546875l0 -4.703125q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.78125q0 1.25 0.609375 1.875q0.609375 0.609375 1.78125 0.609375q1.171875 0 1.765625 -0.609375q0.609375 -0.625 
0.609375 -1.875l0 -4.78125q0 -0.234375 0.140625 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.703125q0 1.671875 -0.859375 2.546875q-0.859375 0.859375 -2.5 0.859375z" fill-rule="nonzero"/><path fill="#c9daf8" d="m548.5407 319.3983l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m548.5407 319.3983l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path fill="#000000" d="m584.63763 339.50812q-1.1875 0 -2.0625 -0.515625q-0.875 -0.53125 -1.359375 -1.5q-0.46875 -0.984375 -0.46875 -2.3125q0 -1.328125 0.46875 -2.296875q0.484375 -0.984375 1.359375 -1.5q0.875 -0.53125 2.0625 -0.53125q0.8125 0 1.515625 0.265625q0.71875 0.25 1.25 0.734375q0.1875 0.1875 0.1875 0.421875q0 0.171875 -0.09375 0.296875q-0.09375 0.125 -0.21875 0.125q-0.15625 0 -0.359375 -0.140625q-0.609375 -0.46875 -1.109375 -0.65625q-0.5 -0.203125 -1.140625 -0.203125q-1.390625 0 -2.140625 0.90625q-0.75 0.90625 -0.75 2.578125q0 1.671875 0.75 2.578125q0.75 0.90625 2.140625 0.90625q0.640625 0 1.140625 -0.1875q0.5 -0.1875 1.109375 -0.671875q0.203125 -0.125 0.359375 -0.125q0.125 0 0.21875 0.125q0.09375 0.109375 0.09375 0.296875q0 0.234375 -0.1875 0.40625q-0.53125 0.484375 -1.25 0.75q-0.703125 0.25 -1.515625 0.25zm5.0302734 -0.03125q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.84375 0q1.328125 0 2.0625 0.65625q0.75 0.640625 0.75 1.828125q0 1.1875 -0.75 1.84375q-0.734375 0.65625 -2.0625 0.65625l-2.359375 0l0 3.03125q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.359375 0.140625zm2.765625 -4.34375q1.9375 0 1.9375 -1.6875q0 -1.671875 -1.9375 -1.671875l-2.265625 0l0 3.359375l2.265625 0zm7.7869263 4.375q-1.65625 0 -2.515625 -0.859375q-0.84375 -0.859375 -0.84375 -2.546875l0 -4.703125q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.78125q0 1.25 0.609375 1.875q0.609375 0.609375 1.78125 0.609375q1.171875 0 1.765625 -0.609375q0.609375 -0.625 0.609375 -1.875l0 -4.78125q0 -0.234375 0.140625 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.703125q0 1.671875 -0.859375 2.546875q-0.859375 0.859375 -2.5 0.859375z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m241.86351 334.89435l42.267715 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m241.86351 334.89435l38.840652 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m280.70413 334.89435l-1.1245728 1.1246033l3.0897827 -1.1246033l-3.0897827 -1.1245728z" fill-rule="evenodd"/><path fill="#d9ead3" d="m413.02625 141.28871l20.53543 0l0 20.53543l-20.53543 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m413.02625 141.28871l20.53543 0l0 20.53543l-20.53543 0z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.52493 135.68242l73.763794 0l0 31.748032l-73.763794 0z" fill-rule="evenodd"/><path fill="#000000" d="m448.0718 156.20241q-0.234375 0 -0.375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -7.5q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.34375 0q2.03125 0 3.140625 1.09375q1.109375 1.09375 1.109375 3.125q0 2.03125 -1.125 3.140625q-1.109375 1.09375 -3.125 1.09375l-2.34375 0zm2.28125 
-0.84375q3.28125 0 3.28125 -3.390625q0 -3.390625 -3.28125 -3.390625l-1.796875 0l0 6.78125l1.796875 0zm8.3211975 -5.140625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm7.0631714 -0.015625q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm3.767517 -5.28125q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm10.15921 0.75q-0.234375 0 -0.375 -0.140625q-0.140625 -0.140625 -0.140625 -0.359375l0 -7.1875l-2.578125 0q-0.21875 0 -0.34375 -0.109375q-0.109375 -0.109375 -0.109375 -0.3125q0 -0.203125 0.109375 -0.296875q0.125 -0.109375 0.34375 -0.109375l6.15625 0q0.21875 0 0.328125 0.109375q0.125 0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.109375 0.109375 -0.328125 0.109375l-2.578125 0l0 7.1875q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.34375 0.140625zm8.691681 -5.71875q0.140625 -0.296875 0.421875 -0.296875q0.1875 0 0.328125 0.125q0.140625 0.109375 
0.140625 0.296875q0 0.109375 -0.046875 0.1875l-3.375 7.28125q-0.0625 0.125 -0.171875 0.1875q-0.109375 0.078125 -0.234375 0.078125q-0.1875 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.296875q0 -0.09375 0.046875 -0.1875l0.84375 -1.8125l-2.375 -5.140625q-0.046875 -0.078125 -0.046875 -0.171875q0 -0.1875 0.15625 -0.3125q0.15625 -0.140625 0.359375 -0.140625q0.109375 0 0.21875 0.078125q0.125 0.078125 0.1875 0.203125l2.0 4.5l2.0 -4.46875zm4.902405 -0.328125q0.765625 0 1.34375 0.390625q0.59375 0.375 0.921875 1.0625q0.328125 0.6875 0.328125 1.609375q0 0.90625 -0.328125 1.59375q-0.328125 0.671875 -0.90625 1.046875q-0.578125 0.359375 -1.359375 0.359375q-0.6875 0 -1.203125 -0.296875q-0.5 -0.296875 -0.765625 -0.84375l0 2.8125q0 0.21875 -0.125 0.34375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.140625q-0.125 -0.125 -0.125 -0.328125l0 -7.234375q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.125 0.125 0.125 0.34375l0 0.640625q0.265625 -0.546875 0.765625 -0.84375q0.515625 -0.296875 1.203125 -0.296875zm-0.203125 5.265625q0.859375 0 1.328125 -0.578125q0.46875 -0.578125 0.46875 -1.625q0 -1.0625 -0.46875 -1.65625q-0.46875 -0.59375 -1.328125 -0.59375q-0.84375 0 -1.3125 0.578125q-0.453125 0.578125 -0.453125 1.640625q0 1.0625 0.453125 1.65625q0.46875 0.578125 1.3125 0.578125zm8.76532 -0.640625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375z" fill-rule="nonzero"/><path fill="#f4cccc" d="m519.9029 141.28871l20.5354 0l0 20.53543l-20.5354 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m519.9029 141.28871l20.5354 0l0 20.53543l-20.5354 0z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m544.40155 135.68242l100.0 0l0 31.748032l-100.0 0z" fill-rule="evenodd"/><path fill="#000000" d="m554.9328 156.26491q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.578125q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.140625 0.125 0.140625 0.359375l0 7.578125q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.359375 0.140625zm5.3845215 -6.046875q2.09375 0 2.09375 2.3125l0 3.25q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -3.1875q0 -0.8125 -0.328125 -1.1875q-0.3125 -0.375 -1.0 -0.375q-0.8125 0 -1.296875 0.5q-0.46875 0.484375 -0.46875 1.328125l0 2.921875q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.328125l0 0.609375q0.28125 -0.53125 0.796875 -0.8125q0.53125 -0.28125 1.1875 -0.28125zm6.456726 -1.703125q-0.640625 0.046875 -0.96875 0.40625q-0.3125 0.34375 -0.3125 1.046875l0 0.390625l1.328125 
0q0.203125 0 0.3125 0.109375q0.109375 0.109375 0.109375 0.28125q0 0.1875 -0.109375 0.28125q-0.109375 0.09375 -0.3125 0.09375l-1.328125 0l0 4.65625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.140625 -0.125 -0.140625 -0.359375l0 -4.65625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -0.21875q0 -1.078125 0.53125 -1.6875q0.546875 -0.625 1.5625 -0.703125l0.3125 -0.015625q0.3125 -0.03125 0.453125 0.0625q0.140625 0.078125 0.140625 0.296875q0 0.34375 -0.421875 0.390625l-0.3125 0.03125zm4.248535 1.71875q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm3.720398 -0.015625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm6.3444214 0.765625q-0.5625 0 -1.0625 -0.125q-0.5 -0.140625 -0.875 -0.375q-0.21875 -0.140625 -0.3125 -0.265625q-0.078125 -0.125 -0.078125 -0.3125q0 -0.15625 0.078125 -0.25q0.09375 -0.109375 0.234375 -0.109375q0.15625 0 0.421875 0.1875q0.359375 0.21875 0.71875 0.34375q0.359375 0.125 0.875 0.125q0.65625 0 1.015625 -0.21875q0.359375 -0.234375 0.359375 -0.671875q0 -0.265625 -0.140625 -0.421875q-0.125 -0.171875 -0.453125 -0.296875q-0.3125 -0.125 -0.9375 -0.25q-1.0625 -0.234375 -1.515625 -0.609375q-0.453125 -0.390625 -0.453125 -1.046875q0 -0.515625 0.28125 -0.90625q0.28125 -0.40625 0.796875 -0.625q0.515625 -0.234375 1.15625 -0.234375q0.46875 0 0.90625 0.125q0.4375 0.125 0.78125 0.34375q0.40625 0.296875 0.40625 0.609375q0 0.15625 -0.09375 0.265625q-0.09375 0.109375 -0.234375 0.109375q-0.140625 0 -0.4375 -0.203125q-0.328125 -0.21875 -0.625 -0.34375q-0.296875 -0.125 -0.75 -0.125q-0.5625 0 -0.90625 0.265625q-0.34375 0.25 -0.34375 0.671875q0 0.25 0.125 0.421875q0.125 0.15625 0.421875 0.28125q0.296875 0.125 0.84375 0.25q0.828125 0.1875 1.265625 0.40625q0.453125 0.203125 0.640625 0.515625q0.203125 0.3125 0.203125 0.796875q0 0.75 -0.640625 1.21875q-0.640625 0.453125 -1.671875 0.453125zm6.47876 -0.78125q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 
-0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm4.283142 -5.265625q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm5.782898 0q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 5.078125q0 0.203125 -0.125 0.34375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.34375 -0.125q-0.125 -0.125 -0.125 -0.328125l0 -0.609375q-0.28125 0.53125 -0.78125 0.8125q-0.5 0.265625 -1.125 0.265625q-1.03125 0 -1.5625 -0.578125q-0.53125 -0.578125 -0.53125 -1.71875l0 -3.265625q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 3.234375q0 0.78125 0.3125 1.15625q0.3125 0.359375 0.984375 0.359375q0.765625 0 1.234375 -0.5q0.46875 -0.5 0.46875 -1.3125l0 -2.9375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625zm4.7008057 6.046875q-0.8125 0 -1.453125 -0.359375q-0.625 -0.375 -0.96875 -1.0625q-0.34375 -0.6875 -0.34375 -1.578125q0 -0.90625 0.359375 -1.59375q0.359375 -0.703125 0.984375 -1.078125q0.640625 -0.390625 1.46875 -0.390625q0.453125 0 0.90625 0.125q0.453125 0.125 0.78125 0.359375q0.21875 0.140625 0.3125 0.28125q0.09375 0.140625 0.09375 0.3125q0 0.171875 -0.09375 0.28125q-0.09375 0.09375 -0.234375 0.09375q-0.078125 0 -0.1875 -0.046875q-0.09375 -0.046875 -0.15625 -0.09375q-0.0625 -0.046875 -0.09375 -0.0625q-0.3125 -0.203125 -0.59375 -0.3125q-0.28125 -0.125 -0.6875 -0.125q-0.875 0 -1.359375 0.59375q-0.484375 0.59375 -0.484375 1.65625q0 1.046875 0.484375 1.625q0.484375 0.578125 1.359375 0.578125q0.40625 0 0.703125 -0.109375q0.296875 -0.125 0.59375 -0.328125q0.140625 -0.09375 0.25 -0.15625q0.125 -0.0625 0.203125 -0.0625q0.140625 0 0.21875 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.15625 -0.09375 0.28125q-0.078125 0.125 -0.296875 0.28125q-0.34375 0.234375 -0.8125 0.375q-0.46875 0.125 -0.953125 0.125zm6.029297 -0.78125q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 
0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm5.830017 -5.265625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 5.078125q0 0.203125 -0.125 0.34375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.34375 -0.125q-0.125 -0.125 -0.125 -0.328125l0 -0.609375q-0.28125 0.53125 -0.78125 0.8125q-0.5 0.265625 -1.125 0.265625q-1.03125 0 -1.5625 -0.578125q-0.53125 -0.578125 -0.53125 -1.71875l0 -3.265625q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 3.234375q0 0.78125 0.3125 1.15625q0.3125 0.359375 0.984375 0.359375q0.765625 0 1.234375 -0.5q0.46875 -0.5 0.46875 -1.3125l0 -2.9375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625zm5.1851807 0q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm5.861023 4.609375q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375z" fill-rule="nonzero"/><path fill="#d9ead3" d="m78.872284 252.53609l87.49606 0l0 30.992142l-87.49606 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m78.872284 252.53609l87.49606 0l0 30.992142l-87.49606 0z" fill-rule="evenodd"/><path fill="#000000" d="m93.328064 272.6459q-0.90625 0 -1.734375 -0.265625q-0.8125 -0.265625 -1.3125 -0.734375q-0.171875 -0.15625 -0.171875 -0.40625q0 -0.171875 0.09375 -0.296875q0.09375 -0.125 0.234375 -0.125q0.15625 0 0.328125 0.125q1.109375 0.859375 2.546875 0.859375q1.03125 0 1.578125 -0.390625q0.5625 -0.390625 0.5625 -1.125q0 -0.421875 -0.265625 -0.671875q-0.265625 -0.265625 -0.703125 -0.421875q-0.4375 -0.15625 -1.15625 -0.328125q-0.984375 -0.21875 -1.625 -0.46875q-0.625 -0.265625 -1.015625 -0.734375q-0.390625 -0.46875 -0.390625 -1.21875q0 -0.71875 0.390625 -1.265625q0.390625 -0.5625 1.09375 -0.875q0.703125 -0.3125 1.59375 -0.3125q0.84375 0 1.5625 0.265625q0.734375 0.25 1.234375 0.734375q0.1875 0.1875 0.1875 0.421875q0 0.171875 -0.09375 0.296875q-0.09375 0.125 -0.234375 0.125q-0.125 0 -0.34375 -0.140625q-0.59375 -0.46875 -1.09375 -0.65625q-0.5 -0.203125 -1.21875 -0.203125q-0.984375 0 -1.546875 0.421875q-0.546875 0.40625 -0.546875 1.15625q0 0.625 0.484375 0.953125q0.484375 0.3125 1.5 0.5625q1.09375 0.25 1.71875 0.484375q0.625 0.21875 1.03125 0.671875q0.421875 0.4375 0.421875 1.171875q0 0.71875 -0.390625 1.265625q-0.390625 0.53125 -1.109375 0.828125q-0.703125 0.296875 -1.609375 0.296875zm6.9353027 -6.078125q2.203125 0 
2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm8.578796 -4.96875q0.140625 -0.296875 0.421875 -0.296875q0.1875 0 0.328125 0.125q0.140625 0.109375 0.140625 0.296875q0 0.109375 -0.046875 0.1875l-2.34375 5.046875q-0.0625 0.15625 -0.21875 0.25q-0.140625 0.078125 -0.3125 0.078125q-0.15625 0 -0.296875 -0.078125q-0.140625 -0.09375 -0.21875 -0.25l-2.328125 -5.046875q-0.046875 -0.078125 -0.046875 -0.171875q0 -0.1875 0.15625 -0.3125q0.15625 -0.140625 0.359375 -0.140625q0.109375 0 0.21875 0.078125q0.125 0.078125 0.1875 0.203125l2.0 4.5l2.0 -4.46875zm6.480545 4.296875q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm8.589676 -3.28125q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.328125l0 7.625q0 0.21875 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.640625q-0.265625 0.546875 -0.78125 0.84375q-0.5 0.296875 -1.1875 0.296875q-0.765625 0 -1.359375 -0.375q-0.578125 -0.390625 -0.90625 -1.078125q-0.328125 -0.6875 -0.328125 -1.59375q0 -0.90625 0.328125 -1.59375q0.328125 -0.6875 0.90625 -1.046875q0.59375 -0.375 1.359375 -0.375q0.6875 0 1.1875 0.296875q0.515625 0.296875 0.78125 0.84375l0 -3.203125q0 -0.21875 0.125 -0.34375q0.125 -0.125 0.359375 -0.125zm-2.25 7.796875q0.84375 0 1.296875 -0.578125q0.46875 -0.59375 0.46875 -1.65625q0 -1.0625 -0.46875 -1.640625q-0.453125 -0.578125 -1.296875 -0.578125q-0.859375 0 -1.34375 0.578125q-0.46875 0.578125 -0.46875 1.625q0 1.0625 0.46875 1.65625q0.484375 0.59375 1.34375 0.59375zm12.202805 -7.796875q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.359375l0 7.59375q0 0.21875 -0.125 0.359375q-0.109375 0.125 -0.328125 0.125q-0.21875 0 -0.328125 -0.125q-0.109375 -0.140625 -0.109375 -0.359375l0 -6.125l-2.59375 4.984375q-0.171875 0.34375 -0.5 0.34375q-0.3125 0 -0.484375 -0.34375l-2.625 -4.921875l0 6.0625q0 0.21875 -0.109375 0.359375q-0.109375 0.125 
-0.328125 0.125q-0.21875 0 -0.34375 -0.125q-0.109375 -0.140625 -0.109375 -0.359375l0 -7.59375q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.3125 0 0.484375 0.34375l3.046875 5.84375l3.015625 -5.84375q0.09375 -0.1875 0.203125 -0.265625q0.125 -0.078125 0.28125 -0.078125zm4.8576965 8.59375q-0.828125 0 -1.46875 -0.359375q-0.625 -0.375 -0.96875 -1.0625q-0.34375 -0.703125 -0.34375 -1.609375q0 -0.90625 0.34375 -1.59375q0.34375 -0.703125 0.96875 -1.0625q0.640625 -0.375 1.46875 -0.375q0.828125 0 1.453125 0.375q0.640625 0.359375 0.984375 1.0625q0.34375 0.6875 0.34375 1.59375q0 0.90625 -0.34375 1.609375q-0.34375 0.6875 -0.984375 1.0625q-0.625 0.359375 -1.453125 0.359375zm0 -0.796875q0.859375 0 1.3125 -0.5625q0.46875 -0.578125 0.46875 -1.671875q0 -1.0625 -0.46875 -1.640625q-0.46875 -0.59375 -1.3125 -0.59375q-0.859375 0 -1.328125 0.59375q-0.46875 0.578125 -0.46875 1.640625q0 1.078125 0.453125 1.65625q0.46875 0.578125 1.34375 0.578125zm8.925674 -7.796875q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.328125l0 7.625q0 0.21875 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.640625q-0.265625 0.546875 -0.78125 0.84375q-0.5 0.296875 -1.1875 0.296875q-0.765625 0 -1.359375 -0.375q-0.578125 -0.390625 -0.90625 -1.078125q-0.328125 -0.6875 -0.328125 -1.59375q0 -0.90625 0.328125 -1.59375q0.328125 -0.6875 0.90625 -1.046875q0.59375 -0.375 1.359375 -0.375q0.6875 0 1.1875 0.296875q0.515625 0.296875 0.78125 0.84375l0 -3.203125q0 -0.21875 0.125 -0.34375q0.125 -0.125 0.359375 -0.125zm-2.25 7.796875q0.84375 0 1.296875 -0.578125q0.46875 -0.59375 0.46875 -1.65625q0 -1.0625 -0.46875 -1.640625q-0.453125 -0.578125 -1.296875 -0.578125q-0.859375 0 -1.34375 0.578125q-0.46875 0.578125 -0.46875 1.625q0 1.0625 0.46875 1.65625q0.484375 0.59375 1.34375 0.59375zm9.06218 -0.640625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm4.386551 5.296875q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -7.625q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.359375 -0.125q0.203125 0 0.34375 0.125q0.140625 0.125 0.140625 0.34375l0 7.625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125z" fill-rule="nonzero"/><path fill="#d9ead3" d="m190.14 134.76706l87.49608 0l0 30.992126l-87.49608 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m190.14 134.76706l87.49608 0l0 30.992126l-87.49608 0z" fill-rule="evenodd"/><path fill="#000000" d="m215.10997 150.37688q0.1875 0 0.296875 0.109375q0.109375 0.109375 0.109375 0.296875l0 2.984375q0 0.296875 -0.09375 0.4375q-0.078125 0.140625 -0.328125 0.234375q-0.46875 0.203125 -1.15625 0.328125q-0.6875 0.109375 -1.375 0.109375q-1.25 0 -2.171875 -0.515625q-0.90625 -0.515625 -1.390625 -1.484375q-0.484375 -0.96875 -0.484375 -2.328125q0 -1.328125 0.46875 -2.296875q0.484375 
-0.984375 1.375 -1.5q0.90625 -0.53125 2.125 -0.53125q0.84375 0 1.5625 0.265625q0.71875 0.25 1.203125 0.734375q0.21875 0.203125 0.21875 0.421875q0 0.171875 -0.109375 0.296875q-0.09375 0.125 -0.234375 0.125q-0.140625 0 -0.328125 -0.140625q-0.625 -0.484375 -1.140625 -0.671875q-0.5 -0.1875 -1.15625 -0.1875q-1.4375 0 -2.203125 0.90625q-0.75 0.890625 -0.75 2.578125q0 1.71875 0.765625 2.609375q0.78125 0.890625 2.28125 0.890625q1.109375 0 2.03125 -0.328125l0 -2.578125l-1.75 0q-0.203125 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.265625q0 -0.1875 0.125 -0.28125q0.125 -0.109375 0.328125 -0.109375l2.234375 0zm5.1568146 -1.5625q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm3.720398 -0.015625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm7.3131714 -5.296875q0.765625 0 1.34375 0.390625q0.59375 0.375 0.921875 1.0625q0.328125 0.6875 0.328125 1.609375q0 0.90625 -0.328125 1.59375q-0.328125 0.671875 -0.90625 1.046875q-0.578125 0.359375 -1.359375 0.359375q-0.6875 0 -1.203125 -0.296875q-0.5 -0.296875 -0.765625 -0.84375l0 2.8125q0 0.21875 -0.125 0.34375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.140625q-0.125 -0.125 -0.125 -0.328125l0 -7.234375q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.125 0.125 0.125 0.34375l0 0.640625q0.265625 -0.546875 0.765625 -0.84375q0.515625 -0.296875 1.203125 -0.296875zm-0.203125 5.265625q0.859375 0 1.328125 -0.578125q0.46875 -0.578125 0.46875 -1.625q0 -1.0625 -0.46875 -1.65625q-0.46875 -0.59375 -1.328125 -0.59375q-0.84375 0 -1.3125 0.578125q-0.453125 0.578125 -0.453125 1.640625q0 1.0625 0.453125 1.65625q0.46875 0.578125 1.3125 0.578125zm7.2028046 -5.265625q1.03125 0 1.546875 0.578125q0.53125 0.578125 0.53125 1.734375l0 3.25q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -3.21875q0 -0.78125 -0.328125 -1.15625q-0.3125 -0.375 -1.0 -0.375q-0.8125 0 -1.296875 0.5q-0.46875 0.484375 -0.46875 1.328125l0 2.921875q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 
-0.125q-0.125 -0.125 -0.125 -0.359375l0 -7.625q0 -0.203125 0.125 -0.328125q0.140625 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.125q0.125 0.125 0.125 0.34375l0 3.140625q0.28125 -0.53125 0.796875 -0.796875q0.515625 -0.28125 1.1875 -0.28125zm4.5035553 5.984375q-0.234375 0 -0.375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -7.5q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.34375 0q2.03125 0 3.140625 1.09375q1.109375 1.09375 1.109375 3.125q0 2.03125 -1.125 3.140625q-1.109375 1.09375 -3.125 1.09375l-2.34375 0zm2.28125 -0.84375q3.28125 0 3.28125 -3.390625q0 -3.390625 -3.28125 -3.390625l-1.796875 0l0 6.78125l1.796875 0zm10.461807 -0.515625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.480301 -2.453125q-0.640625 0.046875 -0.96875 0.40625q-0.3125 0.34375 -0.3125 1.046875l0 0.390625l1.328125 0q0.203125 0 0.3125 0.109375q0.109375 0.109375 0.109375 0.28125q0 0.1875 -0.109375 0.28125q-0.109375 0.09375 -0.3125 0.09375l-1.328125 0l0 4.65625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.140625 -0.125 -0.140625 -0.359375l0 -4.65625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -0.21875q0 -1.078125 0.53125 -1.6875q0.546875 -0.625 1.5625 -0.703125l0.3125 -0.015625q0.3125 -0.03125 0.453125 0.0625q0.140625 0.078125 0.140625 0.296875q0 0.34375 -0.421875 0.390625l-0.3125 0.03125z" fill-rule="nonzero"/><path fill="#d9ead3" d="m233.1085 252.53609l87.49608 0l0 30.992142l-87.49608 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m233.1085 252.53609l87.49608 0l0 30.992142l-87.49608 0z" fill-rule="evenodd"/><path fill="#000000" d="m260.00964 265.61465q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l4.375 0q0.203125 0 0.328125 0.109375q0.125 0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.125 0.109375 -0.328125 0.109375l-3.90625 0l0 2.90625l3.65625 0q0.21875 0 0.328125 0.109375q0.125 0.109375 0.125 0.3125q0 0.1875 -0.125 0.296875q-0.109375 0.109375 -0.328125 0.109375l-3.65625 0l0 3.453125q0 0.21875 -0.125 0.359375q-0.125 0.140625 -0.359375 0.140625zm8.9496765 -6.03125q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm3.767273 
6.046875q-0.828125 0 -1.46875 -0.359375q-0.625 -0.375 -0.96875 -1.0625q-0.34375 -0.703125 -0.34375 -1.609375q0 -0.90625 0.34375 -1.59375q0.34375 -0.703125 0.96875 -1.0625q0.640625 -0.375 1.46875 -0.375q0.828125 0 1.453125 0.375q0.640625 0.359375 0.984375 1.0625q0.34375 0.6875 0.34375 1.59375q0 0.90625 -0.34375 1.609375q-0.34375 0.6875 -0.984375 1.0625q-0.625 0.359375 -1.453125 0.359375zm0 -0.796875q0.859375 0 1.3125 -0.5625q0.46875 -0.578125 0.46875 -1.671875q0 -1.0625 -0.46875 -1.640625q-0.46875 -0.59375 -1.3125 -0.59375q-0.859375 0 -1.328125 0.59375q-0.46875 0.578125 -0.46875 1.640625q0 1.078125 0.453125 1.65625q0.46875 0.578125 1.34375 0.578125zm8.535065 -0.046875q0.203125 0 0.296875 0.109375q0.109375 0.09375 0.109375 0.265625q0 0.1875 -0.109375 0.296875q-0.09375 0.09375 -0.296875 0.09375l-4.203125 0q-0.203125 0 -0.34375 -0.125q-0.125 -0.125 -0.125 -0.3125q0 -0.1875 0.140625 -0.359375l3.546875 -4.28125l-3.28125 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l4.0625 0q0.21875 0 0.34375 0.125q0.140625 0.125 0.140625 0.3125q0 0.1875 -0.140625 0.359375l-3.5625 4.28125l3.421875 0zm6.2547913 -0.59375q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.8396606 -0.75q2.09375 0 2.09375 2.3125l0 3.25q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -3.1875q0 -0.8125 -0.328125 -1.1875q-0.3125 -0.375 -1.0 -0.375q-0.8125 0 -1.296875 0.5q-0.46875 0.484375 -0.46875 1.328125l0 2.921875q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.328125l0 0.609375q0.28125 -0.53125 0.796875 -0.8125q0.53125 -0.28125 1.1875 -0.28125z" fill-rule="nonzero"/><path fill="#000000" d="m258.07846 275.1459q0.1875 0 0.296875 0.109375q0.109375 0.109375 0.109375 0.296875l0 2.984375q0 0.296875 -0.09375 0.4375q-0.078125 0.140625 -0.328125 0.234375q-0.46875 0.203125 -1.15625 0.328125q-0.6875 0.109375 -1.3749847 0.109375q-1.25 0 -2.171875 -0.515625q-0.90625 -0.515625 -1.390625 -1.484375q-0.484375 -0.96875 -0.484375 -2.328125q0 -1.328125 0.46875 -2.296875q0.484375 -0.984375 1.375 -1.5q0.90625 -0.53125 2.125 -0.53125q0.84373474 0 1.5624847 0.265625q0.71875 0.25 1.203125 0.734375q0.21875 0.203125 0.21875 0.421875q0 0.171875 -0.109375 0.296875q-0.09375 0.125 -0.234375 0.125q-0.140625 0 -0.328125 -0.140625q-0.625 -0.484375 -1.140625 -0.671875q-0.5 -0.1875 -1.1562347 -0.1875q-1.4375 0 -2.203125 0.90625q-0.75 0.890625 -0.75 2.578125q0 1.71875 0.765625 2.609375q0.78125 0.890625 2.28125 0.890625q1.1093597 0 2.0312347 -0.328125l0 -2.578125l-1.7499847 0q-0.203125 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.265625q0 -0.1875 0.125 
-0.28125q0.125 -0.109375 0.328125 -0.109375l2.2343597 0zm5.15683 -1.5625q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm3.720398 -0.015625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm7.3131714 -5.296875q0.765625 0 1.34375 0.390625q0.59375 0.375 0.921875 1.0625q0.328125 0.6875 0.328125 1.609375q0 0.90625 -0.328125 1.59375q-0.328125 0.671875 -0.90625 1.046875q-0.578125 0.359375 -1.359375 0.359375q-0.6875 0 -1.203125 -0.296875q-0.5 -0.296875 -0.765625 -0.84375l0 2.8125q0 0.21875 -0.125 0.34375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.140625q-0.125 -0.125 -0.125 -0.328125l0 -7.234375q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.125 0.125 0.125 0.34375l0 0.640625q0.265625 -0.546875 0.765625 -0.84375q0.515625 -0.296875 1.203125 -0.296875zm-0.203125 5.265625q0.859375 0 1.328125 -0.578125q0.46875 -0.578125 0.46875 -1.625q0 -1.0625 -0.46875 -1.65625q-0.46875 -0.59375 -1.328125 -0.59375q-0.84375 0 -1.3125 0.578125q-0.453125 0.578125 -0.453125 1.640625q0 1.0625 0.453125 1.65625q0.46875 0.578125 1.3125 0.578125zm7.2027893 -5.265625q1.03125 0 1.546875 0.578125q0.53125 0.578125 0.53125 1.734375l0 3.25q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -3.21875q0 -0.78125 -0.328125 -1.15625q-0.3125 -0.375 -1.0 -0.375q-0.8125 0 -1.296875 0.5q-0.46875 0.484375 -0.46875 1.328125l0 2.921875q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -7.625q0 -0.203125 0.125 -0.328125q0.140625 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.125q0.125 0.125 0.125 0.34375l0 3.140625q0.28125 -0.53125 0.796875 -0.796875q0.515625 -0.28125 1.1875 -0.28125zm4.5035706 5.984375q-0.234375 0 -0.375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -7.5q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.34375 0q2.03125 0 3.140625 1.09375q1.109375 1.09375 1.109375 3.125q0 2.03125 -1.125 3.140625q-1.109375 1.09375 -3.125 1.09375l-2.34375 0zm2.28125 -0.84375q3.28125 0 
[SVG diagram diff: raw vector path data omitted. The old drawing (which ends with "\ No newline at end of file") is deleted and a replacement SVG drawing is added; the diagram's text is stored as path outlines, so no readable labels survive in the source.]
-2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375z" fill-rule="nonzero"/><path fill="#000000" d="m303.37402 346.47687q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l4.375 0q0.203125 0 0.328125 0.109375q0.125 0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.125 0.109375 -0.328125 0.109375l-3.90625 0l0 2.90625l3.65625 0q0.21875 0 0.328125 0.109375q0.125 0.109375 0.125 0.3125q0 0.1875 -0.125 0.296875q-0.109375 0.109375 -0.328125 0.109375l-3.65625 0l0 3.453125q0 0.21875 -0.125 0.359375q-0.125 0.140625 -0.359375 0.140625zm6.5434265 0q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -7.625q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.359375 -0.125q0.203125 0 0.34375 0.125q0.140625 0.125 0.140625 0.34375l0 7.625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125zm4.674652 -6.046875q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm7.0631714 -0.015625q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm4.3300476 -5.28125q0.765625 0 1.34375 0.375q0.59375 0.359375 0.921875 1.046875q0.328125 0.6875 0.328125 1.59375q0 0.90625 -0.328125 1.59375q-0.328125 0.6875 -0.921875 1.078125q-0.578125 0.375 -1.34375 0.375q-0.6875 0 -1.203125 -0.296875q-0.5 -0.296875 -0.765625 -0.84375l0 0.640625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.359375 
0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -7.625q0 -0.203125 0.125 -0.328125q0.125 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.125q0.125 0.125 0.125 0.34375l0 3.203125q0.265625 -0.546875 0.765625 -0.84375q0.515625 -0.296875 1.203125 -0.296875zm-0.203125 5.265625q0.859375 0 1.328125 -0.59375q0.46875 -0.59375 0.46875 -1.65625q0 -1.046875 -0.46875 -1.625q-0.46875 -0.578125 -1.328125 -0.578125q-0.84375 0 -1.3125 0.578125q-0.453125 0.578125 -0.453125 1.640625q0 1.0625 0.453125 1.65625q0.46875 0.578125 1.3125 0.578125zm8.687164 -5.25q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 5.078125q0 0.203125 -0.125 0.34375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.34375 -0.125q-0.125 -0.125 -0.125 -0.328125l0 -0.609375q-0.28125 0.53125 -0.78125 0.8125q-0.5 0.265625 -1.125 0.265625q-1.03125 0 -1.5625 -0.578125q-0.53125 -0.578125 -0.53125 -1.71875l0 -3.265625q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 3.234375q0 0.78125 0.3125 1.15625q0.3125 0.359375 0.984375 0.359375q0.765625 0 1.234375 -0.5q0.46875 -0.5 0.46875 -1.3125l0 -2.9375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625zm4.8726807 -1.71875q-0.640625 0.046875 -0.96875 0.40625q-0.3125 0.34375 -0.3125 1.046875l0 0.390625l1.328125 0q0.203125 0 0.3125 0.109375q0.109375 0.109375 0.109375 0.28125q0 0.1875 -0.109375 0.28125q-0.109375 0.09375 -0.3125 0.09375l-1.328125 0l0 4.65625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.140625 -0.125 -0.140625 -0.359375l0 -4.65625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -0.21875q0 -1.078125 0.53125 -1.6875q0.546875 -0.625 1.5625 -0.703125l0.3125 -0.015625q0.3125 -0.03125 0.453125 0.0625q0.140625 0.078125 0.140625 0.296875q0 0.34375 -0.421875 0.390625l-0.3125 0.03125zm3.9360352 0q-0.640625 0.046875 -0.96875 0.40625q-0.3125 0.34375 -0.3125 1.046875l0 0.390625l1.328125 0q0.203125 0 0.3125 0.109375q0.109375 0.109375 0.109375 0.28125q0 0.1875 -0.109375 0.28125q-0.109375 0.09375 -0.3125 0.09375l-1.328125 0l0 4.65625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.140625 -0.125 -0.140625 -0.359375l0 -4.65625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -0.21875q0 -1.078125 0.53125 -1.6875q0.546875 -0.625 1.5625 -0.703125l0.3125 -0.015625q0.3125 -0.03125 0.453125 0.0625q0.140625 0.078125 0.140625 0.296875q0 0.34375 -0.421875 0.390625l-0.3125 0.03125zm5.873535 6.328125q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.7927856 -0.734375q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 
0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625z" fill-rule="nonzero"/><path fill="#f4cccc" d="m413.02625 319.3983l87.49606 0l0 30.992126l-87.49606 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m413.02625 319.3983l87.49606 0l0 30.992126l-87.49606 0z" fill-rule="evenodd"/><path fill="#000000" d="m443.6039 332.47687q-0.234375 0 -0.375 -0.140625q-0.140625 -0.140625 -0.140625 -0.359375l0 -7.1875l-2.578125 0q-0.21875 0 -0.34375 -0.109375q-0.109375 -0.109375 -0.109375 -0.3125q0 -0.203125 0.109375 -0.296875q0.125 -0.109375 0.34375 -0.109375l6.15625 0q0.21875 0 0.328125 0.109375q0.125 0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.109375 0.109375 -0.328125 0.109375l-2.578125 0l0 7.1875q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.34375 0.140625zm5.113556 0q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l4.375 0q0.203125 0 0.328125 0.109375q0.125 0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.125 0.109375 -0.328125 0.109375l-3.90625 0l0 2.90625l3.65625 0q0.21875 0 0.328125 0.109375q0.125 0.109375 0.125 0.3125q0 0.1875 -0.125 0.296875q-0.109375 0.109375 -0.328125 0.109375l-3.65625 0l0 3.453125q0 0.21875 -0.125 0.359375q-0.125 0.140625 -0.359375 0.140625zm6.6840515 -0.0625q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.328125l0 -7.5625q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.140625 0.125 0.140625 0.359375l0 7.171875l3.875 0q0.21875 0 0.328125 0.109375q0.125 0.109375 0.125 0.3125q0 0.203125 -0.125 0.3125q-0.109375 0.109375 -0.328125 0.109375l-4.375 0zm6.3394165 0.0625q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.125 -0.359375q0.140625 -0.125 0.359375 -0.125q0.21875 0 0.34375 0.125q0.140625 0.125 0.140625 0.359375l0 5.0625q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125zm0 -7.28125q-0.296875 0 -0.484375 -0.171875q-0.171875 -0.171875 -0.171875 -0.453125q0 -0.25 0.171875 -0.421875q0.1875 -0.171875 0.484375 -0.171875q0.28125 0 0.453125 0.171875q0.1875 0.171875 0.1875 0.421875q0 0.28125 -0.1875 0.453125q-0.171875 0.171875 -0.453125 0.171875zm4.987152 6.515625q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm5.908142 -0.65625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 
-0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375z" fill-rule="nonzero"/><path fill="#000000" d="m429.9527 346.47687q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.125 -0.359375q0.140625 -0.125 0.359375 -0.125q0.21875 0 0.34375 0.125q0.140625 0.125 0.140625 0.359375l0 5.0625q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125zm0 -7.28125q-0.296875 0 -0.484375 -0.171875q-0.171875 -0.171875 -0.171875 -0.453125q0 -0.25 0.171875 -0.421875q0.1875 -0.171875 0.484375 -0.171875q0.28125 0 0.453125 0.171875q0.1875 0.171875 0.1875 0.421875q0 0.28125 -0.1875 0.453125q-0.171875 0.171875 -0.453125 0.171875zm5.237152 1.234375q2.09375 0 2.09375 2.3125l0 3.25q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -3.1875q0 -0.8125 -0.328125 -1.1875q-0.3125 -0.375 -1.0 -0.375q-0.8125 0 -1.296875 0.5q-0.46875 0.484375 -0.46875 1.328125l0 2.921875q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.328125l0 0.609375q0.28125 -0.53125 0.796875 -0.8125q0.53125 -0.28125 1.1875 -0.28125zm6.56604 5.28125q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm5.9081726 -0.65625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.7927856 -0.734375q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 
0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm4.282898 -0.015625q0.765625 0 1.34375 0.390625q0.59375 0.375 0.921875 1.0625q0.328125 0.6875 0.328125 1.609375q0 0.90625 -0.328125 1.59375q-0.328125 0.671875 -0.90625 1.046875q-0.578125 0.359375 -1.359375 0.359375q-0.6875 0 -1.203125 -0.296875q-0.5 -0.296875 -0.765625 -0.84375l0 2.8125q0 0.21875 -0.125 0.34375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.140625q-0.125 -0.125 -0.125 -0.328125l0 -7.234375q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.125 0.125 0.125 0.34375l0 0.640625q0.265625 -0.546875 0.765625 -0.84375q0.515625 -0.296875 1.203125 -0.296875zm-0.203125 5.265625q0.859375 0 1.328125 -0.578125q0.46875 -0.578125 0.46875 -1.625q0 -1.0625 -0.46875 -1.65625q-0.46875 -0.59375 -1.328125 -0.59375q-0.84375 0 -1.3125 0.578125q-0.453125 0.578125 -0.453125 1.640625q0 1.0625 0.453125 1.65625q0.46875 0.578125 1.3125 0.578125zm7.14032 -5.25q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm5.861023 4.609375q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.5896606 4.53125q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm5.9081726 -0.65625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 
-1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375zm6.7927856 -0.734375q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m371.61902 334.89435l41.417297 0" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m371.61902 334.89435l37.990234 0" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m409.60925 334.89435l-1.1245728 1.1246033l3.0897522 -1.1246033l-3.0897522 -1.1245728z" fill-rule="evenodd"/><path fill="#c9daf8" d="m548.5407 277.52954l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m548.5407 277.52954l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path fill="#000000" d="m587.0588 293.13934q0.1875 0 0.296875 0.109375q0.109375 0.109375 0.109375 0.296875l0 2.984375q0 0.296875 -0.09375 0.4375q-0.078125 0.140625 -0.328125 0.234375q-0.46875 0.203125 -1.15625 0.328125q-0.6875 0.109375 -1.375 0.109375q-1.25 0 -2.171875 -0.515625q-0.90625 -0.515625 -1.390625 -1.484375q-0.484375 -0.96875 -0.484375 -2.328125q0 -1.328125 0.46875 -2.296875q0.484375 -0.984375 1.375 -1.5q0.90625 -0.53125 2.125 -0.53125q0.84375 0 1.5625 0.265625q0.71875 0.25 1.203125 0.734375q0.21875 0.203125 0.21875 0.421875q0 0.171875 -0.109375 0.296875q-0.09375 0.125 -0.234375 0.125q-0.140625 0 -0.328125 -0.140625q-0.625 -0.484375 -1.140625 -0.671875q-0.5 -0.1875 -1.15625 -0.1875q-1.4375 0 -2.203125 0.90625q-0.75 0.890625 -0.75 2.578125q0 1.71875 0.765625 2.609375q0.78125 0.890625 2.28125 0.890625q1.109375 0 2.03125 -0.328125l0 -2.578125l-1.75 0q-0.203125 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.265625q0 -0.1875 0.125 -0.28125q0.125 -0.109375 0.328125 -0.109375l2.234375 0zm2.8911743 4.46875q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.84375 0q1.328125 0 2.0625 0.65625q0.75 0.640625 0.75 1.828125q0 1.1875 -0.75 1.84375q-0.734375 0.65625 -2.0625 0.65625l-2.359375 0l0 3.03125q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.359375 0.140625zm2.765625 -4.34375q1.9375 0 1.9375 -1.6875q0 -1.671875 -1.9375 -1.671875l-2.265625 0l0 3.359375l2.265625 0zm7.7869263 4.375q-1.65625 0 -2.515625 -0.859375q-0.84375 -0.859375 -0.84375 -2.546875l0 -4.703125q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.78125q0 1.25 0.609375 1.875q0.609375 0.609375 1.78125 0.609375q1.171875 0 1.765625 -0.609375q0.609375 -0.625 
0.609375 -1.875l0 -4.78125q0 -0.234375 0.140625 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.703125q0 1.671875 -0.859375 2.546875q-0.859375 0.859375 -2.5 0.859375z" fill-rule="nonzero"/><path fill="#c9daf8" d="m548.5407 319.3983l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m548.5407 319.3983l87.49603 0l0 30.992126l-87.49603 0z" fill-rule="evenodd"/><path fill="#000000" d="m584.63763 339.50812q-1.1875 0 -2.0625 -0.515625q-0.875 -0.53125 -1.359375 -1.5q-0.46875 -0.984375 -0.46875 -2.3125q0 -1.328125 0.46875 -2.296875q0.484375 -0.984375 1.359375 -1.5q0.875 -0.53125 2.0625 -0.53125q0.8125 0 1.515625 0.265625q0.71875 0.25 1.25 0.734375q0.1875 0.1875 0.1875 0.421875q0 0.171875 -0.09375 0.296875q-0.09375 0.125 -0.21875 0.125q-0.15625 0 -0.359375 -0.140625q-0.609375 -0.46875 -1.109375 -0.65625q-0.5 -0.203125 -1.140625 -0.203125q-1.390625 0 -2.140625 0.90625q-0.75 0.90625 -0.75 2.578125q0 1.671875 0.75 2.578125q0.75 0.90625 2.140625 0.90625q0.640625 0 1.140625 -0.1875q0.5 -0.1875 1.109375 -0.671875q0.203125 -0.125 0.359375 -0.125q0.125 0 0.21875 0.125q0.09375 0.109375 0.09375 0.296875q0 0.234375 -0.1875 0.40625q-0.53125 0.484375 -1.25 0.75q-0.703125 0.25 -1.515625 0.25zm5.0302734 -0.03125q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.84375 0q1.328125 0 2.0625 0.65625q0.75 0.640625 0.75 1.828125q0 1.1875 -0.75 1.84375q-0.734375 0.65625 -2.0625 0.65625l-2.359375 0l0 3.03125q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.359375 0.140625zm2.765625 -4.34375q1.9375 0 1.9375 -1.6875q0 -1.671875 -1.9375 -1.671875l-2.265625 0l0 3.359375l2.265625 0zm7.7869263 4.375q-1.65625 0 -2.515625 -0.859375q-0.84375 -0.859375 -0.84375 -2.546875l0 -4.703125q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.78125q0 1.25 0.609375 1.875q0.609375 0.609375 1.78125 0.609375q1.171875 0 1.765625 -0.609375q0.609375 -0.625 0.609375 -1.875l0 -4.78125q0 -0.234375 0.140625 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 4.703125q0 1.671875 -0.859375 2.546875q-0.859375 0.859375 -2.5 0.859375z" fill-rule="nonzero"/><path fill="#000000" fill-opacity="0.0" d="m219.98688 334.92584l64.12598 -0.03149414" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.98688 334.92584l60.698914 -0.029815674" fill-rule="evenodd"/><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m280.68576 334.89603l-1.1240234 1.1251526l3.0892334 -1.1260986l-3.090332 -1.1230774z" fill-rule="evenodd"/><path fill="#d9ead3" d="m413.02625 141.28871l20.53543 0l0 20.53543l-20.53543 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m413.02625 141.28871l20.53543 0l0 20.53543l-20.53543 0z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m437.52493 135.68242l73.763794 0l0 31.748032l-73.763794 0z" fill-rule="evenodd"/><path fill="#000000" d="m448.0718 156.20241q-0.234375 0 -0.375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -7.5q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.34375 0q2.03125 0 3.140625 1.09375q1.109375 1.09375 1.109375 3.125q0 2.03125 -1.125 3.140625q-1.109375 1.09375 -3.125 
1.09375l-2.34375 0zm2.28125 -0.84375q3.28125 0 3.28125 -3.390625q0 -3.390625 -3.28125 -3.390625l-1.796875 0l0 6.78125l1.796875 0zm8.3211975 -5.140625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm7.0631714 -0.015625q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm3.767517 -5.28125q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm10.15921 0.75q-0.234375 0 -0.375 -0.140625q-0.140625 -0.140625 -0.140625 -0.359375l0 -7.1875l-2.578125 0q-0.21875 0 -0.34375 -0.109375q-0.109375 -0.109375 -0.109375 -0.3125q0 -0.203125 0.109375 -0.296875q0.125 -0.109375 0.34375 -0.109375l6.15625 0q0.21875 0 0.328125 0.109375q0.125 0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.109375 0.109375 -0.328125 0.109375l-2.578125 0l0 7.1875q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.34375 0.140625zm8.691681 -5.71875q0.140625 -0.296875 0.421875 -0.296875q0.1875 0 
0.328125 0.125q0.140625 0.109375 0.140625 0.296875q0 0.109375 -0.046875 0.1875l-3.375 7.28125q-0.0625 0.125 -0.171875 0.1875q-0.109375 0.078125 -0.234375 0.078125q-0.1875 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.296875q0 -0.09375 0.046875 -0.1875l0.84375 -1.8125l-2.375 -5.140625q-0.046875 -0.078125 -0.046875 -0.171875q0 -0.1875 0.15625 -0.3125q0.15625 -0.140625 0.359375 -0.140625q0.109375 0 0.21875 0.078125q0.125 0.078125 0.1875 0.203125l2.0 4.5l2.0 -4.46875zm4.902405 -0.328125q0.765625 0 1.34375 0.390625q0.59375 0.375 0.921875 1.0625q0.328125 0.6875 0.328125 1.609375q0 0.90625 -0.328125 1.59375q-0.328125 0.671875 -0.90625 1.046875q-0.578125 0.359375 -1.359375 0.359375q-0.6875 0 -1.203125 -0.296875q-0.5 -0.296875 -0.765625 -0.84375l0 2.8125q0 0.21875 -0.125 0.34375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.140625q-0.125 -0.125 -0.125 -0.328125l0 -7.234375q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.125 0.125 0.125 0.34375l0 0.640625q0.265625 -0.546875 0.765625 -0.84375q0.515625 -0.296875 1.203125 -0.296875zm-0.203125 5.265625q0.859375 0 1.328125 -0.578125q0.46875 -0.578125 0.46875 -1.625q0 -1.0625 -0.46875 -1.65625q-0.46875 -0.59375 -1.328125 -0.59375q-0.84375 0 -1.3125 0.578125q-0.453125 0.578125 -0.453125 1.640625q0 1.0625 0.453125 1.65625q0.46875 0.578125 1.3125 0.578125zm8.76532 -0.640625q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375z" fill-rule="nonzero"/><path fill="#f4cccc" d="m519.9029 141.28871l20.5354 0l0 20.53543l-20.5354 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m519.9029 141.28871l20.5354 0l0 20.53543l-20.5354 0z" fill-rule="evenodd"/><path fill="#000000" fill-opacity="0.0" d="m544.40155 135.68242l100.0 0l0 31.748032l-100.0 0z" fill-rule="evenodd"/><path fill="#000000" d="m554.9328 156.26491q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.578125q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.359375 -0.140625q0.234375 0 0.359375 0.140625q0.140625 0.125 0.140625 0.359375l0 7.578125q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.359375 0.140625zm5.3845215 -6.046875q2.09375 0 2.09375 2.3125l0 3.25q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -3.1875q0 -0.8125 -0.328125 -1.1875q-0.3125 -0.375 -1.0 -0.375q-0.8125 0 -1.296875 0.5q-0.46875 0.484375 -0.46875 1.328125l0 2.921875q0 0.234375 -0.125 0.359375q-0.125 0.125 -0.359375 0.125q-0.234375 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.21875 0.125 -0.34375q0.125 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.328125l0 0.609375q0.28125 -0.53125 0.796875 -0.8125q0.53125 -0.28125 1.1875 -0.28125zm6.456726 -1.703125q-0.640625 0.046875 -0.96875 0.40625q-0.3125 0.34375 -0.3125 
1.046875l0 0.390625l1.328125 0q0.203125 0 0.3125 0.109375q0.109375 0.109375 0.109375 0.28125q0 0.1875 -0.109375 0.28125q-0.109375 0.09375 -0.3125 0.09375l-1.328125 0l0 4.65625q0 0.234375 -0.140625 0.359375q-0.140625 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.140625 -0.125 -0.140625 -0.359375l0 -4.65625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -0.21875q0 -1.078125 0.53125 -1.6875q0.546875 -0.625 1.5625 -0.703125l0.3125 -0.015625q0.3125 -0.03125 0.453125 0.0625q0.140625 0.078125 0.140625 0.296875q0 0.34375 -0.421875 0.390625l-0.3125 0.03125zm4.248535 1.71875q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm3.720398 -0.015625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 -0.375 1.109375 -0.53125q0.765625 -0.15625 2.125 -0.15625l0.265625 0l0 -0.40625q0 -0.71875 -0.296875 -1.046875q-0.28125 -0.34375 -0.953125 -0.34375q-0.8125 0 -1.65625 0.453125q-0.3125 0.203125 -0.453125 0.203125q-0.140625 0 -0.234375 -0.109375q-0.09375 -0.109375 -0.09375 -0.28125q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.328125 -0.25q0.421875 -0.25 0.953125 -0.375q0.546875 -0.140625 1.0625 -0.140625zm-0.390625 5.296875q0.71875 0 1.171875 -0.484375q0.46875 -0.484375 0.46875 -1.25l0 -0.34375l-0.21875 0q-1.046875 0 -1.609375 0.09375q-0.546875 0.078125 -0.78125 0.296875q-0.234375 0.203125 -0.234375 0.609375q0 0.46875 0.34375 0.78125q0.34375 0.296875 0.859375 0.296875zm6.3444214 0.765625q-0.5625 0 -1.0625 -0.125q-0.5 -0.140625 -0.875 -0.375q-0.21875 -0.140625 -0.3125 -0.265625q-0.078125 -0.125 -0.078125 -0.3125q0 -0.15625 0.078125 -0.25q0.09375 -0.109375 0.234375 -0.109375q0.15625 0 0.421875 0.1875q0.359375 0.21875 0.71875 0.34375q0.359375 0.125 0.875 0.125q0.65625 0 1.015625 -0.21875q0.359375 -0.234375 0.359375 -0.671875q0 -0.265625 -0.140625 -0.421875q-0.125 -0.171875 -0.453125 -0.296875q-0.3125 -0.125 -0.9375 -0.25q-1.0625 -0.234375 -1.515625 -0.609375q-0.453125 -0.390625 -0.453125 -1.046875q0 -0.515625 0.28125 -0.90625q0.28125 -0.40625 0.796875 -0.625q0.515625 -0.234375 1.15625 -0.234375q0.46875 0 0.90625 0.125q0.4375 0.125 0.78125 0.34375q0.40625 0.296875 0.40625 0.609375q0 0.15625 -0.09375 0.265625q-0.09375 0.109375 -0.234375 0.109375q-0.140625 0 -0.4375 -0.203125q-0.328125 -0.21875 -0.625 -0.34375q-0.296875 -0.125 -0.75 -0.125q-0.5625 0 -0.90625 0.265625q-0.34375 0.25 -0.34375 0.671875q0 0.25 0.125 0.421875q0.125 0.15625 0.421875 0.28125q0.296875 0.125 0.84375 0.25q0.828125 0.1875 1.265625 0.40625q0.453125 0.203125 0.640625 0.515625q0.203125 0.3125 0.203125 0.796875q0 0.75 -0.640625 1.21875q-0.640625 0.453125 -1.671875 0.453125zm6.47876 -0.78125q0.421875 0.03125 0.421875 0.375q0 0.203125 
-0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm4.283142 -5.265625q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm5.782898 0q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 5.078125q0 0.203125 -0.125 0.34375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.34375 -0.125q-0.125 -0.125 -0.125 -0.328125l0 -0.609375q-0.28125 0.53125 -0.78125 0.8125q-0.5 0.265625 -1.125 0.265625q-1.03125 0 -1.5625 -0.578125q-0.53125 -0.578125 -0.53125 -1.71875l0 -3.265625q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 3.234375q0 0.78125 0.3125 1.15625q0.3125 0.359375 0.984375 0.359375q0.765625 0 1.234375 -0.5q0.46875 -0.5 0.46875 -1.3125l0 -2.9375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625zm4.7008057 6.046875q-0.8125 0 -1.453125 -0.359375q-0.625 -0.375 -0.96875 -1.0625q-0.34375 -0.6875 -0.34375 -1.578125q0 -0.90625 0.359375 -1.59375q0.359375 -0.703125 0.984375 -1.078125q0.640625 -0.390625 1.46875 -0.390625q0.453125 0 0.90625 0.125q0.453125 0.125 0.78125 0.359375q0.21875 0.140625 0.3125 0.28125q0.09375 0.140625 0.09375 0.3125q0 0.171875 -0.09375 0.28125q-0.09375 0.09375 -0.234375 0.09375q-0.078125 0 -0.1875 -0.046875q-0.09375 -0.046875 -0.15625 -0.09375q-0.0625 -0.046875 -0.09375 -0.0625q-0.3125 -0.203125 -0.59375 -0.3125q-0.28125 -0.125 -0.6875 -0.125q-0.875 0 -1.359375 0.59375q-0.484375 0.59375 -0.484375 1.65625q0 1.046875 0.484375 1.625q0.484375 0.578125 1.359375 0.578125q0.40625 0 0.703125 -0.109375q0.296875 -0.125 0.59375 -0.328125q0.140625 -0.09375 0.25 -0.15625q0.125 -0.0625 0.203125 -0.0625q0.140625 0 0.21875 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.15625 -0.09375 0.28125q-0.078125 0.125 -0.296875 0.28125q-0.34375 0.234375 -0.8125 0.375q-0.46875 0.125 -0.953125 0.125zm6.029297 -0.78125q0.421875 0.03125 0.421875 0.375q0 0.203125 -0.15625 0.3125q-0.140625 0.09375 -0.4375 0.078125l-0.328125 -0.03125q-0.953125 -0.0625 -1.421875 -0.5625q-0.453125 -0.515625 -0.453125 -1.53125l0 -3.015625l-0.796875 0q-0.203125 0 -0.328125 -0.09375q-0.109375 -0.109375 -0.109375 -0.28125q0 -0.171875 0.109375 -0.28125q0.125 -0.109375 0.328125 -0.109375l0.796875 0l0 -1.359375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.34375l0 1.359375l1.328125 0q0.1875 0 0.296875 0.109375q0.125 0.109375 0.125 0.28125q0 0.171875 -0.125 0.28125q-0.109375 0.09375 -0.296875 0.09375l-1.328125 
0l0 3.0625q0 0.65625 0.265625 0.953125q0.265625 0.296875 0.8125 0.328125l0.3125 0.03125zm5.830017 -5.265625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 5.078125q0 0.203125 -0.125 0.34375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.34375 -0.125q-0.125 -0.125 -0.125 -0.328125l0 -0.609375q-0.28125 0.53125 -0.78125 0.8125q-0.5 0.265625 -1.125 0.265625q-1.03125 0 -1.5625 -0.578125q-0.53125 -0.578125 -0.53125 -1.71875l0 -3.265625q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.34375l0 3.234375q0 0.78125 0.3125 1.15625q0.3125 0.359375 0.984375 0.359375q0.765625 0 1.234375 -0.5q0.46875 -0.5 0.46875 -1.3125l0 -2.9375q0 -0.21875 0.125 -0.34375q0.140625 -0.140625 0.375 -0.140625zm5.1851807 0q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm5.861023 4.609375q0.140625 0 0.234375 0.109375q0.09375 0.109375 0.09375 0.28125q0 0.296875 -0.421875 0.546875q-0.4375 0.25 -0.921875 0.375q-0.46875 0.125 -0.921875 0.125q-1.359375 0 -2.15625 -0.796875q-0.78125 -0.8125 -0.78125 -2.21875q0 -0.90625 0.34375 -1.59375q0.359375 -0.6875 0.984375 -1.0625q0.640625 -0.390625 1.4375 -0.390625q1.140625 0 1.8125 0.75q0.671875 0.734375 0.671875 2.0q0 0.25 -0.09375 0.359375q-0.09375 0.109375 -0.3125 0.109375l-3.859375 0q0.09375 2.0625 1.953125 2.0625q0.46875 0 0.796875 -0.125q0.34375 -0.125 0.71875 -0.34375q0.3125 -0.1875 0.421875 -0.1875zm-2.09375 -3.875q-0.765625 0 -1.234375 0.484375q-0.46875 0.484375 -0.546875 1.359375l3.390625 0q-0.015625 -0.890625 -0.4375 -1.359375q-0.421875 -0.484375 -1.171875 -0.484375z" fill-rule="nonzero"/><path fill="#d9ead3" d="m31.874912 252.53609l87.49606 0l0 30.992142l-87.49606 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m31.874912 252.53609l87.49606 0l0 30.992142l-87.49606 0z" fill-rule="evenodd"/><path fill="#000000" d="m67.27695 264.03653q0.21875 0 0.34375 0.140625q0.125 0.125 0.125 0.359375l0 7.578125q0 0.21875 -0.125 0.359375q-0.125 0.140625 -0.34375 0.140625q-0.234375 0 -0.375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -3.4375l-5.062496 0l0 3.4375q0 0.21875 -0.140625 0.359375q-0.125 0.140625 -0.34375 0.140625q-0.234375 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.578125q0 -0.234375 0.125 -0.359375q0.125 -0.140625 0.359375 -0.140625q0.21875 0 0.34375 0.140625q0.140625 0.125 0.140625 0.359375l0 3.296875l5.062496 0l0 -3.296875q0 -0.234375 0.125 -0.359375q0.140625 -0.140625 0.375 -0.140625zm3.0648193 8.515625q-0.234375 0 -0.375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -7.5q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l2.34375 0q2.03125 0 3.140625 1.09375q1.109375 1.09375 1.109375 3.125q0 2.03125 -1.125 3.140625q-1.109375 1.09375 -3.125 1.09375l-2.34375 0zm2.28125 -0.84375q3.28125 0 3.28125 -3.390625q0 -3.390625 -3.28125 -3.390625l-1.796875 0l0 6.78125l1.796875 0zm6.5711823 0.90625q-0.21875 0 -0.359375 -0.140625q-0.125 -0.140625 -0.125 -0.359375l0 -7.546875q0 -0.21875 0.125 -0.34375q0.140625 -0.125 0.375 -0.125l4.375 0q0.203125 0 0.328125 0.109375q0.125 
0.09375 0.125 0.296875q0 0.203125 -0.125 0.3125q-0.125 0.109375 -0.328125 0.109375l-3.90625 0l0 2.90625l3.65625 0q0.21875 0 0.328125 0.109375q0.125 0.109375 0.125 0.3125q0 0.1875 -0.125 0.296875q-0.109375 0.109375 -0.328125 0.109375l-3.65625 0l0 3.453125q0 0.21875 -0.125 0.359375q-0.125 0.140625 -0.359375 0.140625zm9.0746765 -5.359375q0.8125 0 1.40625 0.34375q0.609375 0.328125 0.9375 0.9375q0.328125 0.59375 0.328125 1.390625q0 0.78125 -0.359375 1.40625q-0.359375 0.625 -1.0 0.96875q-0.640625 0.328125 -1.484375 0.328125q-0.734375 0 -1.453125 -0.25q-0.703125 -0.265625 -1.1875 -0.734375q-0.203125 -0.171875 -0.203125 -0.40625q0 -0.171875 0.09375 -0.296875q0.109375 -0.125 0.234375 -0.125q0.171875 0 0.34375 0.140625q0.515625 0.4375 1.046875 0.640625q0.53125 0.203125 1.109375 0.203125q0.890625 0 1.390625 -0.5q0.5 -0.5 0.5 -1.359375q0 -0.84375 -0.5 -1.359375q-0.5 -0.515625 -1.359375 -0.515625q-1.09375 0 -1.78125 0.84375q-0.15625 0.171875 -0.40625 0.171875q-0.15625 0 -0.28125 -0.09375q-0.109375 -0.109375 -0.109375 -0.296875l0 -4.125q0 -0.21875 0.125 -0.34375q0.125 -0.125 0.359375 -0.125l4.21875 0q0.21875 0 0.34375 0.109375q0.125 0.09375 0.125 0.296875q0 0.1875 -0.125 0.296875q-0.125 0.109375 -0.34375 0.109375l-3.734375 0l0 3.015625q0.34375 -0.328125 0.78125 -0.5q0.453125 -0.171875 0.984375 -0.171875z" fill-rule="nonzero"/><path fill="#d9ead3" d="m190.14 134.76706l87.49608 0l0 30.992126l-87.49608 0z" fill-rule="evenodd"/><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m190.14 134.76706l87.49608 0l0 30.992126l-87.49608 0z" fill-rule="evenodd"/><path fill="#000000" d="m215.10997 150.37688q0.1875 0 0.296875 0.109375q0.109375 0.109375 0.109375 0.296875l0 2.984375q0 0.296875 -0.09375 0.4375q-0.078125 0.140625 -0.328125 0.234375q-0.46875 0.203125 -1.15625 0.328125q-0.6875 0.109375 -1.375 0.109375q-1.25 0 -2.171875 -0.515625q-0.90625 -0.515625 -1.390625 -1.484375q-0.484375 -0.96875 -0.484375 -2.328125q0 -1.328125 0.46875 -2.296875q0.484375 -0.984375 1.375 -1.5q0.90625 -0.53125 2.125 -0.53125q0.84375 0 1.5625 0.265625q0.71875 0.25 1.203125 0.734375q0.21875 0.203125 0.21875 0.421875q0 0.171875 -0.109375 0.296875q-0.09375 0.125 -0.234375 0.125q-0.140625 0 -0.328125 -0.140625q-0.625 -0.484375 -1.140625 -0.671875q-0.5 -0.1875 -1.15625 -0.1875q-1.4375 0 -2.203125 0.90625q-0.75 0.890625 -0.75 2.578125q0 1.71875 0.765625 2.609375q0.78125 0.890625 2.28125 0.890625q1.109375 0 2.03125 -0.328125l0 -2.578125l-1.75 0q-0.203125 0 -0.328125 -0.109375q-0.125 -0.109375 -0.125 -0.265625q0 -0.1875 0.125 -0.28125q0.125 -0.109375 0.328125 -0.109375l2.234375 0zm5.1568146 -1.5625q0.5 -0.03125 0.5 0.40625q0 0.203125 -0.109375 0.3125q-0.109375 0.109375 -0.375 0.140625l-0.359375 0.03125q-0.796875 0.078125 -1.1875 0.578125q-0.390625 0.484375 -0.390625 1.15625l0 2.921875q0 0.234375 -0.140625 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.125 -0.125 -0.359375l0 -5.0625q0 -0.234375 0.140625 -0.359375q0.140625 -0.125 0.34375 -0.125q0.1875 0 0.3125 0.125q0.140625 0.125 0.140625 0.34375l0 0.671875q0.25 -0.53125 0.71875 -0.796875q0.46875 -0.28125 1.0625 -0.328125l0.171875 -0.015625zm3.720398 -0.015625q2.203125 0 2.203125 2.296875l0 3.265625q0 0.21875 -0.125 0.359375q-0.125 0.125 -0.34375 0.125q-0.21875 0 -0.359375 -0.125q-0.125 -0.140625 -0.125 -0.359375l0 -0.578125q-0.21875 0.515625 -0.6875 0.796875q-0.46875 0.28125 -1.078125 0.28125q-0.5625 0 -1.046875 -0.21875q-0.46875 -0.234375 -0.75 -0.640625q-0.265625 -0.40625 -0.265625 -0.90625q0 -0.65625 0.328125 -1.015625q0.34375 
[elided: ~17 lines of SVG vector path data — diagram text rendered as glyph outlines, colored boxes (#d9ead3, #f4cccc, #c9daf8), and connector arrows; no recoverable text labels]
</g></svg> \ No newline at end of file
diff --git a/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py b/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py
index b6972a7a45..06ab58188a 100644
--- a/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py
+++ b/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py
@@ -43,15 +43,15 @@ from tensorflow.python.ops import template
from tensorflow.python.ops import variable_scope
from tensorflow.python.training import saver as core_saver
from tensorflow.python.training import training_util
-from tensorflow.python.training.checkpointable import base as checkpointable
-from tensorflow.python.training.checkpointable import util as checkpointable_utils
+from tensorflow.python.training.checkpointable import tracking
+from tensorflow.python.training.checkpointable import util
-class NonLayerCheckpointable(checkpointable.Checkpointable):
+class NonLayerCheckpointable(tracking.Checkpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
- self.a_variable = checkpointable_utils.add_variable(
+ self.a_variable = util.add_variable(
self, name="a_variable", shape=[])
@@ -88,29 +88,6 @@ class _MirroringSaveable(
self._mirrored_variable.assign(tensor))
-class _OwnsMirroredVariables(checkpointable.CheckpointableBase):
- """A Checkpointable object which returns a more complex SaveableObject."""
-
- def __init__(self):
- self.non_dep_variable = variable_scope.get_variable(
- name="non_dep_variable", initializer=6., use_resource=True)
- self.mirrored = variable_scope.get_variable(
- name="mirrored", initializer=15., use_resource=True)
-
- def _gather_saveables_for_checkpoint(self):
- def _saveable_factory(name=self.non_dep_variable.name):
- return _MirroringSaveable(
- primary_variable=self.non_dep_variable,
- mirrored_variable=self.mirrored,
- name=name)
- return {checkpointable.VARIABLE_VALUE_KEY: _saveable_factory}
-
- # The Saver sorts by name before parsing, so we need a name property.
- @property
- def name(self):
- return self.non_dep_variable.name
-
-
class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
@@ -122,7 +99,7 @@ class CheckpointingTests(test.TestCase):
other_model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
optimizer_step = training_util.get_or_create_global_step()
- root_checkpointable = checkpointable_utils.Checkpoint(
+ root_checkpointable = util.Checkpoint(
optimizer=optimizer, model=model, optimizer_step=optimizer_step)
if context.executing_eagerly():
optimizer.minimize(
@@ -137,11 +114,11 @@ class CheckpointingTests(test.TestCase):
optimizer.minimize(
other_model(input_value),
global_step=optimizer_step)
- self.evaluate(checkpointable_utils.gather_initializers(
+ self.evaluate(util.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
named_variables, serialized_graph, _ = (
- checkpointable_utils._serialize_object_graph(
+ util._serialize_object_graph(
root_checkpointable, saveables_cache=None))
expected_checkpoint_names = (
# Created in the root node, so no prefix.
@@ -230,7 +207,7 @@ class CheckpointingTests(test.TestCase):
def testSaveRestore(self):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
- root_checkpointable = checkpointable_utils.Checkpoint(
+ root_checkpointable = util.Checkpoint(
optimizer=optimizer, model=model)
input_value = constant_op.constant([[3.]])
if context.executing_eagerly():
@@ -240,7 +217,7 @@ class CheckpointingTests(test.TestCase):
train_op = optimizer.minimize(model(input_value))
# TODO(allenl): Make initialization more pleasant when graph building.
root_checkpointable.save_counter # pylint: disable=pointless-statement
- self.evaluate(checkpointable_utils.gather_initializers(
+ self.evaluate(util.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
prefix = os.path.join(self.get_temp_dir(), "ckpt")
@@ -266,7 +243,7 @@ class CheckpointingTests(test.TestCase):
# Preserve beta1_power and beta2_power when applying gradients so we can
# test that they've been restored correctly.
beta1=1.0, beta2=1.0)
- on_create_root = checkpointable_utils.Checkpoint(
+ on_create_root = util.Checkpoint(
optimizer=on_create_optimizer, model=on_create_model)
# Deferred restoration
status = on_create_root.restore(save_path=save_path)
@@ -298,7 +275,7 @@ class CheckpointingTests(test.TestCase):
for training_continuation in range(3):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
- root = checkpointable_utils.Checkpoint(
+ root = util.Checkpoint(
optimizer=optimizer, model=model,
optimizer_step=training_util.get_or_create_global_step())
root.restore(core_saver.latest_checkpoint(checkpoint_directory))
@@ -322,7 +299,7 @@ class CheckpointingTests(test.TestCase):
with ops.Graph().as_default():
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
- root = checkpointable_utils.Checkpoint(
+ root = util.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
input_value = constant_op.constant([[3.]])
@@ -359,7 +336,7 @@ class CheckpointingTests(test.TestCase):
graph=ops.get_default_graph()), test_util.device(use_gpu=True):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
- root = checkpointable_utils.Checkpoint(
+ root = util.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
checkpoint_path = core_saver.latest_checkpoint(checkpoint_directory)
@@ -392,7 +369,7 @@ class CheckpointingTests(test.TestCase):
model = MyModel()
# Don't actually train so we can test variable values
optimizer = adam.AdamOptimizer(0.)
- root = checkpointable_utils.Checkpoint(
+ root = util.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
checkpoint_path = core_saver.latest_checkpoint(checkpoint_directory)
@@ -442,7 +419,7 @@ class CheckpointingTests(test.TestCase):
optimizer = adam.AdamOptimizer(learning_rate=0.05)
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- checkpoint = checkpointable_utils.Checkpoint(
+ checkpoint = util.Checkpoint(
model=model, optimizer=optimizer)
for _ in range(2):
checkpoint.save(checkpoint_prefix)
@@ -457,8 +434,8 @@ class CheckpointingTests(test.TestCase):
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
- root = checkpointable.Checkpointable()
- root.var = checkpointable_utils.add_variable(
+ root = tracking.Checkpointable()
+ root.var = util.add_variable(
root, name="var", initializer=0.)
optimizer = adam.AdamOptimizer(0.1)
if context.executing_eagerly():
@@ -468,28 +445,28 @@ class CheckpointingTests(test.TestCase):
# Note that `optimizer` has not been added as a dependency of
# `root`. Create a one-off grouping so that slot variables for `root.var`
# get initialized too.
- self.evaluate(checkpointable_utils.gather_initializers(
- checkpointable_utils.Checkpoint(root=root, optimizer=optimizer)))
+ self.evaluate(util.gather_initializers(
+ util.Checkpoint(root=root, optimizer=optimizer)))
self.evaluate(train_op)
self.evaluate(state_ops.assign(root.var, 12.))
- no_slots_path = checkpointable_utils.CheckpointableSaver(root).save(
+ no_slots_path = util.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "no_slots"))
root.optimizer = optimizer
self.evaluate(state_ops.assign(root.var, 13.))
self.evaluate(state_ops.assign(optimizer.get_slot(name="m", var=root.var),
14.))
- slots_path = checkpointable_utils.CheckpointableSaver(root).save(
+ slots_path = util.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
- new_root = checkpointable.Checkpointable()
+ new_root = tracking.Checkpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
- slot_status = checkpointable_utils.CheckpointableSaver(
+ slot_status = util.CheckpointableSaver(
new_root).restore(slots_path)
- no_slot_status = checkpointable_utils.CheckpointableSaver(
+ no_slot_status = util.CheckpointableSaver(
new_root).restore(no_slots_path)
with self.assertRaises(AssertionError):
no_slot_status.assert_consumed()
- new_root.var = checkpointable_utils.add_variable(
+ new_root.var = util.add_variable(
new_root, name="var", shape=[])
no_slot_status.assert_consumed()
no_slot_status.run_restore_ops()
@@ -525,12 +502,12 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.test_session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- obj = checkpointable.Checkpointable()
+ obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
- self.evaluate(checkpointable_utils.gather_initializers(obj))
- saver = checkpointable_utils.CheckpointableSaver(obj)
+ self.evaluate(util.gather_initializers(obj))
+ saver = util.CheckpointableSaver(obj)
saver.save(checkpoint_prefix)
before_ops = graph.get_operations()
saver.save(checkpoint_prefix)
@@ -543,12 +520,12 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.test_session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- obj = checkpointable.Checkpointable()
+ obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
- self.evaluate(checkpointable_utils.gather_initializers(obj))
- saver = checkpointable_utils.CheckpointableSaver(obj)
+ self.evaluate(util.gather_initializers(obj))
+ saver = util.CheckpointableSaver(obj)
save_path = saver.save(checkpoint_prefix)
saver.restore(save_path)
before_ops = graph.get_operations()
@@ -565,10 +542,10 @@ class CheckpointingTests(test.TestCase):
first_session = session_lib.Session(graph=first_graph)
with first_graph.as_default(), first_session.as_default():
first_variable = resource_variable_ops.ResourceVariable([1.])
- first_root_checkpointable = checkpointable_utils.Checkpoint(
+ first_root_checkpointable = util.Checkpoint(
optimizer=optimizer, variable=first_variable)
train_op = optimizer.minimize(first_variable.read_value)
- self.evaluate(checkpointable_utils.gather_initializers(
+ self.evaluate(util.gather_initializers(
first_root_checkpointable))
self.evaluate(train_op)
self.evaluate(first_variable.assign([1.]))
@@ -581,7 +558,7 @@ class CheckpointingTests(test.TestCase):
second_graph = ops.Graph()
with second_graph.as_default(), session_lib.Session(graph=second_graph):
second_variable = resource_variable_ops.ResourceVariable([1.])
- second_root_checkpointable = checkpointable_utils.Checkpoint(
+ second_root_checkpointable = util.Checkpoint(
optimizer=optimizer, variable=second_variable)
train_op = optimizer.minimize(second_variable.read_value)
second_root_checkpointable.restore(None).initialize_or_restore()
@@ -631,7 +608,7 @@ class TemplateTests(test.TestCase):
save_template = template.make_template("s1", _templated)
v1_save, _, v2_save = save_template()
optimizer = adam.AdamOptimizer(0.0)
- save_root = checkpointable_utils.Checkpoint(
+ save_root = util.Checkpoint(
my_template=save_template, optimizer=optimizer)
optimizer.minimize(v1_save.read_value)
self.evaluate([v.initializer for v in optimizer.variables()])
@@ -643,7 +620,7 @@ class TemplateTests(test.TestCase):
load_template = template.make_template("s2", _templated)
load_optimizer = adam.AdamOptimizer(0.0)
- load_root = checkpointable_utils.Checkpoint(
+ load_root = util.Checkpoint(
my_template=load_template, optimizer=load_optimizer)
status = load_root.restore(save_path)
var, var_plus_one, var2 = load_template()
@@ -664,12 +641,12 @@ class CheckpointCompatibilityTests(test.TestCase):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
optimizer_step = training_util.get_or_create_global_step()
- root_checkpointable = checkpointable_utils.Checkpoint(
+ root_checkpointable = util.Checkpoint(
optimizer=optimizer, model=model, optimizer_step=optimizer_step)
train_op = optimizer.minimize(
functools.partial(model, input_value),
global_step=optimizer_step)
- self.evaluate(checkpointable_utils.gather_initializers(
+ self.evaluate(util.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
# A regular variable, a slot variable, and a non-slot Optimizer variable
@@ -721,7 +698,7 @@ class CheckpointCompatibilityTests(test.TestCase):
self._set_sentinels(root)
with self.assertRaises(AssertionError):
self._check_sentinels(root)
- object_saver = checkpointable_utils.CheckpointableSaver(root)
+ object_saver = util.CheckpointableSaver(root)
self._set_sentinels(root)
status = object_saver.restore(save_path)
if context.executing_eagerly():
diff --git a/tensorflow/contrib/tpu/python/tpu/tpu_estimator.py b/tensorflow/contrib/tpu/python/tpu/tpu_estimator.py
index 85ea4d3df3..2b1cb4245e 100644
--- a/tensorflow/contrib/tpu/python/tpu/tpu_estimator.py
+++ b/tensorflow/contrib/tpu/python/tpu/tpu_estimator.py
@@ -1136,7 +1136,7 @@ class _InputPipeline(object):
err_msg = ('Input pipeline contains one or more QueueRunners. '
'It could be slow and not scalable. Please consider '
'converting your input pipeline to use `tf.data` instead (see '
- 'https://www.tensorflow.org/programmers_guide/datasets for '
+ 'https://www.tensorflow.org/guide/datasets for '
'instructions.')
if _WRAP_INPUT_FN_INTO_WHILE_LOOP:
raise RuntimeError(err_msg)
diff --git a/tensorflow/core/common_runtime/constant_folding.cc b/tensorflow/core/common_runtime/constant_folding.cc
index 715c0a2384..b5a51d2526 100644
--- a/tensorflow/core/common_runtime/constant_folding.cc
+++ b/tensorflow/core/common_runtime/constant_folding.cc
@@ -47,8 +47,7 @@ namespace {
// inputs' shapes are known.
bool IsShapeOp(const Node* n) {
const auto& ts = n->type_string();
- return ts == "Shape" || ts == "ShapeN" || ts == "Rank" || ts == "Size" ||
- ts == "ZerosLike" || ts == "OnesLike";
+ return ts == "Shape" || ts == "ShapeN" || ts == "Rank" || ts == "Size";
}
// Reads the partially-known shape of each of n's inputs from shape_map, and
@@ -162,63 +161,6 @@ bool MaybeReplaceSizeOp(const Node* n,
return true;
}
-template <class T>
-void SetAll(Tensor* t, T val) {
- auto flat_t = t->flat<T>();
- for (int i = 0; i < flat_t.size(); i++) {
- flat_t(i) = val;
- }
-}
-
-#define REPLACE_ZEROS_OR_ONES_CASE(DTYPE) \
- case DTYPE: \
- SetAll<EnumToDataType<DTYPE>::Type>(&t, \
- EnumToDataType<DTYPE>::Type(int_val)); \
- break;
-
-bool MaybeReplaceZerosOrOnesLikeOp(
- const Node* n, const std::vector<PartialTensorShape>& input_shapes,
- std::unordered_map<const Node*, std::vector<Tensor>>*
- shape_replacement_map) {
- std::vector<Tensor> defined_shape;
- for (const auto& shape : input_shapes) {
- if (!shape.IsFullyDefined()) {
- return false;
- }
- const DataType op_type = n->output_type(0);
- TensorShape fully_defined_shape;
- shape.AsTensorShape(&fully_defined_shape);
- Tensor t(op_type, fully_defined_shape);
-
- int int_val = n->type_string() == "OnesLike" ? 1 : 0;
- switch (op_type) {
- REPLACE_ZEROS_OR_ONES_CASE(DT_BOOL);
- REPLACE_ZEROS_OR_ONES_CASE(DT_HALF);
- REPLACE_ZEROS_OR_ONES_CASE(DT_BFLOAT16);
- REPLACE_ZEROS_OR_ONES_CASE(DT_FLOAT);
- REPLACE_ZEROS_OR_ONES_CASE(DT_DOUBLE);
- REPLACE_ZEROS_OR_ONES_CASE(DT_COMPLEX64);
- REPLACE_ZEROS_OR_ONES_CASE(DT_COMPLEX128);
- REPLACE_ZEROS_OR_ONES_CASE(DT_UINT8);
- REPLACE_ZEROS_OR_ONES_CASE(DT_INT8);
- REPLACE_ZEROS_OR_ONES_CASE(DT_UINT16);
- REPLACE_ZEROS_OR_ONES_CASE(DT_INT16);
- REPLACE_ZEROS_OR_ONES_CASE(DT_INT32);
- REPLACE_ZEROS_OR_ONES_CASE(DT_INT64);
- default:
- VLOG(1) << "Unsupported type " << DataTypeString(op_type);
- return false;
- }
- defined_shape.push_back(t);
- }
- // All the inputs had known shapes so we can replace the node by constants
- // later in the rewrite.
- shape_replacement_map->insert({n, defined_shape});
- return true;
-}
-
-#undef REPLACE_ZEROS_OR_ONES_CASE
-
// If n is a shape Op (Shape, ShapeN, Rank, or Size) and its inputs have their
// shapes specified in shape_map, then adds to shape_replacement_map a mapping
// from n to a vector of Tensors, where Tensor k is the (statically known) value
@@ -249,14 +191,9 @@ bool MaybeReplaceShapeOp(
if (!MaybeReplaceRankOp(n, input_shapes, shape_replacement_map)) {
return false;
}
- } else if (ts == "Size") {
- if (!MaybeReplaceSizeOp(n, input_shapes, shape_replacement_map)) {
- return false;
- }
} else {
- CHECK(ts == "ZerosLike" || ts == "OnesLike");
- if (!MaybeReplaceZerosOrOnesLikeOp(n, input_shapes,
- shape_replacement_map)) {
+ CHECK_EQ(ts, "Size");
+ if (!MaybeReplaceSizeOp(n, input_shapes, shape_replacement_map)) {
return false;
}
}
diff --git a/tensorflow/core/common_runtime/constant_folding_test.cc b/tensorflow/core/common_runtime/constant_folding_test.cc
index d5600d39a7..16b61315f2 100644
--- a/tensorflow/core/common_runtime/constant_folding_test.cc
+++ b/tensorflow/core/common_runtime/constant_folding_test.cc
@@ -619,42 +619,6 @@ TEST_F(ConstantFoldingTest, ConstShapeKnown) {
}
}
-TEST_F(ConstantFoldingTest, ZerosLikeAndOnesLike) {
- Graph g(OpRegistry::Global());
- {
- Scope s = Scope::NewRootScope();
- auto recv = ops::_Recv(s.WithOpName("recv"), DT_FLOAT, "recv", "sender", 0,
- "receiver");
- auto zeros = ops::ZerosLike(s.WithOpName("zeros"), recv);
- auto ones = ops::OnesLike(s.WithOpName("ones"), recv);
- auto out = ops::Add(s.WithOpName("out"), ones, zeros);
- auto send =
- ops::_Send(s.WithOpName("send"), out, "out", "sender", 0, "receiver");
- TF_ASSERT_OK(s.ToGraph(&g));
- }
-
- std::unordered_map<string, Node*> orig_index = NodeNameIndex(g);
- Node* recv = orig_index.at("recv");
- PartialTensorShape ps;
- int dims[] = {2, 3};
- TF_EXPECT_OK(PartialTensorShape::MakePartialShape(dims, 2, &ps));
- std::unordered_map<string, std::vector<PartialTensorShape>> map;
- map[recv->name()].push_back(ps);
- ConstantFoldingOptions opts;
- opts.shape_map = &map;
-
- bool was_mutated;
- TF_EXPECT_OK(
- ConstantFold(opts, nullptr, Env::Default(), nullptr, &g, &was_mutated));
- EXPECT_TRUE(was_mutated);
-
- std::unordered_map<string, Node*> index = NodeNameIndex(g);
- Node* send = index.at("send");
- ASSERT_EQ(1, send->num_inputs());
- Node* cf = *(send->in_nodes().begin());
- ExpectNodeEqual<float>(cf, {1, 1, 1, 1, 1, 1}, {2, 3});
-}
-
namespace {
const char kTestMemRegionName[] = "test://test";
diff --git a/tensorflow/core/framework/kernel_def.proto b/tensorflow/core/framework/kernel_def.proto
index a17b9c8492..e16c2ae73b 100644
--- a/tensorflow/core/framework/kernel_def.proto
+++ b/tensorflow/core/framework/kernel_def.proto
@@ -34,3 +34,8 @@ message KernelDef {
// value matching this.
string label = 5;
}
+
+// A collection of KernelDefs
+message KernelList {
+ repeated KernelDef kernel = 1;
+};
diff --git a/tensorflow/core/framework/op_kernel.cc b/tensorflow/core/framework/op_kernel.cc
index ce213a63be..c2561b5019 100644
--- a/tensorflow/core/framework/op_kernel.cc
+++ b/tensorflow/core/framework/op_kernel.cc
@@ -1120,14 +1120,14 @@ void LogAllRegisteredKernels() {
}
}
-std::vector<KernelDef> GetAllRegisteredKernels() {
+KernelList GetAllRegisteredKernels() {
const KernelRegistry* const typed_registry = GlobalKernelRegistryTyped();
- std::vector<KernelDef> kernels;
- kernels.reserve(typed_registry->size());
+ KernelList kernel_list;
+ kernel_list.mutable_kernel()->Reserve(typed_registry->size());
for (const auto& p : *typed_registry) {
- kernels.emplace_back(p.second.def);
+ *kernel_list.add_kernel() = p.second.def;
}
- return kernels;
+ return kernel_list;
}
string KernelsRegisteredForOp(StringPiece op_name) {
diff --git a/tensorflow/core/framework/op_kernel.h b/tensorflow/core/framework/op_kernel.h
index a3ad29e02f..6c4c3a2ac1 100644
--- a/tensorflow/core/framework/op_kernel.h
+++ b/tensorflow/core/framework/op_kernel.h
@@ -1304,8 +1304,8 @@ Status FindKernelDef(const DeviceType& device_type, const NodeDef& node_def,
// missing kernel errors.
void LogAllRegisteredKernels();
-// Gets a vector of all registered kernels.
-std::vector<KernelDef> GetAllRegisteredKernels();
+// Gets a list of all registered kernels.
+KernelList GetAllRegisteredKernels();
namespace kernel_factory {
diff --git a/tensorflow/core/framework/op_kernel_test.cc b/tensorflow/core/framework/op_kernel_test.cc
index 50319ca576..b76a3400a8 100644
--- a/tensorflow/core/framework/op_kernel_test.cc
+++ b/tensorflow/core/framework/op_kernel_test.cc
@@ -965,7 +965,7 @@ BENCHMARK(BM_ConcatInputRange);
BENCHMARK(BM_SelectInputRange);
TEST(RegisteredKernels, CanCallGetAllRegisteredKernels) {
- auto all_registered_kernels = GetAllRegisteredKernels();
+ auto all_registered_kernels = GetAllRegisteredKernels().kernel();
auto has_name_test1 = [](const KernelDef& k) { return k.op() == "Test1"; };
// Verify we can find the "Test1" op registered above
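
Returning a `KernelList` proto instead of `std::vector<KernelDef>` lets callers serialize the registry dump or hand it across a proto-friendly boundary directly. A minimal sketch of consuming the new return type (the helper below is illustrative, not part of this change):

    // Filter the registry dump for one op and print the matching KernelDefs.
    #include <iostream>
    #include <string>
    #include "tensorflow/core/framework/op_kernel.h"

    void PrintKernelsForOp(const std::string& op_name) {
      const tensorflow::KernelList kernels = tensorflow::GetAllRegisteredKernels();
      for (const tensorflow::KernelDef& def : kernels.kernel()) {
        if (def.op() == op_name) std::cout << def.DebugString() << "\n";
      }
    }
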
diff --git a/tensorflow/core/graph/validate.cc b/tensorflow/core/graph/validate.cc
index bd905651d2..e44eb91d48 100644
--- a/tensorflow/core/graph/validate.cc
+++ b/tensorflow/core/graph/validate.cc
@@ -59,5 +59,59 @@ void GetOpListForValidation(OpList* op_list, const OpRegistry& op_registry) {
RemoveDescriptionsFromOpList(op_list);
}
+Status ValidateGraphHasNoCycle(const Graph& graph) {
+ // A node is ready when all of its inputs have been visited.
+ std::vector<const Node*> ready;
+ std::vector<int> pending_count(graph.num_node_ids(), 0);
+
+ for (int i = 0; i < graph.num_node_ids(); ++i) {
+ const Node* n = graph.FindNodeId(i);
+ if (n == nullptr) continue;
+ pending_count[i] = n->in_edges().size();
+ if (n->IsMerge()) {
+      // While-loop cycles are legal cycles, so we manually adjust the
+ // pending_count to make sure that the loop is visited.
+ for (const Edge* e : n->in_edges()) {
+ if (!e->IsControlEdge() && e->src()->IsNextIteration()) {
+ pending_count[i]--;
+ }
+ }
+ }
+ if (pending_count[i] == 0) {
+ ready.push_back(n);
+ }
+ }
+
+ int processed = 0;
+ while (!ready.empty()) {
+ const Node* node = ready.back();
+ ready.pop_back();
+ ++processed;
+
+ for (const Edge* out : node->out_edges()) {
+ const int output_id = out->dst()->id();
+ pending_count[output_id]--;
+ if (pending_count[output_id] == 0) {
+ ready.push_back(out->dst());
+ }
+ }
+ }
+
+ if (processed < graph.num_nodes()) {
+ std::vector<string> nodes_in_cycle;
+ for (int i = 0; i < pending_count.size() && nodes_in_cycle.size() < 3;
+ ++i) {
+ if (pending_count[i] != 0) {
+ nodes_in_cycle.push_back(graph.FindNodeId(i)->name());
+ }
+ }
+ return errors::InvalidArgument(
+ "Graph is invalid, contains a cycle with ",
+ graph.num_nodes() - processed,
+ " nodes, including: ", str_util::Join(nodes_in_cycle, ", "));
+ }
+ return Status::OK();
+}
+
} // namespace graph
} // namespace tensorflow
diff --git a/tensorflow/core/graph/validate.h b/tensorflow/core/graph/validate.h
index cda93fe1de..08879dca60 100644
--- a/tensorflow/core/graph/validate.h
+++ b/tensorflow/core/graph/validate.h
@@ -18,6 +18,7 @@ limitations under the License.
#include "tensorflow/core/framework/graph.pb.h"
#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/graph/graph.h"
#include "tensorflow/core/lib/core/status.h"
namespace tensorflow {
@@ -50,6 +51,14 @@ Status ValidateGraphDefAgainstOpList(const GraphDef& graph_def,
void GetOpListForValidation(
OpList* op_list, const OpRegistry& op_registry = *OpRegistry::Global());
+// Validate that the graph has no cycle except for legal while-loop cycles.
+// This traverses the graph's nodes in topological order to verify there are
+// no cycles. Starting with inputless nodes, it visits nodes whose inputs have
+// all been visited, and counts the total number of visited nodes. If there is a
+// cycle, nodes in the cycle will never be visited, and the visited count will
+// be less than the total node count.
+Status ValidateGraphHasNoCycle(const Graph& graph);
+
} // namespace graph
} // namespace tensorflow
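
Since the validator reports at most the first few offending nodes and otherwise just returns a status, a typical call site simply propagates it. A minimal sketch of wiring the check into an import path (assumes only the declaration above; the wrapper name is illustrative):

    #include "tensorflow/core/graph/graph.h"
    #include "tensorflow/core/graph/validate.h"
    #include "tensorflow/core/lib/core/status.h"

    // Legal while-loop back edges (NextIteration -> Merge) are excluded from
    // the pending counts, so only genuine cycles fail the check.
    tensorflow::Status CheckAcyclic(const tensorflow::Graph& graph) {
      return tensorflow::graph::ValidateGraphHasNoCycle(graph);
    }
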
diff --git a/tensorflow/core/kernels/batching_util/serial_device_batch_scheduler_test.cc b/tensorflow/core/kernels/batching_util/serial_device_batch_scheduler_test.cc
index a2f8f9a03e..a91356c095 100644
--- a/tensorflow/core/kernels/batching_util/serial_device_batch_scheduler_test.cc
+++ b/tensorflow/core/kernels/batching_util/serial_device_batch_scheduler_test.cc
@@ -145,8 +145,6 @@ TEST(SerialDeviceBatchSchedulerTest, PendingOnSerialDevice) {
std::shared_ptr<SerialDeviceBatchScheduler<FakeTask>> scheduler;
TF_ASSERT_OK(
SerialDeviceBatchScheduler<FakeTask>::Create(options, &scheduler));
- // Make sure batch processing thread has gone to sleep.
- Env::Default()->SleepForMicroseconds(1000);
int processed_batches = 0;
Notification start_processing;
auto queue_callback = [&mu, &processed_batches, &start_processing, &pending,
@@ -163,26 +161,18 @@ TEST(SerialDeviceBatchSchedulerTest, PendingOnSerialDevice) {
start_processing.WaitForNotification();
{
mutex_lock l(mu);
- pending = 2;
- }
- break;
- case 2:
- // No batches initially --> low traffic --> no adjustment.
- CHECK_EQ(scheduler->in_flight_batches_limit(), 1);
- {
- mutex_lock l(mu);
pending = 3;
}
break;
- case 3:
- // Pending at target --> no adjustment.
+ case 2:
+ // Either low traffic or pending at target --> no adjustment.
CHECK_EQ(scheduler->in_flight_batches_limit(), 1);
{
mutex_lock l(mu);
pending = 1;
}
break;
- case 4:
+ case 3:
// Small pending --> 2 additional threads added.
CHECK_EQ(scheduler->in_flight_batches_limit(), 3);
{
@@ -196,8 +186,8 @@ TEST(SerialDeviceBatchSchedulerTest, PendingOnSerialDevice) {
};
std::unique_ptr<BatchScheduler<FakeTask>> queue;
TF_ASSERT_OK(scheduler->AddQueue({}, queue_callback, &queue));
- // Create 4 batches.
- for (int i = 0; i < 4; i++) {
+ // Create 3 batches.
+ for (int i = 0; i < 3; i++) {
TF_ASSERT_OK(ScheduleTask(800, queue.get()));
}
start_processing.Notify();
@@ -295,12 +285,22 @@ TEST(SerialDeviceBatchSchedulerTest, DeleteQueue) {
TF_ASSERT_OK(ScheduleTask(800, queue.get()));
}
std::unique_ptr<Thread> queue_deleter(Env::Default()->StartThread(
- {}, "QueueDeleterThread", [&queue, &mu, &processed_batches] {
+ {}, "QueueDeleterThread",
+ [&queue, &mu, &processed_batches, scheduler]() mutable {
// Delete queue, should be kept alive until empty.
queue.reset();
+ {
+ mutex_lock l(mu);
+        // The queue may be destroyed before the 2nd batch finishes processing.
+ EXPECT_GT(processed_batches, 0);
+ }
+ // Delete scheduler, should be kept alive until all batches processed.
+ scheduler.reset();
mutex_lock l(mu);
EXPECT_EQ(processed_batches, 2);
}));
+  // Release our reference to the scheduler; the queue and the callback's copy
+  // above should keep it alive.
+ scheduler.reset();
// Give queue_deleter thread time to delete queue.
Env::Default()->SleepForMicroseconds(1000);
finish_processing.Notify();
diff --git a/tensorflow/core/kernels/bias_op.cc b/tensorflow/core/kernels/bias_op.cc
index 127c3a1ca1..7b28c8e91f 100644
--- a/tensorflow/core/kernels/bias_op.cc
+++ b/tensorflow/core/kernels/bias_op.cc
@@ -394,6 +394,59 @@ class BiasAddGradGPUConfig {
private:
BiasAddGradGPUMode mode_;
};
+
+// Encapsulate all the shape information that is used in bias add grad
+// operations.
+class BiasAddParams {
+ public:
+ // We use a list to maintain both the shape value and the order (data format).
+ using SpatialArray = gtl::InlinedVector<int64, 4>;
+ BiasAddParams(const SpatialArray& in_shape, TensorFormat data_format,
+ DataType dtype, int device_id)
+ : in_shape_(in_shape),
+ data_format_(data_format),
+ dtype_(dtype),
+ device_id_(device_id) {
+ for (int64 val : in_shape_) {
+ hash_code_ = Hash64Combine(hash_code_, val);
+ }
+ hash_code_ = Hash64Combine(hash_code_, data_format);
+ hash_code_ = Hash64Combine(hash_code_, dtype);
+ hash_code_ = Hash64Combine(hash_code_, device_id);
+ }
+ bool operator==(const BiasAddParams& other) const {
+ return this->get_data_as_tuple() == other.get_data_as_tuple();
+ }
+
+ bool operator!=(const BiasAddParams& other) const {
+ return !(*this == other);
+ }
+ uint64 hash() const { return hash_code_; }
+
+ string ToString() const {
+ // clang-format off
+ return strings::StrCat(
+ "(", str_util::Join(in_shape_, ", "), "), ",
+ data_format_, ", ", dtype_, ", ", device_id_);
+ // clang-format on
+ }
+
+ protected:
+ using ParamsDataType = std::tuple<SpatialArray, TensorFormat, DataType, int>;
+
+ ParamsDataType get_data_as_tuple() const {
+ return std::make_tuple(in_shape_, data_format_, dtype_, device_id_);
+ }
+
+ uint64 hash_code_ = 0;
+
+ private:
+ SpatialArray in_shape_;
+ TensorFormat data_format_;
+ DataType dtype_;
+ int device_id_;
+};
+
typedef AutoTuneSingleton<BiasGradAutotuneGroup, BiasAddParams,
BiasAddGradGPUConfig>
AutotuneBiasGrad;
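
Moved next to its only user, `BiasAddParams` serves as the autotune cache key: the hash is folded once at construction with `Hash64Combine`, while `operator==` compares the exact field tuple so a hash collision can never alias two configurations. A standalone sketch of the same key pattern (plain arithmetic stands in for `Hash64Combine`; nothing below is TF API):

    #include <cstdint>
    #include <tuple>
    #include <utility>
    #include <vector>

    struct ConvKeySketch {
      std::vector<int64_t> shape;
      int format, dtype, device_id;
      uint64_t hash_code = 0;

      ConvKeySketch(std::vector<int64_t> s, int f, int d, int dev)
          : shape(std::move(s)), format(f), dtype(d), device_id(dev) {
        auto mix = [&](uint64_t v) {  // stand-in for Hash64Combine
          hash_code = hash_code * 0x9E3779B97F4A7C15ull + v;
        };
        for (int64_t dim : shape) mix(static_cast<uint64_t>(dim));
        mix(format); mix(dtype); mix(device_id);
      }
      // Equality checks the full tuple, never the hash alone.
      bool operator==(const ConvKeySketch& o) const {
        return std::tie(shape, format, dtype, device_id) ==
               std::tie(o.shape, o.format, o.dtype, o.device_id);
      }
    };
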
diff --git a/tensorflow/core/kernels/bias_op_gpu.h b/tensorflow/core/kernels/bias_op_gpu.h
index 60c274c826..c1051f43c9 100644
--- a/tensorflow/core/kernels/bias_op_gpu.h
+++ b/tensorflow/core/kernels/bias_op_gpu.h
@@ -75,58 +75,6 @@ class BiasGradGPUProfileResult {
uint64 elapsed_time_ = std::numeric_limits<uint64>::max();
};
-// Encapsulate all the shape information that is used in bias add grad
-// operations.
-class BiasAddParams {
- public:
- // We use a list to maintain both the shape value and the order (data format).
- using SpatialArray = gtl::InlinedVector<int64, 4>;
- BiasAddParams(const SpatialArray& in_shape, TensorFormat data_format,
- DataType dtype, int device_id)
- : in_shape_(in_shape),
- data_format_(data_format),
- dtype_(dtype),
- device_id_(device_id) {
- for (int64 val : in_shape_) {
- hash_code_ = Hash64Combine(hash_code_, val);
- }
- hash_code_ = Hash64Combine(hash_code_, data_format);
- hash_code_ = Hash64Combine(hash_code_, dtype);
- hash_code_ = Hash64Combine(hash_code_, device_id);
- }
- bool operator==(const BiasAddParams& other) const {
- return this->get_data_as_tuple() == other.get_data_as_tuple();
- }
-
- bool operator!=(const BiasAddParams& other) const {
- return !(*this == other);
- }
- uint64 hash() const { return hash_code_; }
-
- string ToString() const {
- // clang-format off
- return strings::StrCat(
- "(", str_util::Join(in_shape_, ", "), "), ",
- data_format_, ", ", dtype_, ", ", device_id_);
- // clang-format on
- }
-
- protected:
- using ParamsDataType = std::tuple<SpatialArray, TensorFormat, DataType, int>;
-
- ParamsDataType get_data_as_tuple() const {
- return std::make_tuple(in_shape_, data_format_, dtype_, device_id_);
- }
-
- uint64 hash_code_ = 0;
-
- private:
- SpatialArray in_shape_;
- TensorFormat data_format_;
- DataType dtype_;
- int device_id_;
-};
-
} // namespace tensorflow
#endif // TENSORFLOW_CORE_KERNELS_BIAS_OP_GPU_H_
diff --git a/tensorflow/core/kernels/data/map_and_batch_dataset_op.cc b/tensorflow/core/kernels/data/map_and_batch_dataset_op.cc
index aa40f95cde..004f153af6 100644
--- a/tensorflow/core/kernels/data/map_and_batch_dataset_op.cc
+++ b/tensorflow/core/kernels/data/map_and_batch_dataset_op.cc
@@ -221,7 +221,7 @@ class MapAndBatchDatasetOp : public UnaryDatasetOpKernel {
batch_results_.pop_front();
}
cond_var_.notify_all();
- return ProcessBatch(ctx, result, out_tensors, end_of_sequence);
+ return ProcessResult(ctx, result, out_tensors, end_of_sequence);
}
protected:
@@ -313,10 +313,10 @@ class MapAndBatchDatasetOp : public UnaryDatasetOpKernel {
break;
}
}
- }
- {
- mutex_lock l(result->mu);
- result->num_elements++;
+ {
+ mutex_lock l(result->mu);
+ result->num_elements++;
+ }
}
CallCompleted(result);
}
@@ -370,7 +370,7 @@ class MapAndBatchDatasetOp : public UnaryDatasetOpKernel {
Status CopyPartialBatch(Tensor* output, const Tensor& value,
int64 num_elements) {
switch (value.dtype()) {
-#define CASE(type) \
+#define HANDLE_TYPE(type) \
case DataTypeToEnum<type>::value: { \
auto output_t = output->flat_outer_dims<type>(); \
auto value_t = value.flat_outer_dims<type>(); \
@@ -379,10 +379,8 @@ class MapAndBatchDatasetOp : public UnaryDatasetOpKernel {
} \
return Status::OK(); \
}
- TF_CALL_NUMBER_TYPES(CASE);
- TF_CALL_string(CASE);
- TF_CALL_variant(CASE);
-#undef CASE
+ TF_CALL_DATASET_TYPES(HANDLE_TYPE);
+#undef HANDLE_TYPE
default:
return errors::InvalidArgument("Unsupported data type: ",
value.dtype());
@@ -426,47 +424,49 @@ class MapAndBatchDatasetOp : public UnaryDatasetOpKernel {
dataset()->batch_size_;
}
- Status ProcessBatch(IteratorContext* ctx,
- const std::shared_ptr<BatchResult>& result,
- std::vector<Tensor>* out_tensors,
- bool* end_of_sequence) {
+ Status ProcessResult(IteratorContext* ctx,
+ const std::shared_ptr<BatchResult>& result,
+ std::vector<Tensor>* out_tensors,
+ bool* end_of_sequence) {
mutex_lock l(result->mu);
if (result->num_elements == 0) {
*end_of_sequence = true;
return Status::OK();
}
-
- if (!result->status.ok()) {
+ // `f` may deliberately raise `errors::OutOfRange` to indicate that we
+ // should terminate the iteration early.
+ if (!result->status.ok() && !errors::IsOutOfRange(result->status)) {
// Deallocate tensors allocated for the output.
result->output.clear();
- } else {
- if (result->num_elements < dataset()->batch_size_) {
- if (dataset()->drop_remainder_) {
- // Deallocate tensors allocated for the output.
- result->output.clear();
- *end_of_sequence = true;
- return Status::OK();
- }
- const std::vector<Tensor>& output = result->output;
- for (size_t i = 0; i < output.size(); ++i) {
- TensorShape component_shape(result->output[i].shape());
- component_shape.set_dim(0, result->num_elements);
- AllocatorAttributes attr;
- attr.set_gpu_compatible(true);
- Tensor component(ctx->allocator(attr), output[i].dtype(),
- component_shape);
- TF_RETURN_IF_ERROR(CopyPartialBatch(&component, output[i],
- result->num_elements));
- out_tensors->emplace_back(std::move(component));
- }
+ *end_of_sequence = false;
+ return result->status;
+ }
+ if (result->num_elements < dataset()->batch_size_) {
+ if (dataset()->drop_remainder_) {
// Deallocate tensors allocated for the output.
result->output.clear();
- } else {
- *out_tensors = std::move(result->output);
+ *end_of_sequence = true;
+ return Status::OK();
}
- *end_of_sequence = false;
+ const std::vector<Tensor>& output = result->output;
+ for (size_t i = 0; i < output.size(); ++i) {
+ TensorShape component_shape(result->output[i].shape());
+ component_shape.set_dim(0, result->num_elements);
+ AllocatorAttributes attr;
+ attr.set_gpu_compatible(true);
+ Tensor component(ctx->allocator(attr), output[i].dtype(),
+ component_shape);
+ TF_RETURN_IF_ERROR(
+ CopyPartialBatch(&component, output[i], result->num_elements));
+ out_tensors->emplace_back(std::move(component));
+ }
+ // Deallocate tensors allocated for the output.
+ result->output.clear();
+ } else {
+ *out_tensors = std::move(result->output);
}
- return result->status;
+ *end_of_sequence = result->num_elements == 0;
+ return Status::OK();
}
void RunnerThread(const std::shared_ptr<IteratorContext>& ctx)
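
The rewritten `ProcessResult` gives `errors::OutOfRange` from the user function `f` a special meaning: it marks deliberate early termination rather than a failure, so the partial batch accumulated so far is still delivered. A simplified sketch of just that status triage (a standalone illustration, not the kernel's exact control flow):

    #include "tensorflow/core/lib/core/errors.h"
    #include "tensorflow/core/lib/core/status.h"

    tensorflow::Status TriageBatchStatus(const tensorflow::Status& s,
                                         bool* end_of_sequence) {
      if (tensorflow::errors::IsOutOfRange(s)) {
        *end_of_sequence = true;      // `f` asked to stop early: not an error
        return tensorflow::Status::OK();
      }
      *end_of_sequence = false;
      return s;                       // OK, or a genuine error to propagate
    }
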
diff --git a/tensorflow/core/kernels/inplace_ops.cc b/tensorflow/core/kernels/inplace_ops.cc
index 8f51cc3819..8ddf3c38e8 100644
--- a/tensorflow/core/kernels/inplace_ops.cc
+++ b/tensorflow/core/kernels/inplace_ops.cc
@@ -50,7 +50,7 @@ Status DoParallelConcat(const CPUDevice& d, const Tensor& value, int32 loc,
#define CASE(type) \
case DataTypeToEnum<type>::value: \
return DoParallelConcatUpdate<CPUDevice, type>(d, value, loc, output);
- TF_CALL_NUMBER_TYPES(CASE);
+ TF_CALL_POD_TYPES(CASE);
TF_CALL_string(CASE);
TF_CALL_variant(CASE);
#undef CASE
diff --git a/tensorflow/core/kernels/matmul_op.cc b/tensorflow/core/kernels/matmul_op.cc
index fc3b3d3445..b596dbc782 100644
--- a/tensorflow/core/kernels/matmul_op.cc
+++ b/tensorflow/core/kernels/matmul_op.cc
@@ -488,8 +488,31 @@ class MatMulOp : public OpKernel {
return;
}
- LaunchMatMul<Device, T, USE_CUBLAS>::launch(
- ctx, a, b, dim_pair, &algorithms_, use_autotune_, out);
+ if (std::is_same<T, bfloat16>::value) {
+ bool is_cpu = std::is_same<Device, CPUDevice>::value;
+ OP_REQUIRES(ctx, is_cpu,
+ errors::Internal("bfloat16 matmul is not supported by GPU"));
+ Tensor a_float, b_float, out_float;
+ OP_REQUIRES_OK(ctx, ctx->allocate_temp(DT_FLOAT, a.shape(), &a_float));
+ OP_REQUIRES_OK(ctx, ctx->allocate_temp(DT_FLOAT, b.shape(), &b_float));
+ OP_REQUIRES_OK(ctx,
+ ctx->allocate_temp(DT_FLOAT, out->shape(), &out_float));
+
+ // TODO: Avoid extra copy to make bfloat16 matmul efficient on CPU.
+ BFloat16ToFloat(a.flat<bfloat16>().data(), a_float.flat<float>().data(),
+ a.NumElements());
+ BFloat16ToFloat(b.flat<bfloat16>().data(), b_float.flat<float>().data(),
+ b.NumElements());
+
+ LaunchMatMul<Device, float, USE_CUBLAS>::launch(
+ ctx, a_float, b_float, dim_pair, &algorithms_, use_autotune_,
+ &out_float);
+ FloatToBFloat16(out_float.flat<float>().data(),
+ out->flat<bfloat16>().data(), out->NumElements());
+ } else {
+ LaunchMatMul<Device, T, USE_CUBLAS>::launch(
+ ctx, a, b, dim_pair, &algorithms_, use_autotune_, out);
+ }
}
private:
@@ -559,6 +582,7 @@ struct MatMulFunctor<SYCLDevice, T> {
TF_CALL_float(REGISTER_CPU_EIGEN);
TF_CALL_double(REGISTER_CPU_EIGEN);
TF_CALL_half(REGISTER_CPU);
+TF_CALL_bfloat16(REGISTER_CPU);
TF_CALL_int32(REGISTER_CPU);
TF_CALL_complex64(REGISTER_CPU_EIGEN);
@@ -567,6 +591,7 @@ TF_CALL_complex128(REGISTER_CPU_EIGEN);
TF_CALL_float(REGISTER_CPU);
TF_CALL_double(REGISTER_CPU);
TF_CALL_half(REGISTER_CPU);
+TF_CALL_bfloat16(REGISTER_CPU);
TF_CALL_int32(REGISTER_CPU);
TF_CALL_complex64(REGISTER_CPU);
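
The new bfloat16 CPU path is a widen-compute-narrow fallback: inputs are converted to float, the existing float kernel runs, and the result is converted back (hence the TODO about the extra copies). A self-contained sketch of the pattern, with a naive loop standing in for `LaunchMatMul` (the conversion helpers are the ones the diff calls; everything else is illustrative):

    #include <vector>
    #include "tensorflow/core/framework/bfloat16.h"

    using tensorflow::bfloat16;

    void BfloatMatMulSketch(const bfloat16* a, const bfloat16* b, bfloat16* out,
                            int m, int k, int n) {  // a: [m,k], b: [k,n]
      std::vector<float> a_f(m * k), b_f(k * n), out_f(m * n, 0.f);
      tensorflow::BFloat16ToFloat(a, a_f.data(), m * k);
      tensorflow::BFloat16ToFloat(b, b_f.data(), k * n);
      // Accumulate in float, as the real kernel does, so precision is only
      // lost once at the final narrowing step.
      for (int i = 0; i < m; ++i)
        for (int j = 0; j < n; ++j)
          for (int l = 0; l < k; ++l)
            out_f[i * n + j] += a_f[i * k + l] * b_f[l * n + j];
      tensorflow::FloatToBFloat16(out_f.data(), out, m * n);
    }
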
diff --git a/tensorflow/core/ops/compat/ops_history.v1.pbtxt b/tensorflow/core/ops/compat/ops_history.v1.pbtxt
index 11ed50d30e..ee4faa5033 100644
--- a/tensorflow/core/ops/compat/ops_history.v1.pbtxt
+++ b/tensorflow/core/ops/compat/ops_history.v1.pbtxt
@@ -7681,6 +7681,66 @@ op {
}
}
op {
+ name: "AvgPool"
+ input_arg {
+ name: "value"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "ksize"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_BFLOAT16
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+}
+op {
name: "AvgPool3D"
input_arg {
name: "input"
@@ -8370,6 +8430,70 @@ op {
}
}
op {
+ name: "AvgPoolGrad"
+ input_arg {
+ name: "orig_input_shape"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "grad"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "ksize"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_BFLOAT16
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+}
+op {
name: "Barrier"
output_arg {
name: "handle"
@@ -10431,6 +10555,61 @@ op {
}
}
op {
+ name: "BiasAdd"
+ input_arg {
+ name: "value"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "bias"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_COMPLEX64
+ type: DT_INT64
+ type: DT_QINT8
+ type: DT_QUINT8
+ type: DT_QINT32
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_COMPLEX128
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+}
+op {
name: "BiasAddGrad"
input_arg {
name: "out_backprop"
@@ -10623,6 +10802,57 @@ op {
}
}
op {
+ name: "BiasAddGrad"
+ input_arg {
+ name: "out_backprop"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_COMPLEX64
+ type: DT_INT64
+ type: DT_QINT8
+ type: DT_QUINT8
+ type: DT_QINT32
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_COMPLEX128
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+}
+op {
name: "BiasAddV1"
input_arg {
name: "value"
@@ -13172,6 +13402,144 @@ op {
}
}
op {
+ name: "Conv2D"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "filter"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_BFLOAT16
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ }
+ attr {
+ name: "use_cudnn_on_gpu"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "dilations"
+ type: "list(int)"
+ default_value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
+op {
+ name: "Conv2DBackpropFilter"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "filter_sizes"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "out_backprop"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ }
+ }
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ }
+ attr {
+ name: "use_cudnn_on_gpu"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ }
+ }
+ }
+}
+op {
name: "Conv2DBackpropFilter"
input_arg {
name: "input"
@@ -13195,6 +13563,7 @@ op {
allowed_values {
list {
type: DT_HALF
+ type: DT_BFLOAT16
type: DT_FLOAT
}
}
@@ -13233,6 +13602,18 @@ op {
}
}
}
+ attr {
+ name: "dilations"
+ type: "list(int)"
+ default_value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
}
op {
name: "Conv2DBackpropFilter"
@@ -13260,6 +13641,7 @@ op {
type: DT_HALF
type: DT_BFLOAT16
type: DT_FLOAT
+ type: DT_DOUBLE
}
}
}
@@ -13371,6 +13753,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -13604,6 +13988,85 @@ op {
}
}
op {
+ name: "Conv2DBackpropInput"
+ input_arg {
+ name: "input_sizes"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "filter"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "out_backprop"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_BFLOAT16
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ }
+ attr {
+ name: "use_cudnn_on_gpu"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "dilations"
+ type: "list(int)"
+ default_value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
+op {
name: "Conv3D"
input_arg {
name: "input"
@@ -18334,6 +18797,117 @@ op {
}
}
op {
+ name: "DepthwiseConv2dNative"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "filter"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_BFLOAT16
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "dilations"
+ type: "list(int)"
+ default_value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
+op {
+ name: "DepthwiseConv2dNativeBackpropFilter"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "filter_sizes"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "out_backprop"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+}
+op {
name: "DepthwiseConv2dNativeBackpropFilter"
input_arg {
name: "input"
@@ -18375,6 +18949,19 @@ op {
}
}
}
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ }
+ }
+ }
}
op {
name: "DepthwiseConv2dNativeBackpropFilter"
@@ -18399,6 +18986,7 @@ op {
type: "type"
allowed_values {
list {
+ type: DT_BFLOAT16
type: DT_FLOAT
type: DT_DOUBLE
}
@@ -18431,6 +19019,18 @@ op {
}
}
}
+ attr {
+ name: "dilations"
+ type: "list(int)"
+ default_value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
}
op {
name: "DepthwiseConv2dNativeBackpropFilter"
@@ -18455,6 +19055,7 @@ op {
type: "type"
allowed_values {
list {
+ type: DT_HALF
type: DT_BFLOAT16
type: DT_FLOAT
type: DT_DOUBLE
@@ -18555,6 +19156,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -18810,6 +19413,78 @@ op {
}
}
op {
+ name: "DepthwiseConv2dNativeBackpropInput"
+ input_arg {
+ name: "input_sizes"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "filter"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "out_backprop"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_BFLOAT16
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "dilations"
+ type: "list(int)"
+ default_value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
+op {
name: "Dequantize"
input_arg {
name: "input"
@@ -30868,6 +31543,80 @@ op {
}
}
op {
+ name: "MaxPool3DGradGrad"
+ input_arg {
+ name: "orig_input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "orig_output"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "grad"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "ksize"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 5
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 5
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NDHWC"
+ }
+ allowed_values {
+ list {
+ s: "NDHWC"
+ s: "NCDHW"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_INT64
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+}
+op {
name: "MaxPoolGrad"
input_arg {
name: "orig_input"
@@ -31239,6 +31988,85 @@ op {
}
}
op {
+ name: "MaxPoolGrad"
+ input_arg {
+ name: "orig_input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "orig_output"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "grad"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "ksize"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ default_value {
+ type: DT_FLOAT
+ }
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_INT64
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+}
+op {
name: "MaxPoolGradGrad"
input_arg {
name: "orig_input"
@@ -31531,6 +32359,82 @@ op {
}
}
op {
+ name: "MaxPoolGradGrad"
+ input_arg {
+ name: "orig_input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "orig_output"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "grad"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "ksize"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "strides"
+ type: "list(int)"
+ has_minimum: true
+ minimum: 4
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_INT64
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+}
+op {
name: "MaxPoolGradGradV2"
input_arg {
name: "orig_input"
@@ -31807,6 +32711,78 @@ op {
}
}
op {
+ name: "MaxPoolGradGradV2"
+ input_arg {
+ name: "orig_input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "orig_output"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "grad"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "ksize"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "strides"
+ type: DT_INT32
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_INT64
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+}
+op {
name: "MaxPoolGradGradWithArgmax"
input_arg {
name: "input"
@@ -32375,6 +33351,81 @@ op {
}
}
op {
+ name: "MaxPoolGradV2"
+ input_arg {
+ name: "orig_input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "orig_output"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "grad"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "ksize"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "strides"
+ type: DT_INT32
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "padding"
+ type: "string"
+ allowed_values {
+ list {
+ s: "SAME"
+ s: "VALID"
+ }
+ }
+ }
+ attr {
+ name: "data_format"
+ type: "string"
+ default_value {
+ s: "NHWC"
+ }
+ allowed_values {
+ list {
+ s: "NHWC"
+ s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ default_value {
+ type: DT_FLOAT
+ }
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_INT64
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
+ }
+ }
+ }
+}
+op {
name: "MaxPoolGradWithArgmax"
input_arg {
name: "input"
diff --git a/tensorflow/core/ops/nn_ops.cc b/tensorflow/core/ops/nn_ops.cc
index 41efa49ce3..f1bbfac5e6 100644
--- a/tensorflow/core/ops/nn_ops.cc
+++ b/tensorflow/core/ops/nn_ops.cc
@@ -648,7 +648,7 @@ REGISTER_OP("MaxPool3DGradGrad")
.Attr("strides: list(int) >= 5")
.Attr(GetPaddingAttrString())
.Attr(GetConvnet3dDataFormatAttrString())
- .Attr("T: {float}")
+ .Attr("T: realnumbertype")
.SetShapeFn([](InferenceContext* c) {
TF_RETURN_IF_ERROR(shape_inference::Pool3DShape(c));
ShapeHandle unused;
diff --git a/tensorflow/core/ops/ops.pbtxt b/tensorflow/core/ops/ops.pbtxt
index c7f74c205a..e18771c389 100644
--- a/tensorflow/core/ops/ops.pbtxt
+++ b/tensorflow/core/ops/ops.pbtxt
@@ -2490,6 +2490,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -2672,6 +2674,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -3985,6 +3989,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -4034,6 +4040,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -5667,6 +5675,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -5744,6 +5754,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -5821,6 +5833,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -8523,6 +8537,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -8593,6 +8609,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -8663,6 +8681,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -15331,6 +15351,17 @@ op {
allowed_values {
list {
type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_INT64
+ type: DT_BFLOAT16
+ type: DT_UINT16
+ type: DT_HALF
+ type: DT_UINT32
+ type: DT_UINT64
}
}
}
@@ -15385,6 +15416,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -15462,6 +15495,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -15532,6 +15567,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
@@ -15673,6 +15710,8 @@ op {
list {
s: "NHWC"
s: "NCHW"
+ s: "HWNC"
+ s: "HWCN"
}
}
}
diff --git a/tensorflow/core/protobuf/config.proto b/tensorflow/core/protobuf/config.proto
index 9a48f43a63..d83215d5c2 100644
--- a/tensorflow/core/protobuf/config.proto
+++ b/tensorflow/core/protobuf/config.proto
@@ -147,7 +147,7 @@ message GPUOptions {
// Everything inside experimental is subject to change and is not subject
// to API stability guarantees in
- // https://www.tensorflow.org/programmers_guide/version_compat.
+ // https://www.tensorflow.org/guide/version_compat.
Experimental experimental = 9;
};
@@ -381,7 +381,7 @@ message ConfigProto {
// Everything inside Experimental is subject to change and is not subject
// to API stability guarantees in
- // https://www.tensorflow.org/programmers_guide/version_compat.
+ // https://www.tensorflow.org/guide/version_compat.
message Experimental {
// Task name for group resolution.
string collective_group_leader = 1;
@@ -426,7 +426,7 @@ message RunOptions {
// Everything inside Experimental is subject to change and is not subject
// to API stability guarantees in
- // https://www.tensorflow.org/programmers_guide/version_compat.
+ // https://www.tensorflow.org/guide/version_compat.
message Experimental {
// If non-zero, declares that this graph is going to use collective
// ops and must synchronize step_ids with any other graph with this
diff --git a/tensorflow/core/util/tensor_format.cc b/tensorflow/core/util/tensor_format.cc
index a5f7ecf0d1..33ab87aa78 100644
--- a/tensorflow/core/util/tensor_format.cc
+++ b/tensorflow/core/util/tensor_format.cc
@@ -18,7 +18,7 @@ limitations under the License.
namespace tensorflow {
string GetConvnetDataFormatAttrString() {
- return "data_format: { 'NHWC', 'NCHW' } = 'NHWC' ";
+ return "data_format: { 'NHWC', 'NCHW', 'HWNC', 'HWCN' } = 'NHWC' ";
}
string GetConvnet3dDataFormatAttrString() {
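
Any op registered through this helper now accepts the two extra layouts without touching its own registration code. A sketch of what such a registration looks like (the op name is hypothetical, for illustration only; the helper is the one changed above):

    #include "tensorflow/core/framework/op.h"
    #include "tensorflow/core/util/tensor_format.h"

    namespace tensorflow {

    // Hypothetical op; real ops pick the attr string up the same way and now
    // accept HWNC/HWCN in addition to NHWC/NCHW.
    REGISTER_OP("ExampleConvLike")
        .Input("input: T")
        .Output("output: T")
        .Attr("T: {half, float}")
        .Attr(GetConvnetDataFormatAttrString());

    }  // namespace tensorflow
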
diff --git a/tensorflow/docs_src/api_guides/python/client.md b/tensorflow/docs_src/api_guides/python/client.md
index eef23696db..27fc8610bf 100644
--- a/tensorflow/docs_src/api_guides/python/client.md
+++ b/tensorflow/docs_src/api_guides/python/client.md
@@ -3,7 +3,7 @@
This library contains classes for launching graphs and executing operations.
-@{$programmers_guide/low_level_intro$This guide} has examples of how a graph
+@{$guide/low_level_intro$This guide} has examples of how a graph
is launched in a @{tf.Session}.
## Session management
diff --git a/tensorflow/docs_src/api_guides/python/input_dataset.md b/tensorflow/docs_src/api_guides/python/input_dataset.md
index a6e2fc48e0..a6612d1bf7 100644
--- a/tensorflow/docs_src/api_guides/python/input_dataset.md
+++ b/tensorflow/docs_src/api_guides/python/input_dataset.md
@@ -2,8 +2,7 @@
[TOC]
@{tf.data.Dataset} allows you to build complex input pipelines. See the
-@{$datasets$programmer's guide} for an in-depth explanation of how to use this
-API.
+@{$guide/datasets} for an in-depth explanation of how to use this API.
## Reader classes
diff --git a/tensorflow/docs_src/api_guides/python/reading_data.md b/tensorflow/docs_src/api_guides/python/reading_data.md
index 5bbbfd3216..d7d0904ae2 100644
--- a/tensorflow/docs_src/api_guides/python/reading_data.md
+++ b/tensorflow/docs_src/api_guides/python/reading_data.md
@@ -16,8 +16,8 @@ There are four methods of getting data into a TensorFlow program:
## `tf.data` API
-See the @{$datasets$programmer's guide} for an in-depth explanation of
-@{tf.data.Dataset}. The `tf.data` API enables you to extract and preprocess data
+See the @{$guide/datasets} for an in-depth explanation of @{tf.data.Dataset}.
+The `tf.data` API enables you to extract and preprocess data
from different input/file formats, and apply transformations such as batching,
shuffling, and mapping functions over the dataset. This is an improved version
of the old input methods---feeding and `QueueRunner`---which are described
@@ -511,8 +511,8 @@ You can have the train and eval in the same graph in the same process, and share
their trained variables or layers. See @{$variables$the shared variables tutorial}.
To support the single-graph approach
-@{$programmers_guide/datasets$`tf.data`} also supplies
-@{$programmers_guide/datasets#creating_an_iterator$advanced iterator types} that
+@{$guide/datasets$`tf.data`} also supplies
+@{$guide/datasets#creating_an_iterator$advanced iterator types}
that allow the user to change the input pipeline without rebuilding the graph or
session.
diff --git a/tensorflow/docs_src/deploy/distributed.md b/tensorflow/docs_src/deploy/distributed.md
index d7ed6b1deb..8e2c818e39 100644
--- a/tensorflow/docs_src/deploy/distributed.md
+++ b/tensorflow/docs_src/deploy/distributed.md
@@ -2,7 +2,7 @@
This document shows how to create a cluster of TensorFlow servers, and how to
distribute a computation graph across that cluster. We assume that you are
-familiar with the @{$programmers_guide/low_level_intro$basic concepts} of
+familiar with the @{$guide/low_level_intro$basic concepts} of
writing low level TensorFlow programs.
## Hello distributed TensorFlow!
diff --git a/tensorflow/docs_src/extend/architecture.md b/tensorflow/docs_src/extend/architecture.md
index c8f522a03a..84435a57f2 100644
--- a/tensorflow/docs_src/extend/architecture.md
+++ b/tensorflow/docs_src/extend/architecture.md
@@ -7,9 +7,8 @@ learning models and system-level optimizations.
This document describes the system architecture that makes this
combination of scale and flexibility possible. It assumes that you have basic familiarity
with TensorFlow programming concepts such as the computation graph, operations,
-and sessions. See @{$programmers_guide/low_level_intro$this document}
-for an introduction to these topics. Some familiarity
-with @{$distributed$distributed TensorFlow}
+and sessions. See @{$guide/low_level_intro$this document} for an introduction to
+these topics. Some familiarity with @{$distributed$distributed TensorFlow}
will also be helpful.
This document is for developers who want to extend TensorFlow in some way not
diff --git a/tensorflow/docs_src/get_started/_index.yaml b/tensorflow/docs_src/get_started/_index.yaml
index af255a482d..277fc852fb 100644
--- a/tensorflow/docs_src/get_started/_index.yaml
+++ b/tensorflow/docs_src/get_started/_index.yaml
@@ -74,7 +74,7 @@ landing_page:
The high-level Keras API provides building blocks to create and
train deep learning models. Start with these beginner-friendly
notebook examples, then read the
- <a href="/programmers_guide/keras">TensorFlow Keras guide</a>.
+ <a href="/guide/keras">TensorFlow Keras guide</a>.
</p>
<ol style="padding-left:20px;">
<li><a href="/get_started/basic_classification">Basic classification</a></li>
@@ -85,7 +85,7 @@ landing_page:
</ol>
</div>
<div class="devsite-landing-row-item-buttons" style="margin-top:0;">
- <a class="button button-primary tfo-button-primary" href="/programmers_guide/keras">Read the Keras guide</a>
+ <a class="button button-primary tfo-button-primary" href="/guide/keras">Read the Keras guide</a>
</div>
</div>
- classname: tfo-landing-row-item-code-block
@@ -123,7 +123,7 @@ landing_page:
<div class="devsite-landing-row-item-description-content">
<p>
Eager execution provides an imperative, define-by-run interface for advanced operations. Write custom layers, forward passes, and training loops with auto‑differentiation. Start with
- these notebooks, then read the <a href="/programmers_guide/eager">eager execution guide</a>.
+ these notebooks, then read the <a href="/guide/eager">eager execution guide</a>.
</p>
<ol style="padding-left:20px;">
<li>
@@ -165,7 +165,7 @@ landing_page:
</ol>
</div>
<div class="devsite-landing-row-item-buttons">
- <a class="button button-primary tfo-button-primary" href="/programmers_guide/eager">Read the eager execution guide</a>
+ <a class="button button-primary tfo-button-primary" href="/guide/eager">Read the eager execution guide</a>
</div>
</div>
- custom_html: >
@@ -177,7 +177,7 @@ landing_page:
<p>
Estimators can train large models on multiple machines in a
production environment. Try the examples below and read the
- <a href="/programmers_guide/estimators">Estimators guide</a>.
+ <a href="/guide/estimators">Estimators guide</a>.
</p>
<ol style="padding-left: 20px;">
<li><a href="/tutorials/text_classification_with_tf_hub">How to build a simple text classifier with TF-Hub</a></li>
@@ -186,7 +186,7 @@ landing_page:
</ol>
</div>
<div class="devsite-landing-row-item-buttons">
- <a class="button button-primary tfo-button-primary" href="/programmers_guide/estimators">Read the Estimators guide</a>
+ <a class="button button-primary tfo-button-primary" href="/guide/estimators">Read the Estimators guide</a>
</div>
</div>
diff --git a/tensorflow/docs_src/get_started/next_steps.md b/tensorflow/docs_src/get_started/next_steps.md
index 79c0ef3346..6318a39c6c 100644
--- a/tensorflow/docs_src/get_started/next_steps.md
+++ b/tensorflow/docs_src/get_started/next_steps.md
@@ -2,9 +2,9 @@
## Learn more about TensorFlow
-* The [TensorFlow Guide](/programmers_guide) includes usage guides for the
+* The [TensorFlow Guide](/guide) includes usage guides for the
high-level APIs, as well as advanced TensorFlow operations.
-* [Premade Estimators](/programmers_guide/premade_estimators) are designed to
+* [Premade Estimators](/guide/premade_estimators) are designed to
get results out of the box. Use TensorFlow without building your own models.
* [TensorFlow.js](https://js.tensorflow.org/) allows web developers to train and
deploy ML models in the browser and using Node.js.
diff --git a/tensorflow/docs_src/programmers_guide/checkpoints.md b/tensorflow/docs_src/guide/checkpoints.md
index 8dfd91e3c8..dfb2626b86 100644
--- a/tensorflow/docs_src/programmers_guide/checkpoints.md
+++ b/tensorflow/docs_src/guide/checkpoints.md
@@ -8,9 +8,8 @@ Estimators. TensorFlow provides two model formats:
* SavedModel, which is a format independent of the code that created
the model.
-This document focuses on checkpoints. For details on SavedModel, see the
-@{$saved_model$Saving and Restoring} chapter of the
-*TensorFlow Programmer's Guide*.
+This document focuses on checkpoints. For details on `SavedModel`, see the
+@{$saved_model$Saving and Restoring} guide.
## Sample code
@@ -232,8 +231,7 @@ This separation will keep your checkpoints recoverable.
Checkpoints provide an easy automatic mechanism for saving and restoring
models created by Estimators.
-See the @{$saved_model$Saving and Restoring}
-chapter of the *TensorFlow Programmer's Guide* for details on:
+See the @{$saved_model$Saving and Restoring} guide for details about:
* Saving and restoring models using low-level TensorFlow APIs.
* Exporting and importing models in the SavedModel format, which is a
diff --git a/tensorflow/docs_src/programmers_guide/custom_estimators.md b/tensorflow/docs_src/guide/custom_estimators.md
index a63e2bafb3..a63e2bafb3 100644
--- a/tensorflow/docs_src/programmers_guide/custom_estimators.md
+++ b/tensorflow/docs_src/guide/custom_estimators.md
diff --git a/tensorflow/docs_src/programmers_guide/datasets.md b/tensorflow/docs_src/guide/datasets.md
index 8b69860a68..8b69860a68 100644
--- a/tensorflow/docs_src/programmers_guide/datasets.md
+++ b/tensorflow/docs_src/guide/datasets.md
diff --git a/tensorflow/docs_src/programmers_guide/datasets_for_estimators.md b/tensorflow/docs_src/guide/datasets_for_estimators.md
index 345a31b985..b04af78cd8 100644
--- a/tensorflow/docs_src/programmers_guide/datasets_for_estimators.md
+++ b/tensorflow/docs_src/guide/datasets_for_estimators.md
@@ -91,8 +91,8 @@ print(mnist_ds)
```
This will print the following line, showing the
-@{$programmers_guide/tensors#shapes$shapes} and
-@{$programmers_guide/tensors#data_types$types} of the items in
+@{$guide/tensors#shapes$shapes} and
+@{$guide/tensors#data_types$types} of the items in
the dataset. Note that a `Dataset` does not know how many items it contains.
``` None
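For reference, here is a minimal, self-contained sketch of the behavior described above (TF 1.x; the array is a stand-in for the tutorial's MNIST data):

```python
import numpy as np
import tensorflow as tf

# Stand-in for the tutorial's MNIST images: 1000 items of shape (28, 28).
images = np.random.rand(1000, 28, 28).astype(np.float32)

mnist_ds = tf.data.Dataset.from_tensor_slices(images)
print(mnist_ds)  # e.g. <TensorSliceDataset shapes: (28, 28), types: tf.float32>
```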
@@ -128,7 +128,7 @@ print(dataset)
Here we see that when a `Dataset` contains structured elements, the `shapes`
and `types` of the `Dataset` take on the same structure. This dataset contains
-dictionaries of @{$programmers_guide/tensors#rank$scalars}, all of type
+dictionaries of @{$guide/tensors#rank$scalars}, all of type
`tf.float64`.
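A hedged sketch of this structure-matching behavior (feature names and dtypes are illustrative; the guide's pandas-backed data is `tf.float64`):

```python
import tensorflow as tf

dataset = tf.data.Dataset.from_tensor_slices(
    {'SepalLength': [5.1, 4.9, 4.7], 'SepalWidth': [3.5, 3.0, 3.2]})

# The shapes and types take on the same dict structure as the elements.
print(dataset.output_shapes)  # {'SepalLength': TensorShape([]), ...}
print(dataset.output_types)   # {'SepalLength': tf.float32, ...}
```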
The first line of the iris `train_input_fn` uses the same functionality, but
@@ -382,6 +382,6 @@ Estimator. Consider the following documents next:
* The @{$low_level_intro#datasets$Low Level Introduction}, which demonstrates
how to experiment directly with `tf.data.Datasets` using TensorFlow's low
level APIs.
-* @{$programmers_guide/datasets} which goes into great detail about additional
+* @{$guide/datasets} which goes into great detail about additional
functionality of `Datasets`.
diff --git a/tensorflow/docs_src/programmers_guide/debugger.md b/tensorflow/docs_src/guide/debugger.md
index dc4db58857..dc4db58857 100644
--- a/tensorflow/docs_src/programmers_guide/debugger.md
+++ b/tensorflow/docs_src/guide/debugger.md
diff --git a/tensorflow/docs_src/programmers_guide/eager.md b/tensorflow/docs_src/guide/eager.md
index b2bc3273b4..b2bc3273b4 100644
--- a/tensorflow/docs_src/programmers_guide/eager.md
+++ b/tensorflow/docs_src/guide/eager.md
diff --git a/tensorflow/docs_src/programmers_guide/embedding.md b/tensorflow/docs_src/guide/embedding.md
index 8a98367dfb..8a98367dfb 100644
--- a/tensorflow/docs_src/programmers_guide/embedding.md
+++ b/tensorflow/docs_src/guide/embedding.md
diff --git a/tensorflow/docs_src/programmers_guide/estimators.md b/tensorflow/docs_src/guide/estimators.md
index b13b47184d..78b30c3040 100644
--- a/tensorflow/docs_src/programmers_guide/estimators.md
+++ b/tensorflow/docs_src/guide/estimators.md
@@ -81,7 +81,7 @@ of the following four steps:
... # manipulate dataset, extracting the feature dict and the label
return feature_dict, label
- (See @{$programmers_guide/datasets} for full details.)
+ (See @{$guide/datasets} for full details.)
2. **Define the feature columns.** Each @{tf.feature_column}
identifies a feature name, its type, and any input pre-processing.
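As a hedged sketch of this step (the column names are assumptions, not taken from the full guide):

```python
import tensorflow as tf

# One numeric column per input feature; names are illustrative.
feature_columns = [
    tf.feature_column.numeric_column(key='SepalLength'),
    tf.feature_column.numeric_column(key='SepalWidth'),
]
```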
diff --git a/tensorflow/docs_src/programmers_guide/faq.md b/tensorflow/docs_src/guide/faq.md
index b6291a9ffa..b6291a9ffa 100644
--- a/tensorflow/docs_src/programmers_guide/faq.md
+++ b/tensorflow/docs_src/guide/faq.md
diff --git a/tensorflow/docs_src/programmers_guide/feature_columns.md b/tensorflow/docs_src/guide/feature_columns.md
index 90f5c53a17..1013ec910c 100644
--- a/tensorflow/docs_src/programmers_guide/feature_columns.md
+++ b/tensorflow/docs_src/guide/feature_columns.md
@@ -534,7 +534,7 @@ embedding_column = tf.feature_column.embedding_column(
dimension=embedding_dimensions)
```
-@{$programmers_guide/embedding$Embeddings} is a significant topic within machine
+@{$guide/embedding$Embeddings} is a significant topic within machine
learning. This information was just to get you started using them as feature
columns.
diff --git a/tensorflow/docs_src/programmers_guide/graph_viz.md b/tensorflow/docs_src/guide/graph_viz.md
index f581ae56da..f581ae56da 100644
--- a/tensorflow/docs_src/programmers_guide/graph_viz.md
+++ b/tensorflow/docs_src/guide/graph_viz.md
diff --git a/tensorflow/docs_src/programmers_guide/graphs.md b/tensorflow/docs_src/guide/graphs.md
index f0dd8def17..e6246ef148 100644
--- a/tensorflow/docs_src/programmers_guide/graphs.md
+++ b/tensorflow/docs_src/guide/graphs.md
@@ -93,7 +93,7 @@ to all API functions in the same context. For example:
stored value. The @{tf.Variable} object also has methods such as
@{tf.Variable.assign$`assign`} and @{tf.Variable.assign_add$`assign_add`} that
create @{tf.Operation} objects that, when executed, update the stored value.
- (See @{$programmers_guide/variables} for more information about variables.)
+ (See @{$guide/variables} for more information about variables.)
* Calling @{tf.train.Optimizer.minimize} will add operations and tensors to the
default graph that calculates gradients, and return a @{tf.Operation} that,
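A minimal graph-mode sketch of the variable behavior described in the first bullet above:

```python
import tensorflow as tf

v = tf.Variable(0, name='counter')
increment = v.assign_add(1)  # an op-producing tensor that updates the stored value

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  print(sess.run(increment))  # 1
  print(sess.run(increment))  # 2
```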
diff --git a/tensorflow/docs_src/programmers_guide/index.md b/tensorflow/docs_src/guide/index.md
index 9c58a3b45e..eefdb9ceae 100644
--- a/tensorflow/docs_src/programmers_guide/index.md
+++ b/tensorflow/docs_src/guide/index.md
@@ -1,17 +1,17 @@
-# Programmer's Guide
+# TensorFlow Guide
The documents in this unit dive into the details of how TensorFlow
works. The units are as follows:
## High Level APIs
- * @{$programmers_guide/keras}, TensorFlow's high-level API for building and
+ * @{$guide/keras}, TensorFlow's high-level API for building and
training deep learning models.
- * @{$programmers_guide/eager}, an API for writing TensorFlow code
+ * @{$guide/eager}, an API for writing TensorFlow code
imperatively, like you would use NumPy.
- * @{$programmers_guide/estimators}, a high-level API that provides
+ * @{$guide/estimators}, a high-level API that provides
fully-packaged models ready for large-scale training and production.
- * @{$programmers_guide/datasets}, easy input pipelines to bring your data into
+ * @{$guide/datasets}, easy input pipelines to bring your data into
your TensorFlow program.
## Estimators
@@ -34,13 +34,13 @@ works. The units are as follows:
## Low Level APIs
- * @{$programmers_guide/low_level_intro}, which introduces the
+ * @{$guide/low_level_intro}, which introduces the
basics of how you can use TensorFlow outside of the high-level APIs.
- * @{$programmers_guide/tensors}, which explains how to create,
+ * @{$guide/tensors}, which explains how to create,
manipulate, and access Tensors--the fundamental objects in TensorFlow.
- * @{$programmers_guide/variables}, which details how
+ * @{$guide/variables}, which details how
to represent shared, persistent state in your program.
- * @{$programmers_guide/graphs}, which explains:
+ * @{$guide/graphs}, which explains:
* dataflow graphs, which are TensorFlow's representation of computations
as dependencies between operations.
* sessions, which are TensorFlow's mechanism for running dataflow graphs
@@ -50,19 +50,19 @@ works. The units are as follows:
such as Estimators or Keras, the high-level API creates and manages
graphs and sessions for you, but understanding graphs and sessions
can still be helpful.
- * @{$programmers_guide/saved_model}, which
+ * @{$guide/saved_model}, which
explains how to save and restore variables and models.
## ML Concepts
- * @{$programmers_guide/embedding}, which introduces the concept
+ * @{$guide/embedding}, which introduces the concept
of embeddings, provides a simple example of training an embedding in
TensorFlow, and explains how to view embeddings with the TensorBoard
Embedding Projector.
## Debugging
- * @{$programmers_guide/debugger}, which
+ * @{$guide/debugger}, which
explains how to use the TensorFlow debugger (tfdbg).
## TensorBoard
@@ -70,17 +70,17 @@ works. The units are as follows:
TensorBoard is a utility to visualize different aspects of machine learning.
The following guides explain how to use TensorBoard:
- * @{$programmers_guide/summaries_and_tensorboard},
+ * @{$guide/summaries_and_tensorboard},
which introduces TensorBoard.
- * @{$programmers_guide/graph_viz}, which
+ * @{$guide/graph_viz}, which
explains how to visualize the computational graph.
- * @{$programmers_guide/tensorboard_histograms} which demonstrates the how to
+ * @{$guide/tensorboard_histograms} which demonstrates the how to
use TensorBoard's histogram dashboard.
## Misc
- * @{$programmers_guide/version_compat},
+ * @{$guide/version_compat},
which explains backward compatibility guarantees and non-guarantees.
- * @{$programmers_guide/faq}, which contains frequently asked
+ * @{$guide/faq}, which contains frequently asked
questions about TensorFlow.
diff --git a/tensorflow/docs_src/programmers_guide/keras.md b/tensorflow/docs_src/guide/keras.md
index c6aca7ebf4..83172dab7f 100644
--- a/tensorflow/docs_src/programmers_guide/keras.md
+++ b/tensorflow/docs_src/guide/keras.md
@@ -19,7 +19,7 @@ fast prototyping, advanced research, and production, with three key advantages:
[Keras API specification](https://keras.io){:.external}. This is a high-level
API to build and train models that includes first-class support for
TensorFlow-specific functionality, such as [eager execution](#eager_execution),
-`tf.data` pipelines, and [Estimators](/programmers_guide/estimators).
+`tf.data` pipelines, and [Estimators](./estimators.md).
`tf.keras` makes TensorFlow easier to use without sacrificing flexibility and
performance.
@@ -35,8 +35,8 @@ from tensorflow import keras
* The `tf.keras` version in the latest TensorFlow release might not be the same
as the latest `keras` version from PyPI. Check `tf.keras.__version__`.
* When [saving a model's weights](#weights_only), `tf.keras` defaults to the
- [checkpoint format](/get_started/checkpoints). Pass `save_format='h5'` to use
- HDF5.
+ [checkpoint format](../get_started/checkpoints.md). Pass `save_format='h5'` to
+ use HDF5.
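A hedged illustration of the two weight formats (the model definition and file paths are stand-ins):

```python
import tensorflow as tf
from tensorflow import keras

model = keras.Sequential([keras.layers.Dense(1, input_shape=(4,))])

model.save_weights('./my_checkpoint')                   # TensorFlow checkpoint (default)
model.save_weights('my_weights.h5', save_format='h5')  # Keras HDF5
```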
## Build a simple model
@@ -179,7 +179,7 @@ model.fit(data, labels, epochs=10, batch_size=32,
### Input tf.data datasets
-Use the [Datasets API](/programmers_guide/datasets) to scale to large datasets
+Use the [Datasets API](./datasets.md) to scale to large datasets
or multi-device training. Pass a `tf.data.Dataset` instance to the `fit`
method:
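A hedged sketch of this usage (the model, data, and hyperparameters are illustrative):

```python
import numpy as np
import tensorflow as tf
from tensorflow import keras

model = keras.Sequential(
    [keras.layers.Dense(10, activation='softmax', input_shape=(32,))])
model.compile(optimizer=tf.train.AdamOptimizer(0.001),
              loss='categorical_crossentropy')

data = np.random.random((1000, 32)).astype(np.float32)
labels = np.random.random((1000, 10)).astype(np.float32)

dataset = tf.data.Dataset.from_tensor_slices((data, labels))
dataset = dataset.batch(32).repeat()

# steps_per_epoch is required because the repeated dataset has no end.
model.fit(dataset, epochs=10, steps_per_epoch=30)
```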
@@ -285,7 +285,7 @@ your own forward pass. Create layers in the `__init__` method and set them as
attributes of the class instance. Define the forward pass in the `call` method.
Model subclassing is particularly useful when
-[eager execution](/programmers_guide/eager) is enabled since the forward pass
+[eager execution](./eager.md) is enabled since the forward pass
can be written imperatively.
Key Point: Use the right API for the job. While model subclassing offers
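A minimal sketch of the subclassing pattern just described (the layer choices are arbitrary):

```python
import tensorflow as tf
from tensorflow import keras

class MyModel(keras.Model):

  def __init__(self, num_classes=10):
    super(MyModel, self).__init__(name='my_model')
    # Create layers in __init__ and set them as attributes.
    self.dense_1 = keras.layers.Dense(32, activation='relu')
    self.dense_2 = keras.layers.Dense(num_classes, activation='softmax')

  def call(self, inputs):
    # Define the forward pass imperatively.
    x = self.dense_1(inputs)
    return self.dense_2(x)
```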
@@ -410,7 +410,7 @@ during training. You can write your own custom callback, or use the built-in
* `tf.keras.callbacks.EarlyStopping`: Interrupt training when validation
performance has stopped improving.
* `tf.keras.callbacks.TensorBoard`: Monitor the model's behavior using
- [TensorBoard](/programmers_guide/summaries_and_tensorboard).
+ [TensorBoard](./summaries_and_tensorboard.md).
To use a `tf.keras.callbacks.Callback`, pass it to the model's `fit` method:
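For instance (a hedged sketch; `model`, `data`, `labels`, and the validation arrays are assumed from the surrounding guide):

```python
from tensorflow import keras

callbacks = [
    # Interrupt training if val_loss stops improving for over 2 epochs.
    keras.callbacks.EarlyStopping(patience=2, monitor='val_loss'),
    # Write TensorBoard logs to the ./logs directory.
    keras.callbacks.TensorBoard(log_dir='./logs'),
]
model.fit(data, labels, batch_size=32, epochs=5, callbacks=callbacks,
          validation_data=(val_data, val_labels))
```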
@@ -442,8 +442,8 @@ model.load_weights('my_model')
```
By default, this saves the model's weights in the
-[TensorFlow checkpoint](/get_started/checkpoints) file format. Weights can also
-be saved to the Keras HDF5 format (the default for the multi-backend
+[TensorFlow checkpoint](../get_started/checkpoints.md) file format. Weights can
+also be saved to the Keras HDF5 format (the default for the multi-backend
implementation of Keras):
```python
@@ -509,7 +509,7 @@ model = keras.models.load_model('my_model.h5')
## Eager execution
-[Eager execution](/programmers_guide/eager) is an imperative programming
+[Eager execution](./eager.md) is an imperative programming
environment that evaluates operations immediately. This is not required for
Keras, but is supported by `tf.keras` and useful for inspecting your program and
debugging.
@@ -520,7 +520,7 @@ especially benefits *model subclassing* and building *custom layers*—the APIs
that require you to write the forward pass as code (instead of the APIs that
create models by assembling existing layers).
-See the [eager execution guide](/programmers_guide/eager#build_a_model) for
+See the [eager execution guide](./eager.md#build_a_model) for
examples of using Keras models with custom training loops and `tf.GradientTape`.
@@ -528,14 +528,14 @@ examples of using Keras models with custom training loops and `tf.GradientTape`.
### Estimators
-The [Estimators](/programmers_guide/estimators) API is used for training models
+The [Estimators](./estimators.md) API is used for training models
for distributed environments. This targets industry use cases such as
distributed training on large datasets that can export a model for production.
A `tf.keras.Model` can be trained with the `tf.estimator` API by converting the
model to a `tf.estimator.Estimator` object with
`tf.keras.estimator.model_to_estimator`. See
-[Creating Estimators from Keras models](/programmers_guide/estimators#creating_estimators_from_keras_models).
+[Creating Estimators from Keras models](./estimators.md#creating_estimators_from_keras_models).
```python
model = keras.Sequential([layers.Dense(10, activation='softmax'),
@@ -548,8 +548,8 @@ model.compile(optimizer=tf.train.RMSPropOptimizer(0.001),
estimator = keras.estimator.model_to_estimator(model)
```
-Note: Enable [eager execution](/programmers_guide/eager) for debugging
-[Estimator input functions](/programmers_guide/premade_estimators#create_input_functions)
+Note: Enable [eager execution](./eager.md) for debugging
+[Estimator input functions](./premade_estimators.md#create_input_functions)
and inspecting data.
### Multiple GPUs
diff --git a/tensorflow/docs_src/programmers_guide/leftnav_files b/tensorflow/docs_src/guide/leftnav_files
index 357a2a1cb9..357a2a1cb9 100644
--- a/tensorflow/docs_src/programmers_guide/leftnav_files
+++ b/tensorflow/docs_src/guide/leftnav_files
diff --git a/tensorflow/docs_src/programmers_guide/low_level_intro.md b/tensorflow/docs_src/guide/low_level_intro.md
index 478e2bb70b..665a5568b4 100644
--- a/tensorflow/docs_src/programmers_guide/low_level_intro.md
+++ b/tensorflow/docs_src/guide/low_level_intro.md
@@ -303,7 +303,7 @@ while True:
break
```
-For more details on Datasets and Iterators see: @{$programmers_guide/datasets}.
+For more details on Datasets and Iterators see: @{$guide/datasets}.
## Layers
diff --git a/tensorflow/docs_src/programmers_guide/premade_estimators.md b/tensorflow/docs_src/guide/premade_estimators.md
index 02e2caf64b..3e910c1fe2 100644
--- a/tensorflow/docs_src/programmers_guide/premade_estimators.md
+++ b/tensorflow/docs_src/guide/premade_estimators.md
@@ -78,10 +78,10 @@ provides a programming stack consisting of multiple API layers:
We strongly recommend writing TensorFlow programs with the following APIs:
-* @{$programmers_guide/estimators$Estimators}, which represent a complete model.
+* @{$guide/estimators$Estimators}, which represent a complete model.
The Estimator API provides methods to train the model, to judge the model's
accuracy, and to generate predictions.
-* @{$programmers_guide/datasets_for_estimators}, which build a data input
+* @{$guide/datasets_for_estimators}, which build a data input
pipeline. The Dataset API has methods to load and manipulate data, and feed
it into your model. The Dataset API meshes well with the Estimators API.
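To make the division of labor concrete, a hedged sketch (the feature columns and input functions are assumed defined elsewhere in the guide):

```python
import tensorflow as tf

# A premade Estimator is the complete model: train, evaluate, predict.
classifier = tf.estimator.DNNClassifier(
    feature_columns=my_feature_columns,  # assumed defined
    hidden_units=[10, 10],
    n_classes=3)

classifier.train(input_fn=train_input_fn, steps=1000)  # input_fn builds the Dataset pipeline
eval_result = classifier.evaluate(input_fn=eval_input_fn)
```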
@@ -173,7 +173,7 @@ example is an Iris Versicolor.
An Estimator is TensorFlow's high-level representation of a complete model. It
handles the details of initialization, logging, saving and restoring, and many
other features so you can concentrate on your model. For more details see
-@{$programmers_guide/estimators}.
+@{$guide/estimators}.
An Estimator is any class derived from @{tf.estimator.Estimator}. TensorFlow
provides a collection of
@@ -424,7 +424,7 @@ Now that you've gotten started writing TensorFlow programs, consider the
following material:
* @{$checkpoints$Checkpoints} to learn how to save and restore models.
-* @{$programmers_guide/datasets_for_estimators} to learn more about importing
+* @{$guide/datasets_for_estimators} to learn more about importing
data into your model.
* @{$custom_estimators$Creating Custom Estimators} to learn how to
write your own Estimator, customized for a particular problem.
diff --git a/tensorflow/docs_src/programmers_guide/saved_model.md b/tensorflow/docs_src/guide/saved_model.md
index c6ef87c54a..27ef7bb0da 100644
--- a/tensorflow/docs_src/programmers_guide/saved_model.md
+++ b/tensorflow/docs_src/guide/saved_model.md
@@ -3,7 +3,7 @@
The @{tf.train.Saver} class provides methods to save and restore models. The
@{tf.saved_model.simple_save} function is an easy way to build a
@{tf.saved_model$saved model} suitable for serving.
-[Estimators](@{$programmers_guide/estimators}) automatically save and restore
+[Estimators](@{$guide/estimators}) automatically save and restore
variables in the `model_dir`.
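A minimal graph-mode sketch of `simple_save` (tensor names and export path are illustrative):

```python
import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 3], name='x')
y = tf.layers.dense(x, 1, name='y')

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  tf.saved_model.simple_save(
      sess,
      export_dir='/tmp/simple_save_example',
      inputs={'x': x},
      outputs={'y': y})
```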
## Save and restore variables
@@ -299,7 +299,7 @@ following:
added attributes with defaults don't cause older model consumers to fail
loading models regenerated with newer training binaries.
-See [compatibility guidance](https://www.tensorflow.org/programmers_guide/version_compat)
+See [compatibility guidance](./version_compat.md)
for more information.
### Loading a SavedModel in Python
diff --git a/tensorflow/docs_src/programmers_guide/summaries_and_tensorboard.md b/tensorflow/docs_src/guide/summaries_and_tensorboard.md
index fadfa03e78..fadfa03e78 100644
--- a/tensorflow/docs_src/programmers_guide/summaries_and_tensorboard.md
+++ b/tensorflow/docs_src/guide/summaries_and_tensorboard.md
diff --git a/tensorflow/docs_src/programmers_guide/tensorboard_histograms.md b/tensorflow/docs_src/guide/tensorboard_histograms.md
index 918deda190..918deda190 100644
--- a/tensorflow/docs_src/programmers_guide/tensorboard_histograms.md
+++ b/tensorflow/docs_src/guide/tensorboard_histograms.md
diff --git a/tensorflow/docs_src/programmers_guide/tensors.md b/tensorflow/docs_src/guide/tensors.md
index 1248c3cabe..7227260f1a 100644
--- a/tensorflow/docs_src/programmers_guide/tensors.md
+++ b/tensorflow/docs_src/guide/tensors.md
@@ -26,7 +26,7 @@ some cases it's only possible to find the shape of a tensor at graph execution
time.
Some types of tensors are special, and these will be covered in other
-units of the Programmer's guide. The main ones are:
+units of the TensorFlow guide. The main ones are:
* `tf.Variable`
* `tf.constant`
@@ -230,7 +230,7 @@ yet_another = tf.reshape(matrixAlt, [13, 2, -1]) # ERROR!
## Data types
In addition to dimensionality, Tensors have a data type. Refer to the
-`tf.DataType` page in the programmer's guide for a full list of the data types.
+`tf.DType` page for a complete list of the data types.
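For example, inspecting and changing a tensor's single data type:

```python
import tensorflow as tf

t = tf.constant([1, 2, 3])  # dtype inferred as tf.int32
f = tf.cast(t, tf.float32)  # casting produces a new tensor with a new dtype

print(t.dtype)  # <dtype: 'int32'>
print(f.dtype)  # <dtype: 'float32'>
```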
It is not possible to have a `tf.Tensor` with more than one data type. It is
possible, however, to serialize arbitrary data structures as `string`s and store
diff --git a/tensorflow/docs_src/programmers_guide/using_gpu.md b/tensorflow/docs_src/guide/using_gpu.md
index c429ca4750..c429ca4750 100644
--- a/tensorflow/docs_src/programmers_guide/using_gpu.md
+++ b/tensorflow/docs_src/guide/using_gpu.md
diff --git a/tensorflow/docs_src/programmers_guide/using_tpu.md b/tensorflow/docs_src/guide/using_tpu.md
index 44aabf0557..41d80d9d60 100644
--- a/tensorflow/docs_src/programmers_guide/using_tpu.md
+++ b/tensorflow/docs_src/guide/using_tpu.md
@@ -171,7 +171,7 @@ This section details the changes you must make to the model function
During regular usage TensorFlow attempts to determine the shapes of each
`tf.Tensor` during graph construction. During execution any unknown shape
dimensions are determined dynamically;
-see @{$programmers_guide/tensors#shape$Tensor Shapes} for more details.
+see @{$guide/tensors#shape$Tensor Shapes} for more details.
To run on Cloud TPUs TensorFlow models are compiled using @{$xla$XLA}.
XLA uses a similar system for determining shapes at compile time. XLA requires
@@ -195,7 +195,7 @@ TPU.
Build your evaluation metrics dictionary in a stand-alone `metric_fn`.
-<!-- TODO(markdaoust) link to programmers_guide/metrics when it exists -->
+<!-- TODO(markdaoust) link to guide/metrics when it exists -->
Evaluation metrics are an essential part of training a model. These are fully
supported on Cloud TPUs, but with a slightly different syntax.
diff --git a/tensorflow/docs_src/programmers_guide/variables.md b/tensorflow/docs_src/guide/variables.md
index cd8c4b5b9a..cd8c4b5b9a 100644
--- a/tensorflow/docs_src/programmers_guide/variables.md
+++ b/tensorflow/docs_src/guide/variables.md
diff --git a/tensorflow/docs_src/programmers_guide/version_compat.md b/tensorflow/docs_src/guide/version_compat.md
index 72e427c5f8..72e427c5f8 100644
--- a/tensorflow/docs_src/programmers_guide/version_compat.md
+++ b/tensorflow/docs_src/guide/version_compat.md
diff --git a/tensorflow/docs_src/install/install_go.md b/tensorflow/docs_src/install/install_go.md
index 55bc0f64e7..2c126df5aa 100644
--- a/tensorflow/docs_src/install/install_go.md
+++ b/tensorflow/docs_src/install/install_go.md
@@ -6,7 +6,7 @@ a Go application. This guide explains how to install and set up the
[TensorFlow Go package](https://godoc.org/github.com/tensorflow/tensorflow/tensorflow/go).
Warning: The TensorFlow Go API is *not* covered by the TensorFlow
-[API stability guarantees](https://www.tensorflow.org/programmers_guide/version_semantics).
+[API stability guarantees](../guide/version_semantics.md).
## Supported Platforms
diff --git a/tensorflow/docs_src/install/install_java.md b/tensorflow/docs_src/install/install_java.md
index 637231da12..692dfc9cef 100644
--- a/tensorflow/docs_src/install/install_java.md
+++ b/tensorflow/docs_src/install/install_java.md
@@ -7,7 +7,7 @@ Java application. This guide explains how to install
and use it in a Java application.
Warning: The TensorFlow Java API is *not* covered by the TensorFlow
-[API stability guarantees](https://www.tensorflow.org/programmers_guide/version_semantics).
+[API stability guarantees](../guide/version_semantics.md).
## Supported Platforms
diff --git a/tensorflow/docs_src/install/leftnav_files b/tensorflow/docs_src/install/leftnav_files
index e523e06f67..ace275c0e8 100644
--- a/tensorflow/docs_src/install/leftnav_files
+++ b/tensorflow/docs_src/install/leftnav_files
@@ -4,6 +4,7 @@ index.md
install_linux.md: Ubuntu
install_mac.md: MacOS
install_windows.md: Windows
+install_raspbian.md: Raspbian
install_sources.md: From source
>>>
migration.md
diff --git a/tensorflow/docs_src/tutorials/deep_cnn.md b/tensorflow/docs_src/tutorials/deep_cnn.md
index 6a4c9a9b07..44a32d9d1d 100644
--- a/tensorflow/docs_src/tutorials/deep_cnn.md
+++ b/tensorflow/docs_src/tutorials/deep_cnn.md
@@ -268,7 +268,7 @@ in `cifar10_input.py`.
`cifar10_train.py` periodically @{tf.train.Saver$saves}
all model parameters in
-@{$programmers_guide/saved_model$checkpoint files}
+@{$guide/saved_model$checkpoint files}
but it does *not* evaluate the model. The checkpoint file
will be used by `cifar10_eval.py` to measure the predictive
performance (see [Evaluating a Model](#evaluating-a-model) below).
diff --git a/tensorflow/docs_src/tutorials/layers.md b/tensorflow/docs_src/tutorials/layers.md
index 0f17899dae..212e337637 100644
--- a/tensorflow/docs_src/tutorials/layers.md
+++ b/tensorflow/docs_src/tutorials/layers.md
@@ -627,7 +627,7 @@ operation earlier when we generated the probabilities in `cnn_model_fn`.
> argument, TensorFlow will assign a default name. A couple of easy ways to
> discover the names applied to operations are to visualize your graph on
> @{$graph_viz$TensorBoard} or to enable the
-> @{$programmers_guide/debugger$TensorFlow Debugger (tfdbg)}.
+> @{$guide/debugger$TensorFlow Debugger (tfdbg)}.
Next, we create the `LoggingTensorHook`, passing `tensors_to_log` to the
`tensors` argument. We set `every_n_iter=50`, which specifies that probabilities
diff --git a/tensorflow/examples/how_tos/reading_data/fully_connected_reader.py b/tensorflow/examples/how_tos/reading_data/fully_connected_reader.py
index 307eede5c0..7402247448 100644
--- a/tensorflow/examples/how_tos/reading_data/fully_connected_reader.py
+++ b/tensorflow/examples/how_tos/reading_data/fully_connected_reader.py
@@ -17,7 +17,7 @@
This version is like fully_connected_feed.py but uses data converted
to a TFRecords file containing tf.train.Example protocol buffers.
See:
-https://www.tensorflow.org/programmers_guide/reading_data#reading_from_files
+https://www.tensorflow.org/guide/reading_data#reading_from_files
for context.
YOU MUST run convert_to_records before running this (but you only need to
diff --git a/tensorflow/java/README.md b/tensorflow/java/README.md
index 2f1ce253b2..c7382ff231 100644
--- a/tensorflow/java/README.md
+++ b/tensorflow/java/README.md
@@ -1,7 +1,7 @@
# TensorFlow for Java
> *WARNING*: The TensorFlow Java API is not currently covered by the TensorFlow
-> [API stability guarantees](https://www.tensorflow.org/programmers_guide/version_semantics).
+> [API stability guarantees](https://www.tensorflow.org/guide/version_semantics).
>
> For using TensorFlow on Android refer instead to
> [contrib/android](https://www.tensorflow.org/code/tensorflow/contrib/android),
@@ -23,8 +23,7 @@ native libraries will need to be built from source.
2. Setup the environment to build TensorFlow from source code
([Linux](https://www.tensorflow.org/install/install_sources#PrepareLinux)
- or [Mac OS
- X](https://www.tensorflow.org/install/install_sources#PrepareMac)).
+ or [macOS](https://www.tensorflow.org/install/install_sources#PrepareMac)).
If you'd like to skip reading those details and do not care about GPU
support, try the following:
diff --git a/tensorflow/java/src/main/java/org/tensorflow/package-info.java b/tensorflow/java/src/main/java/org/tensorflow/package-info.java
index 521c5c610c..f353ee3145 100644
--- a/tensorflow/java/src/main/java/org/tensorflow/package-info.java
+++ b/tensorflow/java/src/main/java/org/tensorflow/package-info.java
@@ -17,7 +17,7 @@ limitations under the License.
* Defines classes to build, save, load and execute TensorFlow models.
*
* <p><b>WARNING</b>: The API is currently experimental and is not covered by TensorFlow <a
- * href="https://www.tensorflow.org/programmers_guide/version_semantics">API stability
+ * href="https://www.tensorflow.org/guide/version_semantics">API stability
* guarantees</a>. See <a
* href="https://www.tensorflow.org/code/tensorflow/java/README.md">README.md</a> for installation
* instructions.
diff --git a/tensorflow/python/data/__init__.py b/tensorflow/python/data/__init__.py
index 7efe0948e7..3b9bf2469e 100644
--- a/tensorflow/python/data/__init__.py
+++ b/tensorflow/python/data/__init__.py
@@ -14,7 +14,7 @@
# ==============================================================================
"""`tf.data.Dataset` API for input pipelines.
-See the @{$datasets$Importing Data} Programmer's Guide for an overview.
+See @{$guide/datasets$Importing Data} for an overview.
"""
from __future__ import absolute_import
diff --git a/tensorflow/python/data/ops/dataset_ops.py b/tensorflow/python/data/ops/dataset_ops.py
index 0e60ccb536..7cb6627615 100644
--- a/tensorflow/python/data/ops/dataset_ops.py
+++ b/tensorflow/python/data/ops/dataset_ops.py
@@ -218,7 +218,7 @@ class Dataset(object):
@{tf.constant} operations. For large datasets (> 1 GB), this can waste
memory and run into byte limits of graph serialization. If tensors contains
one or more large NumPy arrays, consider the alternative described in
- @{$programmers_guide/datasets#consuming_numpy_arrays$this guide}.
+ @{$guide/datasets#consuming_numpy_arrays$this guide}.
Args:
tensors: A nested structure of tensors.
@@ -237,7 +237,7 @@ class Dataset(object):
@{tf.constant} operations. For large datasets (> 1 GB), this can waste
memory and run into byte limits of graph serialization. If tensors contains
one or more large NumPy arrays, consider the alternative described in
- @{$programmers_guide/datasets#consuming_numpy_arrays$this guide}.
+ @{$guide/datasets#consuming_numpy_arrays$this guide}.
Args:
tensors: A nested structure of tensors, each having the same size in the
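A hedged sketch of that placeholder-based alternative (the array shape is illustrative):

```python
import numpy as np
import tensorflow as tf

features = np.random.rand(100000, 128).astype(np.float32)  # a large array

# Feed the array at initialization time instead of embedding it as tf.constant.
features_placeholder = tf.placeholder(features.dtype, features.shape)
dataset = tf.data.Dataset.from_tensor_slices(features_placeholder)
iterator = dataset.make_initializable_iterator()

with tf.Session() as sess:
  sess.run(iterator.initializer, feed_dict={features_placeholder: features})
```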
diff --git a/tensorflow/python/debug/BUILD b/tensorflow/python/debug/BUILD
index 78ef03a2d8..6941cacf23 100644
--- a/tensorflow/python/debug/BUILD
+++ b/tensorflow/python/debug/BUILD
@@ -5,7 +5,7 @@
#
# ":debug_py": Public Python methods and classes of tfdbg.
# For API documentation, see https://www.tensorflow.org/api_docs/python/tfdbg
-# For a user interface walkthrough, see https://www.tensorflow.org/programmers_guide/debugger
+# For a user interface walkthrough, see https://www.tensorflow.org/guide/debugger
# ":grpc_debug_server": Server interface for grpc:// debug URLs.
package(
diff --git a/tensorflow/python/debug/README.md b/tensorflow/python/debug/README.md
index 269bbb19bd..9c16af4d79 100644
--- a/tensorflow/python/debug/README.md
+++ b/tensorflow/python/debug/README.md
@@ -28,7 +28,7 @@ models:
* Easy access through session wrappers
* Easy integration with common high-level APIs, such as
- [TensorFlow Estimators](https://www.tensorflow.org/programmers_guide/estimators) and
+ [TensorFlow Estimators](https://www.tensorflow.org/guide/estimators) and
[Keras](https://keras.io/)
* Inspection of runtime tensor values and node connections
* Conditional breaking after runs that generate tensors satisfying given
@@ -43,7 +43,7 @@ models:
## How to use TFDBG?
-* For a walkthrough of TFDBG command-line interface, see https://www.tensorflow.org/programmers_guide/debugger.
+* For a walkthrough of TFDBG command-line interface, see https://www.tensorflow.org/guide/debugger.
* For information on the web GUI of TFDBG (TensorBoard Debugger Plugin), see
[this README](https://github.com/tensorflow/tensorboard/blob/master/tensorboard/plugins/debugger/README.md).
* For programmatic use of the API of TFDBG, see https://www.tensorflow.org/api_docs/python/tfdbg.
diff --git a/tensorflow/python/debug/examples/README.md b/tensorflow/python/debug/examples/README.md
index cb4d484092..3b431e04dc 100644
--- a/tensorflow/python/debug/examples/README.md
+++ b/tensorflow/python/debug/examples/README.md
@@ -3,7 +3,7 @@ Hi, there!
The documentation of **TensorFlow Debugger (tfdbg)** has moved.
See the source version at
-[this new location](../../../docs_src/programmers_guide/debugger.md).
+[this new location](../../../docs_src/guide/debugger.md).
See the public website version at
-[https://www.tensorflow.org/programmers_guide/debugger](https://www.tensorflow.org/programmers_guide/debugger).
+[https://www.tensorflow.org/guide/debugger](https://www.tensorflow.org/guide/debugger).
diff --git a/tensorflow/python/estimator/BUILD b/tensorflow/python/estimator/BUILD
index 38e446da0c..8ee38d35cc 100644
--- a/tensorflow/python/estimator/BUILD
+++ b/tensorflow/python/estimator/BUILD
@@ -1,8 +1,4 @@
-package(
- default_visibility = [
- "//tensorflow:internal",
- ],
-)
+package(default_visibility = ["//tensorflow:internal"])
licenses(["notice"]) # Apache 2.0
diff --git a/tensorflow/python/estimator/estimator.py b/tensorflow/python/estimator/estimator.py
index 28768aaf81..8df75d9eee 100644
--- a/tensorflow/python/estimator/estimator.py
+++ b/tensorflow/python/estimator/estimator.py
@@ -1133,6 +1133,18 @@ class Estimator(object):
return self._train_model_default(input_fn, hooks, saving_listeners)
def _train_model_default(self, input_fn, hooks, saving_listeners):
+ """Initiate training with input_fn, without DistributionStrategies.
+
+ Args:
+ input_fn: A function that provides input data for training as minibatches.
+ hooks: List of `SessionRunHook` subclass instances. Used for callbacks
+ inside the training loop.
+ saving_listeners: List of `CheckpointSaverListener` objects. Used for
+ callbacks that run immediately before or after checkpoint saves.
+
+ Returns:
+ Loss from training.
+ """
worker_hooks = []
with ops.Graph().as_default() as g, g.device(self._device_fn):
random_seed.set_random_seed(self._config.tf_random_seed)
@@ -1149,6 +1161,18 @@ class Estimator(object):
saving_listeners)
def _train_model_distributed(self, input_fn, hooks, saving_listeners):
+ """Initiate training with input_fn, using DistributionStrategies.
+
+ Args:
+ input_fn: A function that provides input data for training as minibatches.
+ hooks: List of `SessionRunHook` subclass instances. Used for callbacks
+ inside the training loop.
+ saving_listeners: List of `CheckpointSaverListener` objects. Used for
+ callbacks that run immediately before or after checkpoint saves.
+
+ Returns:
+ Loss from training.
+ """
self._distribution.configure(self._session_config)
worker_hooks = []
with ops.Graph().as_default() as g:
diff --git a/tensorflow/python/estimator/estimator_test.py b/tensorflow/python/estimator/estimator_test.py
index 4459de99a6..733c7fb95d 100644
--- a/tensorflow/python/estimator/estimator_test.py
+++ b/tensorflow/python/estimator/estimator_test.py
@@ -61,6 +61,7 @@ from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import loader_impl
+from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.summary import summary
from tensorflow.python.summary import summary_iterator
@@ -2829,6 +2830,45 @@ class EstimatorExportTest(test.TestCase):
# Clean up.
gfile.DeleteRecursively(tmpdir)
+ def test_export_savedmodel_no_export_outputs(self):
+ """Ensure that an EstimatorSpec without outputs defined can be exported."""
+
+ def _model_fn(features, labels, mode):
+ _, _ = features, labels
+ variables.Variable(1., name='weight')
+ return model_fn_lib.EstimatorSpec(
+ mode,
+ predictions=constant_op.constant(10.),
+ loss=constant_op.constant(1.),
+ train_op=state_ops.assign_add(training.get_global_step(), 1))
+
+ tmpdir = tempfile.mkdtemp()
+ est = estimator.Estimator(model_fn=_model_fn)
+ est.train(input_fn=dummy_input_fn, steps=1)
+
+ # Perform the export.
+ export_dir_base = os.path.join(
+ compat.as_bytes(tmpdir), compat.as_bytes('no_export_outputs'))
+ export_dir = est.export_savedmodel(
+ export_dir_base, _get_serving_input_receiver_fn())
+
+ # Check that all the files are in the right places.
+ self.assertTrue(gfile.Exists(export_dir_base))
+ self._validate_exported_files(export_dir)
+
+ # Restore, to validate that the export was well-formed.
+ with ops.Graph().as_default() as graph:
+ with session.Session(graph=graph) as sess:
+ meta_graph = loader.load(sess, [tag_constants.SERVING], export_dir)
+ graph_ops = [x.name for x in graph.get_operations()]
+ self.assertTrue('weight' in graph_ops)
+
+ sig_def = meta_graph.signature_def
+ self.assertEqual(len(sig_def), 1)
+ sig_outputs = sig_def[
+ signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].outputs
+ self.assertEqual(sig_outputs['output'].name, 'Const:0')
+
class EstimatorHookOrderingTest(test.TestCase):
diff --git a/tensorflow/python/estimator/keras.py b/tensorflow/python/estimator/keras.py
index 312eb9a035..408752d360 100644
--- a/tensorflow/python/estimator/keras.py
+++ b/tensorflow/python/estimator/keras.py
@@ -453,7 +453,7 @@ def model_to_estimator(keras_model=None,
"""Constructs an `Estimator` instance from given keras model.
For usage example, please see
- @{$programmers_guide/estimators$creating_estimators_from_keras_models}.
+ @{$guide/estimators$creating_estimators_from_keras_models}.
Args:
keras_model: A compiled Keras model object. This argument is mutually
diff --git a/tensorflow/python/estimator/model_fn.py b/tensorflow/python/estimator/model_fn.py
index 40886f9c3c..a9fd8f8e1a 100644
--- a/tensorflow/python/estimator/model_fn.py
+++ b/tensorflow/python/estimator/model_fn.py
@@ -23,7 +23,7 @@ import collections
import six
-from tensorflow.python.estimator.export.export_output import ExportOutput
+from tensorflow.python.estimator.export import export_output as export_output_lib
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
@@ -158,6 +158,8 @@ class EstimatorSpec(
Multi-headed models should specify one entry for each head, one of
which must be named using
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.
+ If no entry is provided, a default `PredictOutput` mapping to
+ `predictions` will be created.
training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to
run on the chief worker during training.
training_hooks: Iterable of `tf.train.SessionRunHook` objects to run
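Hedged illustration of the `export_outputs` contract described above (`scores_tensor` and `value_tensor` are stand-ins for model outputs):

```python
import tensorflow as tf

# With multiple heads, exactly one entry must carry the default serving key;
# with export_outputs=None, a PredictOutput over `predictions` is generated.
export_outputs = {
    tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
        tf.estimator.export.PredictOutput({'scores': scores_tensor}),
    'regress': tf.estimator.export.RegressionOutput(value=value_tensor),
}
```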
@@ -232,29 +234,9 @@ class EstimatorSpec(
_check_is_tensor_or_operation(metric_update,
'eval_metric_ops[{}]'.format(key))
- # Validate export_outputs.
- if export_outputs is not None:
- if not isinstance(export_outputs, dict):
- raise TypeError('export_outputs must be dict, given: {}'.format(
- export_outputs))
- for v in six.itervalues(export_outputs):
- if not isinstance(v, ExportOutput):
- raise TypeError(
- 'Values in export_outputs must be ExportOutput objects. '
- 'Given: {}'.format(export_outputs))
- # Note export_outputs is allowed to be empty.
- if len(export_outputs) == 1:
- (key, value), = export_outputs.items()
- if key != signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
- export_outputs[
- signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = value
- if len(export_outputs) > 1:
- if (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
- not in export_outputs):
- raise ValueError(
- 'Multiple export_outputs were provided, but none of them is '
- 'specified as the default. Do this by naming one of them with '
- 'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.')
+ # Validate the passed export outputs, or generate defaults.
+ if mode == ModeKeys.PREDICT:
+ export_outputs = _get_export_outputs(export_outputs, predictions)
# Validate that all tensors and ops are from the default graph.
default_graph = ops.get_default_graph()
@@ -286,11 +268,11 @@ class EstimatorSpec(
raise ValueError(error_message_template.format('train_op', train_op.name))
for key, value in list(six.iteritems(eval_metric_ops)):
values = nest.flatten(value)
- for value in values:
- if value.graph is not default_graph:
+ for val in values:
+ if val.graph is not default_graph:
raise ValueError(error_message_template.format(
'eval_metric_ops',
- '{0}: {1}'.format(key, value.name)))
+ '{0}: {1}'.format(key, val.name)))
# Validate hooks.
training_chief_hooks = tuple(training_chief_hooks or [])
@@ -334,6 +316,70 @@ class EstimatorSpec(
return EstimatorSpec(*new_fields)
+def _get_export_outputs(export_outputs, predictions):
+ """Validate export_outputs or create default export_outputs.
+
+ Args:
+ export_outputs: Describes the output signatures to be exported to
+ `SavedModel` and used during serving. Should be a dict or None.
+ predictions: Predictions `Tensor` or dict of `Tensor`.
+
+ Returns:
+ A valid export_outputs dict.
+
+ Raises:
+ TypeError: if export_outputs is not a dict or its values are not
+ ExportOutput instances.
+ """
+ if export_outputs is None:
+ default_output = export_output_lib.PredictOutput(predictions)
+ export_outputs = {
+ signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: default_output}
+
+ if not isinstance(export_outputs, dict):
+ raise TypeError('export_outputs must be dict, given: {}'.format(
+ export_outputs))
+ for v in six.itervalues(export_outputs):
+ if not isinstance(v, export_output_lib.ExportOutput):
+ raise TypeError(
+ 'Values in export_outputs must be ExportOutput objects. '
+ 'Given: {}'.format(export_outputs))
+
+ _maybe_add_default_serving_output(export_outputs)
+
+ return export_outputs
+
+
+def _maybe_add_default_serving_output(export_outputs):
+ """Add a default serving output to the export_outputs if not present.
+
+ Args:
+ export_outputs: Describes the output signatures to be exported to
+ `SavedModel` and used during serving. Should be a dict.
+
+ Returns:
+ The export_outputs dict, with a default serving signature added if necessary.
+
+ Raises:
+ ValueError: if multiple export_outputs were provided without a default
+ serving key.
+ """
+ if len(export_outputs) == 1:
+ (key, value), = export_outputs.items()
+ if key != signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
+ export_outputs[
+ signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = value
+ if len(export_outputs) > 1:
+ if (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
+ not in export_outputs):
+ raise ValueError(
+ 'Multiple export_outputs were provided, but none of them is '
+ 'specified as the default. Do this by naming one of them with '
+ 'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.')
+
+ return export_outputs
+
+
class _TPUEstimatorSpec(collections.namedtuple('TPUEstimatorSpec', [
'mode',
'predictions',
diff --git a/tensorflow/python/estimator/model_fn_test.py b/tensorflow/python/estimator/model_fn_test.py
index b7eeeb437c..08e41fd414 100644
--- a/tensorflow/python/estimator/model_fn_test.py
+++ b/tensorflow/python/estimator/model_fn_test.py
@@ -592,6 +592,27 @@ class EstimatorSpecInferTest(test.TestCase):
predictions=predictions,
export_outputs=export_outputs)
+ def testDefaultExportOutputCreated(self):
+ """Ensure that a default PredictOutput is created for export."""
+ with ops.Graph().as_default(), self.test_session():
+ predictions = constant_op.constant(1.)
+ self._assertDefaultExportOutputForPredictions(predictions)
+
+ def testDefaultExportOutputCreatedDict(self):
+ """Ensure that a default PredictOutput is created for export for dicts."""
+ with ops.Graph().as_default(), self.test_session():
+ predictions = {'loss': constant_op.constant(1.),
+ 'score': constant_op.constant(10.)}
+ self._assertDefaultExportOutputForPredictions(predictions)
+
+ def _assertDefaultExportOutputForPredictions(self, predictions):
+ spec = model_fn.EstimatorSpec(
+ mode=model_fn.ModeKeys.PREDICT, predictions=predictions)
+
+ expected = export_output.PredictOutput(predictions).outputs
+ serving_output = spec.export_outputs[
+ signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
+ self.assertEqual(serving_output.outputs, expected)
if __name__ == '__main__':
test.main()
diff --git a/tensorflow/python/feature_column/feature_column.py b/tensorflow/python/feature_column/feature_column.py
index 7d070dc27c..40219e4b34 100644
--- a/tensorflow/python/feature_column/feature_column.py
+++ b/tensorflow/python/feature_column/feature_column.py
@@ -468,13 +468,25 @@ def linear_model(features,
def _add_to_collections(var, weight_collections):
- # TODO(rohanj): Explore adding a _get_variable_list method on `Variable`
- # so that we don't have to do this check.
- if isinstance(var, variables.PartitionedVariable):
- for constituent_var in list(var):
- ops.add_to_collections(weight_collections, constituent_var)
- else:
- ops.add_to_collections(weight_collections, var)
+ """Adds a var to the list of weight_collections provided.
+
+ Handles the case for partitioned and non-partitioned variables.
+
+ Args:
+ var: A `Variable` or `PartitionedVariable`.
+ weight_collections: List of collections to add the variable to.
+ """
+ for weight_collection in weight_collections:
+ # The layer self.add_variable call already adds it to GLOBAL_VARIABLES.
+ if weight_collection == ops.GraphKeys.GLOBAL_VARIABLES:
+ continue
+ # TODO(rohanj): Explore adding a _get_variable_list method on `Variable`
+ # so that we don't have to do this check.
+ if isinstance(var, variables.PartitionedVariable):
+ for constituent_var in list(var):
+ ops.add_to_collection(weight_collection, constituent_var)
+ else:
+ ops.add_to_collection(weight_collection, var)
class _FCLinearWrapper(base.Layer):
@@ -585,6 +597,8 @@ class _LinearModel(training.Model):
self._feature_columns = _normalize_feature_columns(
feature_columns)
self._weight_collections = list(weight_collections or [])
+ if ops.GraphKeys.GLOBAL_VARIABLES not in self._weight_collections:
+ self._weight_collections.append(ops.GraphKeys.GLOBAL_VARIABLES)
if ops.GraphKeys.MODEL_VARIABLES not in self._weight_collections:
self._weight_collections.append(ops.GraphKeys.MODEL_VARIABLES)
@@ -973,7 +987,12 @@ def shared_embedding_columns(
ValueError: if exactly one of `ckpt_to_load_from` and `tensor_name_in_ckpt`
is specified.
ValueError: if `initializer` is specified and is not callable.
+ RuntimeError: if eager execution is enabled.
"""
+ if context.executing_eagerly():
+ raise RuntimeError('shared_embedding_columns are not supported when eager '
+ 'execution is enabled.')
+
if (dimension is None) or (dimension < 1):
raise ValueError('Invalid dimension {}.'.format(dimension))
if (ckpt_to_load_from is None) != (tensor_name_in_ckpt is None):
@@ -1018,16 +1037,6 @@ def shared_embedding_columns(
shared_embedding_collection_name = '_'.join(c.name for c in sorted_columns)
shared_embedding_collection_name += '_shared_embedding'
- # Create the state (_SharedEmbeddingColumnLayer) here.
- embedding_shape = num_buckets, dimension
-
- shared_embedding_column_layer = _EmbeddingColumnLayer(
- embedding_shape=embedding_shape,
- initializer=initializer,
- weight_collections=[],
- trainable=trainable,
- name=shared_embedding_collection_name)
-
result = []
for column in categorical_columns:
result.append(
@@ -1036,16 +1045,12 @@ def shared_embedding_columns(
initializer=initializer,
dimension=dimension,
combiner=combiner,
- var_scope_name=shared_embedding_collection_name,
+ shared_embedding_collection_name=shared_embedding_collection_name,
ckpt_to_load_from=ckpt_to_load_from,
tensor_name_in_ckpt=tensor_name_in_ckpt,
max_norm=max_norm,
trainable=trainable))
- for single_result in result:
- single_result._set_layer(shared_embedding_column_layer) # pylint: disable=protected-access
- single_result._set_all_columns(result) # pylint: disable=protected-access
-
return result
@@ -1865,11 +1870,8 @@ class _EmbeddingColumnLayer(base.Layer):
dtype=dtypes.float32,
initializer=self._initializer,
trainable=self.trainable)
- # self.add_variable already appends to GLOBAL_VARIABLES collection.
if self._weight_collections and not context.executing_eagerly():
- for weight_collection in self._weight_collections:
- if weight_collection != ops.GraphKeys.GLOBAL_VARIABLES:
- _add_to_collections(self._embedding_weight_var, [weight_collection])
+ _add_to_collections(self._embedding_weight_var, self._weight_collections)
self.built = True
def call(self, _):
@@ -2651,8 +2653,8 @@ class _SharedEmbeddingColumn(
collections.namedtuple(
'_SharedEmbeddingColumn',
('categorical_column', 'dimension', 'combiner', 'initializer',
- 'var_scope_name', 'ckpt_to_load_from', 'tensor_name_in_ckpt',
- 'max_norm', 'trainable'))):
+ 'shared_embedding_collection_name', 'ckpt_to_load_from',
+ 'tensor_name_in_ckpt', 'max_norm', 'trainable'))):
"""See `embedding_column`."""
@property
@@ -2663,7 +2665,7 @@ class _SharedEmbeddingColumn(
@property
def _var_scope_name(self):
- return self.var_scope_name
+ return self.shared_embedding_collection_name
@property
def _parse_example_spec(self):
@@ -2672,22 +2674,6 @@ class _SharedEmbeddingColumn(
def _transform_feature(self, inputs):
return inputs.get(self.categorical_column)
- def _set_layer(self, layer):
- self._layer = layer
-
- def _set_all_columns(self, all_columns):
- self._all_columns = all_columns
-
- def _reset_config(self):
- config = self._layer.get_config()
- config['embedding_shape'] = (
- self.categorical_column._num_buckets, # pylint: disable=protected-access
- self.dimension)
- config['initializer'] = self.initializer
- self._layer = self._layer.__class__.from_config(config)
- for column in self._all_columns:
- column._set_layer(self._layer) # pylint: disable=protected-access
-
@property
def _variable_shape(self):
if not hasattr(self, '_shape'):
@@ -2709,19 +2695,38 @@ class _SharedEmbeddingColumn(
sparse_ids = sparse_tensors.id_tensor
sparse_weights = sparse_tensors.weight_tensor
- self._layer.set_weight_collections(weight_collections)
- embedding_weights = self._layer(
- None, scope=variable_scope.get_variable_scope())
- # If we're in graph mode and this is called with a different graph,
- # then we should reset.
- if not context.executing_eagerly() and (
- ops.get_default_graph() !=
- _get_graph_for_variable(embedding_weights)):
- self._reset_config()
- self._layer.set_weight_collections(weight_collections)
- embedding_weights = self._layer(
- None, scope=variable_scope.get_variable_scope())
-
+ embedding_shape = (self.categorical_column._num_buckets, self.dimension) # pylint: disable=protected-access
+ shared_embedding_collection = ops.get_collection(
+ self.shared_embedding_collection_name)
+ if shared_embedding_collection:
+ if len(shared_embedding_collection) > 1:
+ raise ValueError(
+ 'Collection {} can only contain one variable. '
+ 'Suggested fix A: Choose a unique name for this collection. '
+ 'Suggested fix B: Do not add any variables to this collection. '
+ 'The feature_column library already adds a variable under the '
+ 'hood.'.format(shared_embedding_collection))
+ embedding_weights = shared_embedding_collection[0]
+ if embedding_weights.get_shape() != embedding_shape:
+ raise ValueError(
+ 'Shared embedding collection {} contains variable {} of '
+ 'unexpected shape {}. Expected shape is {}. '
+ 'Suggested fix A: Choose a unique name for this collection. '
+ 'Suggested fix B: Do not add any variables to this collection. '
+ 'The feature_column library already adds a variable under the '
+ 'hood.'.format(self.shared_embedding_collection_name,
+ embedding_weights.name,
+ embedding_weights.get_shape(), embedding_shape))
+ else:
+ embedding_weights = variable_scope.get_variable(
+ name='embedding_weights',
+ shape=embedding_shape,
+ dtype=dtypes.float32,
+ initializer=self.initializer,
+ trainable=self.trainable and trainable,
+ collections=weight_collections)
+ ops.add_to_collection(self.shared_embedding_collection_name,
+ embedding_weights)
if self.ckpt_to_load_from is not None:
to_restore = embedding_weights
if isinstance(to_restore, variables.PartitionedVariable):
@@ -3581,5 +3586,3 @@ class _SequenceCategoricalColumn(
weight_tensor,
shape=array_ops.concat([weight_tensor.dense_shape, [1]], axis=0))
return _CategoricalColumn.IdWeightPair(id_tensor, weight_tensor)
-
-
diff --git a/tensorflow/python/feature_column/feature_column_test.py b/tensorflow/python/feature_column/feature_column_test.py
index a9013d8e42..511205451c 100644
--- a/tensorflow/python/feature_column/feature_column_test.py
+++ b/tensorflow/python/feature_column/feature_column_test.py
@@ -5350,9 +5350,9 @@ class SharedEmbeddingColumnTest(test.TestCase):
self.assertIsNone(embedding_column_a.ckpt_to_load_from)
self.assertIsNone(embedding_column_b.ckpt_to_load_from)
self.assertEqual('aaa_bbb_shared_embedding',
- embedding_column_a.var_scope_name)
+ embedding_column_a.shared_embedding_collection_name)
self.assertEqual('aaa_bbb_shared_embedding',
- embedding_column_b.var_scope_name)
+ embedding_column_b.shared_embedding_collection_name)
self.assertIsNone(embedding_column_a.tensor_name_in_ckpt)
self.assertIsNone(embedding_column_b.tensor_name_in_ckpt)
self.assertIsNone(embedding_column_a.max_norm)
@@ -5399,9 +5399,9 @@ class SharedEmbeddingColumnTest(test.TestCase):
self.assertEqual('my_combiner', embedding_column_a.combiner)
self.assertEqual('my_combiner', embedding_column_b.combiner)
self.assertEqual('shared_embedding_collection_name',
- embedding_column_a.var_scope_name)
+ embedding_column_a.shared_embedding_collection_name)
self.assertEqual('shared_embedding_collection_name',
- embedding_column_b.var_scope_name)
+ embedding_column_b.shared_embedding_collection_name)
self.assertEqual('my_ckpt', embedding_column_a.ckpt_to_load_from)
self.assertEqual('my_ckpt', embedding_column_b.ckpt_to_load_from)
self.assertEqual('my_ckpt_tensor', embedding_column_a.tensor_name_in_ckpt)
@@ -5452,7 +5452,7 @@ class SharedEmbeddingColumnTest(test.TestCase):
self.assertEqual(embedding_dimension, embedding_column_a.dimension)
self.assertEqual('my_combiner', embedding_column_a.combiner)
self.assertEqual('shared_embedding_collection_name',
- embedding_column_a.var_scope_name)
+ embedding_column_a.shared_embedding_collection_name)
self.assertEqual('my_ckpt', embedding_column_a.ckpt_to_load_from)
self.assertEqual('my_ckpt_tensor', embedding_column_a.tensor_name_in_ckpt)
self.assertEqual(42., embedding_column_a.max_norm)
diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
index 5ad2ee5d5c..8b6b28bc77 100755
--- a/tensorflow/python/keras/BUILD
+++ b/tensorflow/python/keras/BUILD
@@ -136,7 +136,7 @@ py_library(
deps = [
":backend",
"//tensorflow/python/data",
- "//tensorflow/python/training/checkpointable:data_structures_base",
+ "//tensorflow/python/training/checkpointable:data_structures",
"@six_archive//:six",
],
)
diff --git a/tensorflow/python/keras/applications/densenet.py b/tensorflow/python/keras/applications/densenet.py
index f81f10719a..8df6d08611 100644
--- a/tensorflow/python/keras/applications/densenet.py
+++ b/tensorflow/python/keras/applications/densenet.py
@@ -31,7 +31,6 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import AveragePooling2D
from tensorflow.python.keras.layers import BatchNormalization
@@ -44,6 +43,7 @@ from tensorflow.python.keras.layers import Input
from tensorflow.python.keras.layers import MaxPooling2D
from tensorflow.python.keras.layers import ZeroPadding2D
from tensorflow.python.keras.models import Model
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.keras.utils.data_utils import get_file
from tensorflow.python.util.tf_export import tf_export
@@ -238,7 +238,7 @@ def DenseNet(blocks,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
diff --git a/tensorflow/python/keras/applications/inception_resnet_v2.py b/tensorflow/python/keras/applications/inception_resnet_v2.py
index fe1d0f2d4f..14e3b6aa60 100644
--- a/tensorflow/python/keras/applications/inception_resnet_v2.py
+++ b/tensorflow/python/keras/applications/inception_resnet_v2.py
@@ -31,7 +31,6 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import AveragePooling2D
from tensorflow.python.keras.layers import BatchNormalization
@@ -44,6 +43,7 @@ from tensorflow.python.keras.layers import Input
from tensorflow.python.keras.layers import Lambda
from tensorflow.python.keras.layers import MaxPooling2D
from tensorflow.python.keras.models import Model
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
@@ -354,7 +354,7 @@ def InceptionResNetV2(include_top=True,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
diff --git a/tensorflow/python/keras/applications/inception_v3.py b/tensorflow/python/keras/applications/inception_v3.py
index 857ad49dae..b5e28c781f 100644
--- a/tensorflow/python/keras/applications/inception_v3.py
+++ b/tensorflow/python/keras/applications/inception_v3.py
@@ -37,7 +37,6 @@ from tensorflow.python.keras import layers
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import AveragePooling2D
from tensorflow.python.keras.layers import BatchNormalization
@@ -48,6 +47,7 @@ from tensorflow.python.keras.layers import GlobalMaxPooling2D
from tensorflow.python.keras.layers import Input
from tensorflow.python.keras.layers import MaxPooling2D
from tensorflow.python.keras.models import Model
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
@@ -375,7 +375,7 @@ def InceptionV3(include_top=True,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
diff --git a/tensorflow/python/keras/applications/mobilenet.py b/tensorflow/python/keras/applications/mobilenet.py
index 9d845be0d5..e56c695a28 100644
--- a/tensorflow/python/keras/applications/mobilenet.py
+++ b/tensorflow/python/keras/applications/mobilenet.py
@@ -78,8 +78,7 @@ from tensorflow.python.keras import regularizers
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine.network import get_source_inputs
+from tensorflow.python.keras.engine.base_layer import InputSpec
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import BatchNormalization
from tensorflow.python.keras.layers import Conv2D
@@ -92,6 +91,7 @@ from tensorflow.python.keras.layers import Reshape
from tensorflow.python.keras.layers import ZeroPadding2D
from tensorflow.python.keras.models import Model
from tensorflow.python.keras.utils import conv_utils
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
@@ -317,7 +317,7 @@ def MobileNet(input_shape=None,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
diff --git a/tensorflow/python/keras/applications/nasnet.py b/tensorflow/python/keras/applications/nasnet.py
index b521bc6731..ff79b3a057 100644
--- a/tensorflow/python/keras/applications/nasnet.py
+++ b/tensorflow/python/keras/applications/nasnet.py
@@ -49,7 +49,6 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
from tensorflow.python.keras.applications.inception_v3 import preprocess_input
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import add
from tensorflow.python.keras.layers import AveragePooling2D
@@ -65,6 +64,7 @@ from tensorflow.python.keras.layers import MaxPooling2D
from tensorflow.python.keras.layers import SeparableConv2D
from tensorflow.python.keras.layers import ZeroPadding2D
from tensorflow.python.keras.models import Model
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
@@ -290,7 +290,7 @@ def NASNet(input_shape=None,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
diff --git a/tensorflow/python/keras/applications/resnet50.py b/tensorflow/python/keras/applications/resnet50.py
index 508550f445..6afc086812 100644
--- a/tensorflow/python/keras/applications/resnet50.py
+++ b/tensorflow/python/keras/applications/resnet50.py
@@ -34,7 +34,6 @@ from tensorflow.python.keras import layers
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
from tensorflow.python.keras.applications.imagenet_utils import preprocess_input
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import AveragePooling2D
from tensorflow.python.keras.layers import BatchNormalization
@@ -277,7 +276,7 @@ def ResNet50(include_top=True,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
diff --git a/tensorflow/python/keras/applications/vgg16.py b/tensorflow/python/keras/applications/vgg16.py
index 659a6533e6..cef0230da9 100644
--- a/tensorflow/python/keras/applications/vgg16.py
+++ b/tensorflow/python/keras/applications/vgg16.py
@@ -32,7 +32,6 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
from tensorflow.python.keras.applications.imagenet_utils import preprocess_input
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Conv2D
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.layers import Flatten
@@ -202,7 +201,7 @@ def VGG16(include_top=True,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
diff --git a/tensorflow/python/keras/applications/vgg19.py b/tensorflow/python/keras/applications/vgg19.py
index 5e27ab8fb1..c4031f5510 100644
--- a/tensorflow/python/keras/applications/vgg19.py
+++ b/tensorflow/python/keras/applications/vgg19.py
@@ -32,7 +32,6 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
from tensorflow.python.keras.applications.imagenet_utils import preprocess_input
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Conv2D
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.layers import Flatten
@@ -211,7 +210,7 @@ def VGG19(include_top=True,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
diff --git a/tensorflow/python/keras/applications/xception.py b/tensorflow/python/keras/applications/xception.py
index e1be8a3c46..01397cfac2 100644
--- a/tensorflow/python/keras/applications/xception.py
+++ b/tensorflow/python/keras/applications/xception.py
@@ -44,7 +44,6 @@ from tensorflow.python.keras import layers
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.python.keras.applications.imagenet_utils import decode_predictions
-from tensorflow.python.keras.engine.network import get_source_inputs
from tensorflow.python.keras.layers import Activation
from tensorflow.python.keras.layers import BatchNormalization
from tensorflow.python.keras.layers import Conv2D
@@ -55,6 +54,7 @@ from tensorflow.python.keras.layers import Input
from tensorflow.python.keras.layers import MaxPooling2D
from tensorflow.python.keras.layers import SeparableConv2D
from tensorflow.python.keras.models import Model
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
@@ -302,7 +302,7 @@ def Xception(include_top=True,
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
- inputs = get_source_inputs(input_tensor)
+ inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
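
All of the application models above now resolve `input_tensor` through `layer_utils.get_source_inputs`. A usage sketch of what that resolution buys, assuming the `tf.keras.applications` export of this era:

```python
# Sketch: when input_tensor has predecessors, the built model's inputs
# resolve back to the original Input, not to the intermediate tensor.
import tensorflow as tf

x = tf.keras.layers.Input(shape=(224, 224, 3))
scaled = tf.keras.layers.Lambda(lambda t: t / 255.0)(x)
model = tf.keras.applications.VGG16(
    include_top=False, weights=None, input_tensor=scaled)
assert model.inputs[0] is x  # traced via get_source_inputs
```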
diff --git a/tensorflow/python/keras/engine/__init__.py b/tensorflow/python/keras/engine/__init__.py
index ec7c083199..26aed34766 100644
--- a/tensorflow/python/keras/engine/__init__.py
+++ b/tensorflow/python/keras/engine/__init__.py
@@ -18,13 +18,13 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+# TODO(fchollet): Remove hourglass imports once external code is done importing
+# non-public APIs.
from tensorflow.python.keras.engine.base_layer import InputSpec
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_layer import Input
from tensorflow.python.keras.engine.input_layer import InputLayer
-from tensorflow.python.keras.engine.network import get_source_inputs
-from tensorflow.python.keras.engine.network import Network
-from tensorflow.python.keras.engine.training import Model
+from tensorflow.python.keras.utils.layer_utils import get_source_inputs
del absolute_import
del division
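
The hourglass imports keep old import sites working while the definitions move to their canonical modules; a quick compatibility sketch:

```python
# Both routes resolve to the same objects after this change.
from tensorflow.python.keras import engine
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils.layer_utils import get_source_inputs

assert engine.Layer is Layer
assert engine.get_source_inputs is get_source_inputs
```

Note that `Network` and `Model` are no longer re-exported here, so external code importing them from `keras.engine` must switch to `engine.network` and `engine.training`.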
diff --git a/tensorflow/python/keras/engine/network.py b/tensorflow/python/keras/engine/network.py
index 2b32fc9151..3edb8033ff 100644
--- a/tensorflow/python/keras/engine/network.py
+++ b/tensorflow/python/keras/engine/network.py
@@ -43,7 +43,7 @@ from tensorflow.python.keras.utils.io_utils import ask_to_proceed_with_overwrite
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.checkpointable import base as checkpointable
-from tensorflow.python.training.checkpointable import data_structures_base
+from tensorflow.python.training.checkpointable import data_structures
from tensorflow.python.training.checkpointable import util as checkpointable_utils
from tensorflow.python.util import nest
from tensorflow.python.util import tf_inspect
@@ -368,7 +368,7 @@ class Network(base_layer.Layer):
if isinstance(value, (
base_layer.Layer,
Network,
- data_structures_base.CheckpointableDataStructureBase)):
+ data_structures.CheckpointableDataStructure)):
try:
is_graph_network = self._is_graph_network
except AttributeError:
@@ -1518,47 +1518,6 @@ class Network(base_layer.Layer):
print_fn=print_fn)
-def get_source_inputs(tensor, layer=None, node_index=None):
- """Returns the list of input tensors necessary to compute `tensor`.
-
- Output will always be a list of tensors
- (potentially with 1 element).
-
- Arguments:
- tensor: The tensor to start from.
- layer: Origin layer of the tensor. Will be
- determined via tensor._keras_history if not provided.
- node_index: Origin node index of the tensor.
-
- Returns:
- List of input tensors.
- """
- if not hasattr(tensor, '_keras_history'):
- return tensor
-
- if layer is None or node_index:
- layer, node_index, _ = tensor._keras_history
- if not layer._inbound_nodes:
- return [tensor]
- else:
- node = layer._inbound_nodes[node_index]
- if not node.inbound_layers:
- # Reached an Input layer, stop recursion.
- return node.input_tensors
- else:
- source_tensors = []
- for i in range(len(node.inbound_layers)):
- x = node.input_tensors[i]
- layer = node.inbound_layers[i]
- node_index = node.node_indices[i]
- previous_sources = get_source_inputs(x, layer, node_index)
- # Avoid input redundancy.
- for x in previous_sources:
- if x not in source_tensors:
- source_tensors.append(x)
- return source_tensors
-
-
def _is_hdf5_filepath(filepath):
return filepath.endswith('.h5') or filepath.endswith('.keras')
diff --git a/tensorflow/python/keras/engine/sequential.py b/tensorflow/python/keras/engine/sequential.py
index 3ca8fdd326..89b40b5d38 100644
--- a/tensorflow/python/keras/engine/sequential.py
+++ b/tensorflow/python/keras/engine/sequential.py
@@ -24,10 +24,10 @@ import copy
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import layers as layer_module
from tensorflow.python.keras.engine import base_layer
-from tensorflow.python.keras.engine import network
from tensorflow.python.keras.engine.input_layer import Input
from tensorflow.python.keras.engine.input_layer import InputLayer
from tensorflow.python.keras.engine.training import Model
+from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
@@ -179,7 +179,7 @@ class Sequential(Model):
'use the functional API.')
self.outputs = [layer._inbound_nodes[-1].output_tensors[0]]
- self.inputs = network.get_source_inputs(self.outputs[0])
+ self.inputs = layer_utils.get_source_inputs(self.outputs[0])
elif self.outputs:
output_tensor = layer(self.outputs[0])
if isinstance(output_tensor, list):
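
With the `engine.network` import dropped, `Sequential` infers its inputs through the relocated helper; a small sketch of that code path, assuming the `tf.keras.models` export:

```python
# Sketch: adding a first layer with a known input shape triggers input
# inference via layer_utils.get_source_inputs on the layer's output.
import tensorflow as tf

model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(2, input_shape=(4,)))
print(model.inputs)  # the Input tensor inferred from the Dense layer
```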
diff --git a/tensorflow/python/keras/engine/topology_test.py b/tensorflow/python/keras/engine/topology_test.py
index f10a16e6dc..3eb69bd7f3 100644
--- a/tensorflow/python/keras/engine/topology_test.py
+++ b/tensorflow/python/keras/engine/topology_test.py
@@ -26,6 +26,8 @@ from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import base_layer
+from tensorflow.python.keras.engine import input_layer as input_layer_lib
+from tensorflow.python.keras.engine import network as network_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
@@ -62,7 +64,7 @@ class TopologyConstructionTest(test.TestCase):
inputs=True)
return inputs + 1
- x1 = keras.Input(shape=(1,))
+ x1 = input_layer_lib.Input(shape=(1,))
layer = MyLayer()
_ = layer.apply(x1)
@@ -70,7 +72,7 @@ class TopologyConstructionTest(test.TestCase):
self.assertEqual(len(layer.get_updates_for(x1)), 1)
self.assertEqual(len(layer.get_updates_for(None)), 1)
- x2 = keras.Input(shape=(1,))
+ x2 = input_layer_lib.Input(shape=(1,))
y2 = layer.apply(x2)
self.assertEqual(len(layer.updates), 3)
@@ -78,17 +80,17 @@ class TopologyConstructionTest(test.TestCase):
self.assertEqual(len(layer.get_updates_for(x2)), 1)
self.assertEqual(len(layer.get_updates_for(None)), 1)
- network = keras.engine.Network(x2, y2)
+ network = network_lib.Network(x2, y2)
self.assertEqual(len(network.updates), 2)
self.assertEqual(len(network.get_updates_for(x1)), 0)
self.assertEqual(len(network.get_updates_for(x2)), 1)
self.assertEqual(len(network.get_updates_for(None)), 1)
- x3 = keras.Input(shape=(1,))
+ x3 = input_layer_lib.Input(shape=(1,))
_ = layer.apply(x3)
self.assertEqual(len(network.updates), 2)
- x4 = keras.Input(shape=(1,))
+ x4 = input_layer_lib.Input(shape=(1,))
_ = network(x4)
self.assertEqual(len(network.updates), 3)
self.assertEqual(len(network.get_updates_for(x2)), 1)
@@ -104,7 +106,7 @@ class TopologyConstructionTest(test.TestCase):
self.assertEqual(len(network.get_updates_for(x4)), 2)
def test_get_updates_bn(self):
- x1 = keras.Input(shape=(1,))
+ x1 = input_layer_lib.Input(shape=(1,))
layer = keras.layers.BatchNormalization()
_ = layer.apply(x1)
@@ -134,7 +136,7 @@ class TopologyConstructionTest(test.TestCase):
inputs=True)
return inputs + 1
- x1 = keras.Input(shape=(1,))
+ x1 = input_layer_lib.Input(shape=(1,))
layer = MyLayer()
_ = layer.apply(x1)
@@ -142,7 +144,7 @@ class TopologyConstructionTest(test.TestCase):
self.assertEqual(len(layer.get_losses_for(x1)), 1)
self.assertEqual(len(layer.get_losses_for(None)), 1)
- x2 = keras.Input(shape=(1,))
+ x2 = input_layer_lib.Input(shape=(1,))
y2 = layer.apply(x2)
self.assertEqual(len(layer.losses), 3)
@@ -150,17 +152,17 @@ class TopologyConstructionTest(test.TestCase):
self.assertEqual(len(layer.get_losses_for(x2)), 1)
self.assertEqual(len(layer.get_losses_for(None)), 1)
- network = keras.engine.Network(x2, y2)
+ network = network_lib.Network(x2, y2)
self.assertEqual(len(network.losses), 2)
self.assertEqual(len(network.get_losses_for(x1)), 0)
self.assertEqual(len(network.get_losses_for(x2)), 1)
self.assertEqual(len(network.get_losses_for(None)), 1)
- x3 = keras.Input(shape=(1,))
+ x3 = input_layer_lib.Input(shape=(1,))
_ = layer.apply(x3)
self.assertEqual(len(network.losses), 2)
- x4 = keras.Input(shape=(1,))
+ x4 = input_layer_lib.Input(shape=(1,))
_ = network(x4)
self.assertEqual(len(network.losses), 3)
self.assertEqual(len(network.get_losses_for(x2)), 1)
@@ -177,8 +179,8 @@ class TopologyConstructionTest(test.TestCase):
def testTopologicalAttributes(self):
# test layer attributes / methods related to cross-layer connectivity.
- a = keras.Input(shape=(32,), name='input_a')
- b = keras.Input(shape=(32,), name='input_b')
+ a = input_layer_lib.Input(shape=(32,), name='input_a')
+ b = input_layer_lib.Input(shape=(32,), name='input_b')
# test input, output, input_shape, output_shape
test_layer = keras.layers.Dense(16, name='test_layer')
@@ -219,15 +221,15 @@ class TopologyConstructionTest(test.TestCase):
_ = new_dense.input_shape
with self.assertRaises(AttributeError):
new_dense = keras.layers.Dense(16)
- a = keras.Input(shape=(3, 32))
- a = keras.Input(shape=(5, 32))
+ a = input_layer_lib.Input(shape=(3, 32))
+ a = input_layer_lib.Input(shape=(5, 32))
a_2 = dense(a)
b_2 = dense(b)
_ = new_dense.input_shape
with self.assertRaises(AttributeError):
new_dense = keras.layers.Dense(16)
- a = keras.Input(shape=(3, 32))
- a = keras.Input(shape=(5, 32))
+ a = input_layer_lib.Input(shape=(3, 32))
+ a = input_layer_lib.Input(shape=(5, 32))
a_2 = dense(a)
b_2 = dense(b)
_ = new_dense.output_shape
@@ -239,7 +241,7 @@ class TopologyConstructionTest(test.TestCase):
def call(self, inputs):
return [inputs**2, inputs**3]
- x = keras.Input(shape=(32,))
+ x = input_layer_lib.Input(shape=(32,))
test_layer = PowersLayer()
p1, p2 = test_layer(x) # pylint: disable=not-callable
@@ -256,8 +258,8 @@ class TopologyConstructionTest(test.TestCase):
assert len(inputs) == 2
return inputs[0] + inputs[1]
- a = keras.Input(shape=(32,))
- b = keras.Input(shape=(32,))
+ a = input_layer_lib.Input(shape=(32,))
+ b = input_layer_lib.Input(shape=(32,))
test_layer = AddLayer()
y = test_layer([a, b]) # pylint: disable=not-callable
@@ -268,10 +270,10 @@ class TopologyConstructionTest(test.TestCase):
def testBasicNetwork(self):
# minimum viable network
- x = keras.Input(shape=(32,))
+ x = input_layer_lib.Input(shape=(32,))
dense = keras.layers.Dense(2)
y = dense(x)
- network = keras.engine.Network(x, y, name='dense_network')
+ network = network_lib.Network(x, y, name='dense_network')
# test basic attributes
self.assertEqual(network.name, 'dense_network')
@@ -282,7 +284,7 @@ class TopologyConstructionTest(test.TestCase):
self.assertEqual(network.non_trainable_weights, dense.non_trainable_weights)
# test callability on Input
- x_2 = keras.Input(shape=(32,))
+ x_2 = input_layer_lib.Input(shape=(32,))
y_2 = network(x_2)
self.assertEqual(y_2.get_shape().as_list(), [None, 2])
@@ -506,7 +508,7 @@ class TopologyConstructionTest(test.TestCase):
self.assertListEqual([x.shape for x in fn_outputs], [(10, 64), (10, 5)])
# test get_source_inputs
- self.assertListEqual(keras.engine.network.get_source_inputs(c), [a, b])
+ self.assertListEqual(keras.engine.get_source_inputs(c), [a, b])
# serialization / deserialization
json_config = model.to_json()
@@ -778,12 +780,12 @@ class TopologyConstructionTest(test.TestCase):
self.evaluate(getattr(b, '_keras_mask')))
self.assertAllEqual(self.evaluate(a * mask), self.evaluate(b))
else:
- x = keras.Input(shape=(32,))
+ x = input_layer_lib.Input(shape=(32,))
y = MaskedLayer()(x) # pylint: disable=not-callable
- network = keras.engine.Network(x, y)
+ network = network_lib.Network(x, y)
# test callability on Input
- x_2 = keras.Input(shape=(32,))
+ x_2 = input_layer_lib.Input(shape=(32,))
y_2 = network(x_2)
self.assertEqual(y_2.get_shape().as_list(), [None, 32])
@@ -797,14 +799,14 @@ class TopologyConstructionTest(test.TestCase):
def reg(x):
return math_ops.reduce_sum(x)
- net_a_input = keras.Input((2,))
+ net_a_input = input_layer_lib.Input((2,))
net_a = net_a_input
net_a = keras.layers.Dense(2, kernel_initializer='ones',
use_bias=False,
activity_regularizer=reg)(net_a)
model_a = keras.Model([net_a_input], [net_a])
- net_b_input = keras.Input((2,))
+ net_b_input = input_layer_lib.Input((2,))
net_b = model_a(net_b_input)
model_b = keras.Model([net_b_input], [net_b])
@@ -817,7 +819,7 @@ class TopologyConstructionTest(test.TestCase):
with self.test_session():
x_val = np.random.random((10, 5))
- x = keras.Input(shape=(5,))
+ x = input_layer_lib.Input(shape=(5,))
a = keras.layers.Dense(5, name='A')
b = keras.layers.Dense(5, name='B')
output = a(b(a(b(x))))
@@ -837,7 +839,7 @@ class TopologyConstructionTest(test.TestCase):
def test_layer_sharing_at_heterogenous_depth_with_concat(self):
with self.test_session():
input_shape = (16, 9, 3)
- input_layer = keras.Input(shape=input_shape)
+ input_layer = input_layer_lib.Input(shape=input_shape)
a = keras.layers.Dense(3, name='dense_A')
b = keras.layers.Dense(3, name='dense_B')
@@ -922,9 +924,9 @@ class DeferredModeTest(test.TestCase):
self.assertEqual(repr(x),
'<DeferredTensor \'x\' shape=(?, 2) dtype=float32>')
- @test_util.run_in_graph_and_eager_modes
+ @test_util.run_in_graph_and_eager_modes()
def testSimpleNetworkBuilding(self):
- inputs = keras.engine.Input(shape=(32,))
+ inputs = input_layer_lib.Input(shape=(32,))
if context.executing_eagerly():
self.assertIsInstance(inputs, base_layer.DeferredTensor)
self.assertEqual(inputs.dtype.name, 'float32')
@@ -937,8 +939,8 @@ class DeferredModeTest(test.TestCase):
self.assertEqual(x.shape.as_list(), [None, 2])
outputs = keras.layers.Dense(4)(x)
- network = keras.engine.Network(inputs, outputs)
- self.assertIsInstance(network, keras.engine.Network)
+ network = network_lib.Network(inputs, outputs)
+ self.assertIsInstance(network, network_lib.Network)
if context.executing_eagerly():
# It should be possible to call such a network on EagerTensors.
@@ -947,10 +949,10 @@ class DeferredModeTest(test.TestCase):
outputs = network(inputs)
self.assertEqual(outputs.shape.as_list(), [10, 4])
- @test_util.run_in_graph_and_eager_modes
+ @test_util.run_in_graph_and_eager_modes()
def testMultiIONetworkbuilding(self):
- input_a = keras.engine.Input(shape=(32,))
- input_b = keras.engine.Input(shape=(16,))
+ input_a = input_layer_lib.Input(shape=(32,))
+ input_b = input_layer_lib.Input(shape=(16,))
a = keras.layers.Dense(16)(input_a)
class AddLayer(keras.layers.Layer):
@@ -964,7 +966,7 @@ class DeferredModeTest(test.TestCase):
c = AddLayer()([a, input_b]) # pylint: disable=not-callable
c = keras.layers.Dense(2)(c)
- network = keras.engine.Network([input_a, input_b], [a, c])
+ network = network_lib.Network([input_a, input_b], [a, c])
if context.executing_eagerly():
a_val = constant_op.constant(
np.random.random((10, 32)).astype('float32'))
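
The tests now reach `Input` and `Network` through their defining modules instead of the `keras.engine` facade; the construction they exercise, extracted as a sketch (internal API):

```python
# Minimal functional Network built from the internal modules, mirroring
# testBasicNetwork above.
from tensorflow.python.keras import layers
from tensorflow.python.keras.engine import input_layer as input_layer_lib
from tensorflow.python.keras.engine import network as network_lib

x = input_layer_lib.Input(shape=(32,))
y = layers.Dense(2)(x)
net = network_lib.Network(x, y, name='dense_network')
assert net.name == 'dense_network'
```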
diff --git a/tensorflow/python/keras/layers/__init__.py b/tensorflow/python/keras/layers/__init__.py
index def9184de1..e3a686f45d 100644
--- a/tensorflow/python/keras/layers/__init__.py
+++ b/tensorflow/python/keras/layers/__init__.py
@@ -20,10 +20,10 @@ from __future__ import print_function
# Generic layers.
# pylint: disable=g-bad-import-order
-from tensorflow.python.keras.engine import Input
-from tensorflow.python.keras.engine import InputLayer
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.input_layer import Input
+from tensorflow.python.keras.engine.input_layer import InputLayer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
# Advanced activations.
from tensorflow.python.keras.layers.advanced_activations import LeakyReLU
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index bb52ed5ad0..eba10da6f3 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -23,8 +23,8 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import tf_export
diff --git a/tensorflow/python/keras/layers/convolutional.py b/tensorflow/python/keras/layers/convolutional.py
index f1df8d5e79..a57ac121ed 100644
--- a/tensorflow/python/keras/layers/convolutional.py
+++ b/tensorflow/python/keras/layers/convolutional.py
@@ -26,8 +26,8 @@ from tensorflow.python.keras import backend
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
# imports for backwards namespace compatibility
# pylint: disable=unused-import
from tensorflow.python.keras.layers.pooling import AveragePooling1D
diff --git a/tensorflow/python/keras/layers/convolutional_recurrent.py b/tensorflow/python/keras/layers/convolutional_recurrent.py
index c731508b3c..84d794cada 100644
--- a/tensorflow/python/keras/layers/convolutional_recurrent.py
+++ b/tensorflow/python/keras/layers/convolutional_recurrent.py
@@ -26,8 +26,8 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.layers.recurrent import _generate_dropout_mask
from tensorflow.python.keras.layers.recurrent import _standardize_args
from tensorflow.python.keras.layers.recurrent import RNN
diff --git a/tensorflow/python/keras/layers/core.py b/tensorflow/python/keras/layers/core.py
index f60064ed63..2bf6229ccb 100644
--- a/tensorflow/python/keras/layers/core.py
+++ b/tensorflow/python/keras/layers/core.py
@@ -33,8 +33,8 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
diff --git a/tensorflow/python/keras/layers/cudnn_recurrent.py b/tensorflow/python/keras/layers/cudnn_recurrent.py
index ad6594279d..cf2b0c476c 100644
--- a/tensorflow/python/keras/layers/cudnn_recurrent.py
+++ b/tensorflow/python/keras/layers/cudnn_recurrent.py
@@ -25,7 +25,7 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
+from tensorflow.python.keras.engine.base_layer import InputSpec
from tensorflow.python.keras.layers.recurrent import RNN
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_cudnn_rnn_ops
diff --git a/tensorflow/python/keras/layers/embeddings.py b/tensorflow/python/keras/layers/embeddings.py
index 25eeeee952..910fff720f 100644
--- a/tensorflow/python/keras/layers/embeddings.py
+++ b/tensorflow/python/keras/layers/embeddings.py
@@ -22,7 +22,7 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
diff --git a/tensorflow/python/keras/layers/local.py b/tensorflow/python/keras/layers/local.py
index 0983e35e21..0ebafe07cc 100644
--- a/tensorflow/python/keras/layers/local.py
+++ b/tensorflow/python/keras/layers/local.py
@@ -23,8 +23,8 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.util.tf_export import tf_export
diff --git a/tensorflow/python/keras/layers/noise.py b/tensorflow/python/keras/layers/noise.py
index a895caa25b..cb7cee3ebc 100644
--- a/tensorflow/python/keras/layers/noise.py
+++ b/tensorflow/python/keras/layers/noise.py
@@ -21,7 +21,7 @@ from __future__ import print_function
import numpy as np
from tensorflow.python.keras import backend as K
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
diff --git a/tensorflow/python/keras/layers/normalization.py b/tensorflow/python/keras/layers/normalization.py
index 28cedec338..d4c213eedd 100644
--- a/tensorflow/python/keras/layers/normalization.py
+++ b/tensorflow/python/keras/layers/normalization.py
@@ -26,8 +26,8 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
diff --git a/tensorflow/python/keras/layers/pooling.py b/tensorflow/python/keras/layers/pooling.py
index 10a82b285e..912e8bd619 100644
--- a/tensorflow/python/keras/layers/pooling.py
+++ b/tensorflow/python/keras/layers/pooling.py
@@ -20,8 +20,8 @@ from __future__ import print_function
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
diff --git a/tensorflow/python/keras/layers/recurrent.py b/tensorflow/python/keras/layers/recurrent.py
index 7e509fb451..32d25c5a65 100644
--- a/tensorflow/python/keras/layers/recurrent.py
+++ b/tensorflow/python/keras/layers/recurrent.py
@@ -29,8 +29,8 @@ from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
diff --git a/tensorflow/python/keras/layers/serialization.py b/tensorflow/python/keras/layers/serialization.py
index be306c0af7..7c45e08b5c 100644
--- a/tensorflow/python/keras/layers/serialization.py
+++ b/tensorflow/python/keras/layers/serialization.py
@@ -20,8 +20,8 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-from tensorflow.python.keras.engine import Input
-from tensorflow.python.keras.engine import InputLayer
+from tensorflow.python.keras.engine.input_layer import Input
+from tensorflow.python.keras.engine.input_layer import InputLayer
from tensorflow.python.keras.layers.advanced_activations import *
from tensorflow.python.keras.layers.convolutional import *
from tensorflow.python.keras.layers.convolutional_recurrent import *
diff --git a/tensorflow/python/keras/layers/wrappers.py b/tensorflow/python/keras/layers/wrappers.py
index 00d0fc67d1..22e1cf0b36 100644
--- a/tensorflow/python/keras/layers/wrappers.py
+++ b/tensorflow/python/keras/layers/wrappers.py
@@ -23,8 +23,8 @@ import copy
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend as K
-from tensorflow.python.keras.engine import InputSpec
-from tensorflow.python.keras.engine import Layer
+from tensorflow.python.keras.engine.base_layer import InputSpec
+from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.layers.recurrent import _standardize_args
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
diff --git a/tensorflow/python/keras/optimizers.py b/tensorflow/python/keras/optimizers.py
index f58aeaea1a..34951791b5 100644
--- a/tensorflow/python/keras/optimizers.py
+++ b/tensorflow/python/keras/optimizers.py
@@ -35,7 +35,7 @@ from tensorflow.python.ops import state_ops
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.training import optimizer as tf_optimizer_module
from tensorflow.python.training import training_util
-from tensorflow.python.training.checkpointable import base as checkpointable
+from tensorflow.python.training.checkpointable import tracking as checkpointable
from tensorflow.python.util.tf_export import tf_export
diff --git a/tensorflow/python/keras/utils/layer_utils.py b/tensorflow/python/keras/utils/layer_utils.py
index 88daff0461..1f28c59ea4 100644
--- a/tensorflow/python/keras/utils/layer_utils.py
+++ b/tensorflow/python/keras/utils/layer_utils.py
@@ -26,6 +26,47 @@ from tensorflow.python.keras.utils.conv_utils import convert_kernel
from tensorflow.python.util.tf_export import tf_export
+def get_source_inputs(tensor, layer=None, node_index=None):
+ """Returns the list of input tensors necessary to compute `tensor`.
+
+ Output will always be a list of tensors
+ (potentially with 1 element).
+
+ Arguments:
+ tensor: The tensor to start from.
+ layer: Origin layer of the tensor. Will be
+ determined via tensor._keras_history if not provided.
+ node_index: Origin node index of the tensor.
+
+ Returns:
+ List of input tensors.
+ """
+ if not hasattr(tensor, '_keras_history'):
+ return tensor
+
+ if layer is None or node_index:
+ layer, node_index, _ = tensor._keras_history
+ if not layer._inbound_nodes:
+ return [tensor]
+ else:
+ node = layer._inbound_nodes[node_index]
+ if not node.inbound_layers:
+ # Reached an Input layer, stop recursion.
+ return node.input_tensors
+ else:
+ source_tensors = []
+ for i in range(len(node.inbound_layers)):
+ x = node.input_tensors[i]
+ layer = node.inbound_layers[i]
+ node_index = node.node_indices[i]
+ previous_sources = get_source_inputs(x, layer, node_index)
+ # Avoid input redundancy.
+ for x in previous_sources:
+ if x not in source_tensors:
+ source_tensors.append(x)
+ return source_tensors
+
+
def count_params(weights):
"""Count the total number of scalars composing the weights.
diff --git a/tensorflow/python/kernel_tests/distributions/beta_test.py b/tensorflow/python/kernel_tests/distributions/beta_test.py
index 4bc8303ebb..36f3ffc333 100644
--- a/tensorflow/python/kernel_tests/distributions/beta_test.py
+++ b/tensorflow/python/kernel_tests/distributions/beta_test.py
@@ -21,6 +21,7 @@ import importlib
import numpy as np
from tensorflow.python.client import session
+from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
@@ -282,6 +283,18 @@ class BetaTest(test.TestCase):
self.assertAllClose(
np.cov(sample_values, rowvar=0), stats.beta.var(a, b), atol=1e-1)
+ def testBetaFullyReparameterized(self):
+ a = constant_op.constant(1.0)
+ b = constant_op.constant(2.0)
+ with backprop.GradientTape() as tape:
+ tape.watch(a)
+ tape.watch(b)
+ beta = beta_lib.Beta(a, b)
+ samples = beta.sample(100)
+ grad_a, grad_b = tape.gradient(samples, [a, b])
+ self.assertIsNotNone(grad_a)
+ self.assertIsNotNone(grad_b)
+
# Test that sampling with the same seed twice gives the same results.
def testBetaSampleMultipleTimes(self):
with self.test_session():
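
The new test above is the pattern repeated across these distribution tests; condensed into a standalone sketch using the same internal imports:

```python
# With FULLY_REPARAMETERIZED sampling, gradients of samples w.r.t. the
# distribution parameters are defined rather than None.
from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.ops.distributions import beta as beta_lib

a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
with backprop.GradientTape() as tape:
  tape.watch(a)
  tape.watch(b)
  samples = beta_lib.Beta(a, b).sample(100)
grad_a, grad_b = tape.gradient(samples, [a, b])
assert grad_a is not None and grad_b is not None
```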
diff --git a/tensorflow/python/kernel_tests/distributions/dirichlet_test.py b/tensorflow/python/kernel_tests/distributions/dirichlet_test.py
index 54cada9bc0..67ed0447ed 100644
--- a/tensorflow/python/kernel_tests/distributions/dirichlet_test.py
+++ b/tensorflow/python/kernel_tests/distributions/dirichlet_test.py
@@ -20,6 +20,7 @@ import importlib
import numpy as np
+from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
@@ -264,6 +265,15 @@ class DirichletTest(test.TestCase):
a=1., b=2.).cdf)[0],
0.01)
+ def testDirichletFullyReparameterized(self):
+ alpha = constant_op.constant([1.0, 2.0, 3.0])
+ with backprop.GradientTape() as tape:
+ tape.watch(alpha)
+ dirichlet = dirichlet_lib.Dirichlet(alpha)
+ samples = dirichlet.sample(100)
+ grad_alpha = tape.gradient(samples, alpha)
+ self.assertIsNotNone(grad_alpha)
+
def testDirichletDirichletKL(self):
conc1 = np.array([[1., 2., 3., 1.5, 2.5, 3.5],
[1.5, 2.5, 3.5, 4.5, 5.5, 6.5]])
diff --git a/tensorflow/python/kernel_tests/distributions/gamma_test.py b/tensorflow/python/kernel_tests/distributions/gamma_test.py
index 66ae076a04..297e20264c 100644
--- a/tensorflow/python/kernel_tests/distributions/gamma_test.py
+++ b/tensorflow/python/kernel_tests/distributions/gamma_test.py
@@ -21,6 +21,7 @@ import importlib
import numpy as np
+from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
@@ -265,6 +266,18 @@ class GammaTest(test.TestCase):
stats.gamma.var(alpha_v, scale=1 / beta_v),
atol=.15)
+ def testGammaFullyReparameterized(self):
+ alpha = constant_op.constant(4.0)
+ beta = constant_op.constant(3.0)
+ with backprop.GradientTape() as tape:
+ tape.watch(alpha)
+ tape.watch(beta)
+ gamma = gamma_lib.Gamma(concentration=alpha, rate=beta)
+ samples = gamma.sample(100)
+ grad_alpha, grad_beta = tape.gradient(samples, [alpha, beta])
+ self.assertIsNotNone(grad_alpha)
+ self.assertIsNotNone(grad_beta)
+
def testGammaSampleMultiDimensional(self):
with self.test_session():
alpha_v = np.array([np.arange(1, 101, dtype=np.float32)]) # 1 x 100
diff --git a/tensorflow/python/kernel_tests/distributions/student_t_test.py b/tensorflow/python/kernel_tests/distributions/student_t_test.py
index c4931d0b79..05590542ef 100644
--- a/tensorflow/python/kernel_tests/distributions/student_t_test.py
+++ b/tensorflow/python/kernel_tests/distributions/student_t_test.py
@@ -23,6 +23,7 @@ import math
import numpy as np
+from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
@@ -454,6 +455,21 @@ class StudentTTest(test.TestCase):
return
self.assertNear(stats.t.pdf(np.pi, 3., loc=np.pi), mean_pdf_val, err=1e-6)
+ def testFullyReparameterized(self):
+ df = constant_op.constant(2.0)
+ mu = constant_op.constant(1.0)
+ sigma = constant_op.constant(3.0)
+ with backprop.GradientTape() as tape:
+ tape.watch(df)
+ tape.watch(mu)
+ tape.watch(sigma)
+ student = student_t.StudentT(df=df, loc=mu, scale=sigma)
+ samples = student.sample(100)
+ grad_df, grad_mu, grad_sigma = tape.gradient(samples, [df, mu, sigma])
+ self.assertIsNotNone(grad_df)
+ self.assertIsNotNone(grad_mu)
+ self.assertIsNotNone(grad_sigma)
+
def testPdfOfSampleMultiDims(self):
student = student_t.StudentT(df=[7., 11.], loc=[[5.], [6.]], scale=3.)
self.assertAllEqual([], student.event_shape)
diff --git a/tensorflow/python/ops/conv2d_benchmark.py b/tensorflow/python/ops/conv2d_benchmark.py
index 907df85cd9..aacdaa7ad0 100644
--- a/tensorflow/python/ops/conv2d_benchmark.py
+++ b/tensorflow/python/ops/conv2d_benchmark.py
@@ -21,6 +21,8 @@ from __future__ import print_function
import itertools
import time
+from tensorflow.core.protobuf import config_pb2
+from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
@@ -28,22 +30,32 @@ from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
+from tensorflow.python.platform import flags
from tensorflow.python.platform import test
+FLAGS = flags.FLAGS
-def build_graph(device, input_shape, filter_shape, strides, padding, dtype,
- num_iters, warmup_iters):
+flags.DEFINE_boolean(
+ "enable_layout_optimizer", False,
+ "If true, enables layout optimizer to update input data format for faster "
+ "execution of convolution ops.")
+
+
+def build_graph(device, dtype, data_format, input_shape, filter_shape, strides,
+ padding, num_iters, warmup_iters):
"""builds a graph containing a sequence of conv2d operations.
Args:
device: String, the device to run on.
+ dtype: Data type for the convolution.
+ data_format: A string from: "NHWC" or "NCHW". Data format for input and
+ output data.
input_shape: Shape of the input tensor.
filter_shape: Shape of the filter tensor.
strides: A list of ints. 1-D of length 4. The stride of sliding
window for each dimension of input.
padding: A string from: "SAME", "VALID". The type of padding
algorithm to use.
- dtype: Data type for the convolution.
num_iters: number of iterations to run conv2d.
warmup_iters: number of iterations for warmup runs.
@@ -57,22 +69,23 @@ def build_graph(device, input_shape, filter_shape, strides, padding, dtype,
random_ops.truncated_normal(filter_shape, dtype=dtype))
outputs = []
- conv2d_op = nn_ops.conv2d(inp, filt, strides, padding, data_format="NHWC")
+ conv2d_op = nn_ops.conv2d(
+ inp, filt, strides, padding, data_format=data_format)
outputs.append(conv2d_op)
for _ in range(1, num_iters):
with ops.control_dependencies([conv2d_op]):
conv2d_op = nn_ops.conv2d(
- inp, filt, strides, padding, data_format="NHWC")
+ inp, filt, strides, padding, data_format=data_format)
outputs.append(conv2d_op)
warmup_groups = []
warmup_conv2d_op = nn_ops.conv2d(
- inp, filt, strides, padding, data_format="NHWC")
+ inp, filt, strides, padding, data_format=data_format)
warmup_groups.append(warmup_conv2d_op)
for _ in range(1, warmup_iters):
with ops.control_dependencies([warmup_conv2d_op]):
warmup_conv2d_op = nn_ops.conv2d(
- inp, filt, strides, padding, data_format="NHWC")
+ inp, filt, strides, padding, data_format=data_format)
warmup_groups.append(warmup_conv2d_op)
return control_flow_ops.group(*warmup_groups), control_flow_ops.group(
*outputs)
@@ -81,12 +94,15 @@ def build_graph(device, input_shape, filter_shape, strides, padding, dtype,
class Conv2DBenchmark(test.Benchmark):
"""Benchmark conv2d!"""
- def _run_graph(self, device, input_shape, filter_shape, strides, padding,
- dtype, num_iters, warmup_iters):
+ def _run_graph(self, device, dtype, data_format, input_shape, filter_shape,
+ strides, padding, num_iters, warmup_iters):
"""runs the graph and print its execution time.
Args:
device: String, the device to run on.
+ dtype: Data type for the convolution.
+ data_format: A string from: "NHWC" or "NCHW". Data format for input and
+ output data.
input_shape: Shape of the input tensor.
filter_shape: Shape of the filter tensor.
strides: A list of ints. 1-D of length 4. The stride of sliding
@@ -94,7 +110,6 @@ class Conv2DBenchmark(test.Benchmark):
      padding: A string from: "SAME", "VALID". The type of padding
        algorithm to use.
- dtype: Data type for the convolution.
num_iters: number of iterations to run conv2d.
warmup_iters: number of iterations for warmup runs.
@@ -103,10 +118,27 @@ class Conv2DBenchmark(test.Benchmark):
"""
graph = ops.Graph()
with graph.as_default():
- warmup_outputs, outputs = build_graph(device, input_shape, filter_shape,
- strides, padding, dtype, num_iters,
- warmup_iters)
- with session_lib.Session(graph=graph) as session:
+ warmup_outputs, outputs = build_graph(device, dtype, data_format,
+ input_shape, filter_shape, strides,
+ padding, num_iters, warmup_iters)
+
+ config = config_pb2.ConfigProto()
+ config.graph_options.optimizer_options.opt_level = -1
+ rewrite_options = config.graph_options.rewrite_options
+
+ # Disable layout optimizer to not change input data_format.
+ rewrite_options.layout_optimizer = (
+ rewriter_config_pb2.RewriterConfig.ON if FLAGS.enable_layout_optimizer
+ else rewriter_config_pb2.RewriterConfig.OFF)
+ # Convolution ops are effectively noop in the test graph as we are not
+ # fetching the convolution outputs. Disable dependency optimizer to not
+ # remove the conv ops.
+ rewrite_options.dependency_optimization = (
+ rewriter_config_pb2.RewriterConfig.OFF)
+
+ with session_lib.Session(graph=graph, config=config) as session:
+ # TODO(hinsu): Use run_op_benchmark method from test.Benchmark to run
+ # benchmark along with warmup.
variables.global_variables_initializer().run()
# warmup runs
session.run(warmup_outputs)
@@ -114,20 +146,21 @@ class Conv2DBenchmark(test.Benchmark):
start_time = time.time()
session.run(outputs)
duration = (time.time() - start_time) / num_iters
- print("%s %s inputshape:%s filtershape:%s strides:%s padding:%s "
+ print("%s %s %s inputshape:%s filtershape:%s strides:%s padding:%s "
"%d iters: %.8f sec" %
- (device, str(dtype), str(input_shape).replace(" ", ""),
- str(filter_shape).replace(" ", ""),
+ (device, str(dtype), data_format, str(input_shape).replace(
+ " ", ""), str(filter_shape).replace(" ", ""),
str(strides).replace(" ", ""), padding, num_iters, duration))
name_template = (
- "conv2d_{device}_{datatype}_input_shape_{inputshape}_"
+ "conv2d_{device}_{datatype}_{data_format}_input_shape_{inputshape}_"
"filter_shape_{filtershape}_strides_{strides}_padding_{padding}")
self.report_benchmark(
name=name_template.format(
device=device,
datatype=str(dtype),
+ data_format=str(data_format),
inputshape=str(input_shape).replace(" ", ""),
filtershape=str(filter_shape).replace(" ", ""),
strides=str(strides).replace(" ", ""),
@@ -140,24 +173,37 @@ class Conv2DBenchmark(test.Benchmark):
def benchmark_conv2d(self):
print("conv2d benchmark:")
- h = 500
- w = 500
- fh = 3
- fw = 3
- input_shapes = []
- filter_shapes = []
data_types = [dtypes.float32, dtypes.float16]
- for b, c in itertools.product([4, 16, 32], [i for i in range(3, 16)]):
- input_shapes += [[b, h, w, c]]
- filter_shapes += [[fh, fw, c, b]]
- strides = [[1, 2, 2, 1]]
+ data_formats = ["NHWC", "NCHW"]
+ in_channels = list(range(3, 16))
+ out_channels = [4, 16, 32]
+ hw_strides = [[2, 2]]
paddings = ["VALID", "SAME"]
- for ishape, fshape in zip(input_shapes, filter_shapes):
- for dtype in data_types:
- for stride in strides:
- for padding in paddings:
- self._run_graph("gpu", ishape, fshape, stride, padding, dtype, 80,
- 2)
+
+ args_lists = [
+ data_types, data_formats, in_channels, out_channels, hw_strides,
+ paddings
+ ]
+ for args in itertools.product(*args_lists):
+ dtype, data_format, in_channel, out_channel, hw_stride, padding = args
+
+ # Keep batch size same as out channels just to reduce the number of
+ # different configurations to benchmark.
+ batch_size = out_channel
+ h, w, fh, fw = 500, 500, 3, 3
+ if data_format == "NHWC":
+ ishape = [batch_size, h, w, in_channel]
+ stride = [1] + hw_stride + [1]
+ elif data_format == "NCHW":
+ ishape = [batch_size, in_channel, h, w]
+ stride = [1, 1] + hw_stride
+ else:
+ raise ValueError("Unknown data_format: " + str(data_format))
+ fshape = [fh, fw, in_channel, out_channel]
+ num_iters = 80
+ warmup_iters = 2
+ self._run_graph("gpu", dtype, data_format, ishape, fshape, stride,
+ padding, num_iters, warmup_iters)
if __name__ == "__main__":
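
The benchmark now pins the Grappler behavior explicitly; the session config it builds, extracted as a sketch:

```python
# Layout optimizer is toggled by --enable_layout_optimizer; the dependency
# optimizer is forced off so the unfetched conv2d ops are not pruned away.
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2

config = config_pb2.ConfigProto()
config.graph_options.optimizer_options.opt_level = -1  # OptimizerOptions.L0
rewrite_options = config.graph_options.rewrite_options
rewrite_options.layout_optimizer = rewriter_config_pb2.RewriterConfig.OFF
rewrite_options.dependency_optimization = (
    rewriter_config_pb2.RewriterConfig.OFF)
# Then: session_lib.Session(graph=graph, config=config) runs warmup + timing.
```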
diff --git a/tensorflow/python/ops/distributions/beta.py b/tensorflow/python/ops/distributions/beta.py
index 0d8a75ce23..99d30b0bd1 100644
--- a/tensorflow/python/ops/distributions/beta.py
+++ b/tensorflow/python/ops/distributions/beta.py
@@ -89,13 +89,19 @@ class Beta(distribution.Distribution):
Make sure to round the samples to `np.finfo(dtype).tiny` before computing the
density.
+ Samples of this distribution are reparameterized (pathwise differentiable).
+ The derivatives are computed using the approach described in the paper
+
+ [Michael Figurnov, Shakir Mohamed, Andriy Mnih.
+ Implicit Reparameterization Gradients, 2018](https://arxiv.org/abs/1805.08498)
+
#### Examples
```python
# Create a batch of three Beta distributions.
alpha = [1, 2, 3]
beta = [1, 2, 3]
- dist = Beta(alpha, beta)
+ dist = tf.distributions.Beta(alpha, beta)
dist.sample([4, 5]) # Shape [4, 5, 3]
@@ -111,7 +117,7 @@ class Beta(distribution.Distribution):
# Create batch_shape=[2, 3] via parameter broadcast:
alpha = [[1.], [2]] # Shape [2, 1]
beta = [3., 4, 5] # Shape [3]
- dist = Beta(alpha, beta)
+ dist = tf.distributions.Beta(alpha, beta)
# alpha broadcast as: [[1., 1, 1,],
# [2, 2, 2]]
@@ -127,6 +133,18 @@ class Beta(distribution.Distribution):
dist.prob(x) # Shape [2, 3]
```
+ Compute the gradients of samples w.r.t. the parameters:
+
+ ```python
+ alpha = tf.constant(1.0)
+ beta = tf.constant(2.0)
+ dist = tf.distributions.Beta(alpha, beta)
+ samples = dist.sample(5) # Shape [5]
+ loss = tf.reduce_mean(tf.square(samples)) # Arbitrary loss function
+ # Unbiased stochastic gradients of the loss function
+ grads = tf.gradients(loss, [alpha, beta])
+ ```
+
"""
def __init__(self,
@@ -170,7 +188,7 @@ class Beta(distribution.Distribution):
dtype=self._total_concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
- reparameterization_type=distribution.NOT_REPARAMETERIZED,
+ reparameterization_type=distribution.FULLY_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration1,
self._concentration0,
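
The flag flip is observable on the public object; a sketch against the TF 1.x `tf.distributions` namespace:

```python
# Beta now advertises pathwise-differentiable samples.
import tensorflow as tf

dist = tf.distributions.Beta(1.0, 2.0)
assert (dist.reparameterization_type ==
        tf.distributions.FULLY_REPARAMETERIZED)
```

The same flip appears for `Dirichlet` and `Gamma` below, with matching docstring updates for `StudentT`.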
diff --git a/tensorflow/python/ops/distributions/dirichlet.py b/tensorflow/python/ops/distributions/dirichlet.py
index d45a05063b..9104a1d071 100644
--- a/tensorflow/python/ops/distributions/dirichlet.py
+++ b/tensorflow/python/ops/distributions/dirichlet.py
@@ -95,13 +95,19 @@ class Dirichlet(distribution.Distribution):
Make sure to round the samples to `np.finfo(dtype).tiny` before computing the
density.
+ Samples of this distribution are reparameterized (pathwise differentiable).
+ The derivatives are computed using the approach described in the paper
+
+ [Michael Figurnov, Shakir Mohamed, Andriy Mnih.
+ Implicit Reparameterization Gradients, 2018](https://arxiv.org/abs/1805.08498)
+
#### Examples
```python
# Create a single trivariate Dirichlet, with the 3rd class being three times
# more frequent than the first. I.e., batch_shape=[], event_shape=[3].
alpha = [1., 2, 3]
- dist = Dirichlet(alpha)
+ dist = tf.distributions.Dirichlet(alpha)
dist.sample([4, 5]) # shape: [4, 5, 3]
@@ -123,7 +129,7 @@ class Dirichlet(distribution.Distribution):
# Create batch_shape=[2], event_shape=[3]:
alpha = [[1., 2, 3],
[4, 5, 6]] # shape: [2, 3]
- dist = Dirichlet(alpha)
+ dist = tf.distributions.Dirichlet(alpha)
dist.sample([4, 5]) # shape: [4, 5, 2, 3]
@@ -134,6 +140,17 @@ class Dirichlet(distribution.Distribution):
dist.prob(x) # shape: [2]
```
+ Compute the gradients of samples w.r.t. the parameters:
+
+ ```python
+ alpha = tf.constant([1.0, 2.0, 3.0])
+ dist = tf.distributions.Dirichlet(alpha)
+ samples = dist.sample(5) # Shape [5, 3]
+ loss = tf.reduce_mean(tf.square(samples)) # Arbitrary loss function
+ # Unbiased stochastic gradients of the loss function
+ grads = tf.gradients(loss, alpha)
+ ```
+
"""
def __init__(self,
@@ -170,7 +187,7 @@ class Dirichlet(distribution.Distribution):
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
- reparameterization_type=distribution.NOT_REPARAMETERIZED,
+ reparameterization_type=distribution.FULLY_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration,
self._total_concentration],
diff --git a/tensorflow/python/ops/distributions/gamma.py b/tensorflow/python/ops/distributions/gamma.py
index 4f05b58fdb..b631f0247c 100644
--- a/tensorflow/python/ops/distributions/gamma.py
+++ b/tensorflow/python/ops/distributions/gamma.py
@@ -91,11 +91,29 @@ class Gamma(distribution.Distribution):
This should only be noticeable when the `concentration` is very small, or the
`rate` is very large. See note in `tf.random_gamma` docstring.
+ Samples of this distribution are reparameterized (pathwise differentiable).
+ The derivatives are computed using the approach described in the paper
+
+ [Michael Figurnov, Shakir Mohamed, Andriy Mnih.
+ Implicit Reparameterization Gradients, 2018](https://arxiv.org/abs/1805.08498)
+
#### Examples
```python
- dist = Gamma(concentration=3.0, rate=2.0)
- dist2 = Gamma(concentration=[3.0, 4.0], rate=[2.0, 3.0])
+ dist = tf.distributions.Gamma(concentration=3.0, rate=2.0)
+ dist2 = tf.distributions.Gamma(concentration=[3.0, 4.0], rate=[2.0, 3.0])
+ ```
+
+ Compute the gradients of samples w.r.t. the parameters:
+
+ ```python
+ concentration = tf.constant(3.0)
+ rate = tf.constant(2.0)
+ dist = tf.distributions.Gamma(concentration, rate)
+ samples = dist.sample(5) # Shape [5]
+ loss = tf.reduce_mean(tf.square(samples)) # Arbitrary loss function
+ # Unbiased stochastic gradients of the loss function
+ grads = tf.gradients(loss, [concentration, rate])
```
"""
@@ -144,7 +162,7 @@ class Gamma(distribution.Distribution):
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
- reparameterization_type=distribution.NOT_REPARAMETERIZED,
+ reparameterization_type=distribution.FULLY_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration,
self._rate],
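The `reparameterization_type` flip from `NOT_REPARAMETERIZED` to `FULLY_REPARAMETERIZED` is what downstream code keys on. A hedged sketch of how a caller might branch on the flag, falling back to a score-function surrogate when pathwise gradients are unavailable (the branch itself is illustrative, not part of this change):

```python
import tensorflow as tf

dist = tf.distributions.Gamma(concentration=3.0, rate=2.0)
samples = dist.sample(10)
if dist.reparameterization_type == tf.distributions.FULLY_REPARAMETERIZED:
  # Pathwise estimator: gradients flow through the samples themselves.
  loss = tf.reduce_mean(tf.square(samples))
else:
  # Score-function (REINFORCE) surrogate for non-reparameterized samplers.
  loss = tf.reduce_mean(
      tf.stop_gradient(tf.square(samples)) * dist.log_prob(samples))
grads = tf.gradients(loss, [dist.concentration, dist.rate])
```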
diff --git a/tensorflow/python/ops/distributions/student_t.py b/tensorflow/python/ops/distributions/student_t.py
index 20a2d16181..e0cf6f86f1 100644
--- a/tensorflow/python/ops/distributions/student_t.py
+++ b/tensorflow/python/ops/distributions/student_t.py
@@ -80,6 +80,12 @@ class StudentT(distribution.Distribution):
variance. However it is not actually the std. deviation; the Student's
t-distribution std. dev. is `scale sqrt(df / (df - 2))` when `df > 2`.
+ Samples of this distribution are reparameterized (pathwise differentiable).
+ The derivatives are computed using the approach described in the paper
+
+ [Michael Figurnov, Shakir Mohamed, Andriy Mnih.
+ Implicit Reparameterization Gradients, 2018](https://arxiv.org/abs/1805.08498)
+
#### Examples
Examples of initialization of one or a batch of distributions.
@@ -118,6 +124,19 @@ class StudentT(distribution.Distribution):
dist.prob(3.0)
```
+ Compute the gradients of samples w.r.t. the parameters:
+
+ ```python
+ df = tf.constant(2.0)
+ loc = tf.constant(2.0)
+ scale = tf.constant(11.0)
+ dist = tf.distributions.StudentT(df=df, loc=loc, scale=scale)
+ samples = dist.sample(5) # Shape [5]
+ loss = tf.reduce_mean(tf.square(samples)) # Arbitrary loss function
+ # Unbiased stochastic gradients of the loss function
+ grads = tf.gradients(loss, [df, loc, scale])
+ ```
+
"""
# pylint: enable=line-too-long
@@ -168,7 +187,7 @@ class StudentT(distribution.Distribution):
(self._df, self._loc, self._scale))
super(StudentT, self).__init__(
dtype=self._scale.dtype,
- reparameterization_type=distribution.NOT_REPARAMETERIZED,
+ reparameterization_type=distribution.FULLY_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
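Student's t is a slightly different case from Beta, Dirichlet, and Gamma: `loc` and `scale` enter through an explicit affine path, so only `df` needs the implicit CDF identity. A sketch of that decomposition, again with SciPy only for illustration:

```python
import numpy as np
from scipy.stats import t as student_t

df, loc, scale, eps = 2.0, 2.0, 11.0, 1e-5
u = 0.3                                 # Fixed uniform noise.
std = student_t.ppf(u, df)              # Standardized sample; depends only on df.
z = loc + scale * std                   # Explicit path: dz/dloc = 1, dz/dscale = std.
# Only df requires the implicit identity dz/ddf = -scale * (dF/ddf) / (dF/dstd).
dF_ddf = (student_t.cdf(std, df + eps) -
          student_t.cdf(std, df - eps)) / (2 * eps)
dz_ddf = -scale * dF_ddf / student_t.pdf(std, df)
```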
diff --git a/tensorflow/python/ops/gradients_impl.py b/tensorflow/python/ops/gradients_impl.py
index ee7a98c60b..99909ac38e 100644
--- a/tensorflow/python/ops/gradients_impl.py
+++ b/tensorflow/python/ops/gradients_impl.py
@@ -49,6 +49,7 @@ from tensorflow.python.ops import logging_ops # pylint: disable=unused-import
from tensorflow.python.ops import manip_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_ops
+from tensorflow.python.ops import random_grad # pylint: disable=unused-import
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import spectral_grad # pylint: disable=unused-import
from tensorflow.python.ops import tensor_array_ops
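The `random_grad` import exists purely for its side effect: importing the module runs the gradient registrations for the sampling kernels, which is what lets `tf.gradients` differentiate through ops like `tf.random_gamma` (hence the `unused-import` pylint suppression, matching the neighboring grad modules). A user-level sketch of what the registration enables, assuming a build that includes these registrations:

```python
import tensorflow as tf

alpha = tf.constant([2.0, 3.0])
samples = tf.random_gamma([10], alpha)  # One column of samples per alpha.
# Differentiable only because importing random_grad registered a gradient
# function for the underlying sampling op.
grads = tf.gradients(tf.reduce_mean(samples), alpha)
```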
diff --git a/tensorflow/python/ops/math_ops.py b/tensorflow/python/ops/math_ops.py
index 466d0dadc8..cdb6dc8f22 100644
--- a/tensorflow/python/ops/math_ops.py
+++ b/tensorflow/python/ops/math_ops.py
@@ -1990,7 +1990,7 @@ def matmul(a,
sparse_matmul_types = [dtypes.bfloat16, dtypes.float32]
use_sparse_matmul = (
a.dtype in sparse_matmul_types and b.dtype in sparse_matmul_types)
- if (a.dtype == dtypes.bfloat16 or b.dtype == dtypes.bfloat16 and
+ if ((a.dtype == dtypes.bfloat16 or b.dtype == dtypes.bfloat16) and
a.dtype != b.dtype):
# matmul currently doesn't handle mixed-precision inputs.
use_sparse_matmul = True
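The `matmul` hunk is an operator-precedence fix: Python binds `and` tighter than `or`, so the unparenthesized condition parsed as `a.dtype == bfloat16 or (b.dtype == bfloat16 and a.dtype != b.dtype)` and evaluated to true whenever `a` was bfloat16, mixed precision or not. A standalone truth-table check (the boolean names are illustrative stand-ins for the dtype tests):

```python
# a_bf16: a.dtype == bfloat16; b_bf16: b.dtype == bfloat16;
# mixed:  a.dtype != b.dtype.
for a_bf16, b_bf16, mixed in [(True, True, False), (False, True, True)]:
  buggy = a_bf16 or b_bf16 and mixed        # `and` binds tighter than `or`.
  fixed = (a_bf16 or b_bf16) and mixed
  print((a_bf16, b_bf16, mixed), buggy, fixed)
# (True, True, False): buggy=True, fixed=False -- the mixed-precision
# override fired even when both dtypes were already bfloat16.
# (False, True, True): buggy=True, fixed=True -- genuine mixed case agrees.
```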
diff --git a/tensorflow/python/ops/rnn_cell_impl.py b/tensorflow/python/ops/rnn_cell_impl.py
index 05723c6960..82a044a0d4 100644
--- a/tensorflow/python/ops/rnn_cell_impl.py
+++ b/tensorflow/python/ops/rnn_cell_impl.py
@@ -47,6 +47,7 @@ from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.checkpointable import base as checkpointable
+from tensorflow.python.training.checkpointable import tracking as checkpointable_tracking
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
@@ -1331,7 +1332,7 @@ class MultiRNNCell(RNNCell):
return cur_inp, new_states
-class _SlimRNNCell(RNNCell, checkpointable.NotCheckpointable):
+class _SlimRNNCell(RNNCell, checkpointable_tracking.NotCheckpointable):
"""A simple wrapper for slim.rnn_cells."""
def __init__(self, cell_fn):
diff --git a/tensorflow/python/ops/script_ops.py b/tensorflow/python/ops/script_ops.py
index 219562de5d..1e3f662ff3 100644
--- a/tensorflow/python/ops/script_ops.py
+++ b/tensorflow/python/ops/script_ops.py
@@ -343,7 +343,7 @@ def eager_py_func(func, inp, Tout, name=None):
or print statements as desired, and wrap those functions in
`tf.contrib.eager.py_func`.
- For more information on eager execution, see @{$programmers_guide/eager}.
+ For more information on eager execution, see @{$guide/eager}.
`tf.contrib.eager.py_func` is similar in spirit to @{tf.py_func}, but unlike
the latter, the former lets you use TensorFlow operations in the wrapped
diff --git a/tensorflow/python/tools/saved_model_cli.py b/tensorflow/python/tools/saved_model_cli.py
index 5b9d25d449..38fed5335e 100644
--- a/tensorflow/python/tools/saved_model_cli.py
+++ b/tensorflow/python/tools/saved_model_cli.py
@@ -15,7 +15,7 @@
"""Command-line interface to inspect and execute a graph in a SavedModel.
For detailed usages and examples, please refer to:
-https://www.tensorflow.org/programmers_guide/saved_model_cli
+https://www.tensorflow.org/guide/saved_model_cli
"""
@@ -720,7 +720,7 @@ def create_parser():
'\'input4_key=[{"id":[26],"weights":[0.5, 0.5]}]\' \\\n'
' --outdir=/out\n\n'
'For more information about input file format, please see:\n'
- 'https://www.tensorflow.org/programmers_guide/saved_model_cli\n')
+ 'https://www.tensorflow.org/guide/saved_model_cli\n')
parser_run = subparsers.add_parser(
'run', description=run_msg, formatter_class=argparse.RawTextHelpFormatter)
parser_run.add_argument(
diff --git a/tensorflow/python/training/checkpointable/BUILD b/tensorflow/python/training/checkpointable/BUILD
index 87ba4dc91c..9232b6089a 100644
--- a/tensorflow/python/training/checkpointable/BUILD
+++ b/tensorflow/python/training/checkpointable/BUILD
@@ -42,21 +42,31 @@ py_test(
)
py_library(
- name = "data_structures_base",
- srcs = ["data_structures_base.py"],
+ name = "tracking",
+ srcs = ["tracking.py"],
srcs_version = "PY2AND3",
deps = [
":base",
],
)
+py_test(
+ name = "tracking_test",
+ srcs = ["tracking_test.py"],
+ srcs_version = "PY2AND3",
+ deps = [
+ ":base",
+ ":tracking",
+ "//tensorflow/python:client_testlib",
+ ],
+)
+
py_library(
name = "data_structures",
srcs = ["data_structures.py"],
srcs_version = "PY2AND3",
deps = [
":base",
- ":data_structures_base",
],
)
@@ -83,6 +93,7 @@ py_library(
srcs_version = "PY2AND3",
deps = [
":base",
+ ":tracking",
"//tensorflow/python:array_ops",
"//tensorflow/python:constant_op",
"//tensorflow/python:control_flow_ops",
diff --git a/tensorflow/python/training/checkpointable/base.py b/tensorflow/python/training/checkpointable/base.py
index cfe7259e1b..99c8098eca 100644
--- a/tensorflow/python/training/checkpointable/base.py
+++ b/tensorflow/python/training/checkpointable/base.py
@@ -758,61 +758,3 @@ class NoDependency(object):
def __init__(self, value):
self.value = value
-
-
-class NotCheckpointable(object):
- """Marks instances of child classes as unsaveable using an object-based API.
-
- Useful for marking objects which would otherwise look checkpointable because
- of inheritance (e.g. through `Layer`) as not checkpointable. Inheriting from
- `NotCheckpointable` does not prevent an object from being assigned to any
- attributes, but will throw an error on save/restore.
- """
- pass
-
-
-class Checkpointable(CheckpointableBase):
- """Manages dependencies on other objects.
-
- `Checkpointable` objects may have dependencies: other `Checkpointable` objects
- which should be saved if the object declaring the dependency is saved. A
- correctly saveable program has a dependency graph such that if changing a
- global variable affects an object (e.g. changes the behavior of any of its
- methods) then there is a chain of dependencies from the influenced object to
- the variable.
-
- Dependency edges have names, and are created implicitly when a
- `Checkpointable` object is assigned to an attribute of another
- `Checkpointable` object. For example:
-
- ```
- obj = Checkpointable()
- obj.v = ResourceVariable(0.)
- ```
-
- The `Checkpointable` object `obj` now has a dependency named "v" on a
- variable.
-
- `Checkpointable` objects may specify `Tensor`s to be saved and restored
- directly (e.g. a `Variable` indicating how to save itself) rather than through
- dependencies on other objects. See
- `Checkpointable._gather_saveables_for_checkpoint` for details.
- """
-
- def __setattr__(self, name, value):
- """Support self.foo = checkpointable syntax."""
- # Perform the attribute assignment, and potentially call other __setattr__
- # overrides such as that for tf.keras.Model.
- no_dependency = isinstance(value, NoDependency)
- if no_dependency:
- value = value.value
- super(Checkpointable, self).__setattr__(name, value)
- if not no_dependency and isinstance(value, CheckpointableBase):
- self._track_checkpointable(
- value, name=name,
- # Allow the user to switch the Checkpointable which is tracked by this
- # name, since assigning a new variable to an attribute has
- # historically been fine (e.g. Adam did this).
- # TODO(allenl): Should this be a warning once Checkpointable save/load
- # is usable?
- overwrite=True)
diff --git a/tensorflow/python/training/checkpointable/base_test.py b/tensorflow/python/training/checkpointable/base_test.py
index 0a274cdfed..950e9c5b53 100644
--- a/tensorflow/python/training/checkpointable/base_test.py
+++ b/tensorflow/python/training/checkpointable/base_test.py
@@ -17,33 +17,25 @@ from __future__ import division
from __future__ import print_function
from tensorflow.python.platform import test
-from tensorflow.python.training.checkpointable import base as checkpointable
+from tensorflow.python.training.checkpointable import base
class InterfaceTests(test.TestCase):
- def testMultipleAssignment(self):
- root = checkpointable.Checkpointable()
- root.leaf = checkpointable.Checkpointable()
- root.leaf = root.leaf
- duplicate_name_dep = checkpointable.Checkpointable()
+ def testOverwrite(self):
+ root = base.CheckpointableBase()
+ leaf = base.CheckpointableBase()
+ root._track_checkpointable(leaf, name="leaf")
+ (current_name, current_dependency), = root._checkpoint_dependencies
+ self.assertIs(leaf, current_dependency)
+ self.assertEqual("leaf", current_name)
+ duplicate_name_dep = base.CheckpointableBase()
with self.assertRaises(ValueError):
root._track_checkpointable(duplicate_name_dep, name="leaf")
- # No error; we're overriding __setattr__, so we can't really stop people
- # from doing this while maintaining backward compatibility.
- root.leaf = duplicate_name_dep
root._track_checkpointable(duplicate_name_dep, name="leaf", overwrite=True)
-
- def testNoDependency(self):
- root = checkpointable.Checkpointable()
- hasdep = checkpointable.Checkpointable()
- root.hasdep = hasdep
- nodep = checkpointable.Checkpointable()
- root.nodep = checkpointable.NoDependency(nodep)
- self.assertEqual(1, len(root._checkpoint_dependencies))
- self.assertIs(root._checkpoint_dependencies[0].ref, root.hasdep)
- self.assertIs(root.hasdep, hasdep)
- self.assertIs(root.nodep, nodep)
+ (current_name, current_dependency), = root._checkpoint_dependencies
+ self.assertIs(duplicate_name_dep, current_dependency)
+ self.assertEqual("leaf", current_name)
if __name__ == "__main__":
test.main()
diff --git a/tensorflow/python/training/checkpointable/data_structures.py b/tensorflow/python/training/checkpointable/data_structures.py
index 69ed253fb2..680cf3441f 100644
--- a/tensorflow/python/training/checkpointable/data_structures.py
+++ b/tensorflow/python/training/checkpointable/data_structures.py
@@ -25,7 +25,6 @@ from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.ops import variables
from tensorflow.python.training.checkpointable import base as checkpointable_lib
-from tensorflow.python.training.checkpointable import data_structures_base
# TODO(allenl): We could track regular Python data structures which get assigned
@@ -36,8 +35,7 @@ from tensorflow.python.training.checkpointable import data_structures_base
# user's updated structure, but would have no way to support restore-on-create
# for those modifications).
# TODO(allenl): A dictionary data structure would be good too.
-class CheckpointableDataStructure(
- data_structures_base.CheckpointableDataStructureBase):
+class CheckpointableDataStructure(checkpointable_lib.CheckpointableBase):
"""Base class for data structures which contain checkpointable objects."""
def __init__(self):
@@ -56,9 +54,7 @@ class CheckpointableDataStructure(
("Only checkpointable objects (such as Layers or Optimizers) may be "
"stored in a List object. Got %s, which does not inherit from "
"CheckpointableBase.") % (value,))
- if isinstance(value, (
- base_layer.Layer,
- data_structures_base.CheckpointableDataStructureBase)):
+ if isinstance(value, (base_layer.Layer, CheckpointableDataStructure)):
if value not in self._layers:
self._layers.append(value)
if hasattr(value, "_use_resource_variables"):
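With the trivial `data_structures_base` shim gone, `CheckpointableDataStructure` inherits `CheckpointableBase` directly and the layer-tracking `isinstance` check can name the class itself. A hedged sketch of the user-facing pattern these structures exist for, assuming the `tf.contrib.checkpoint.List` export available on this branch:

```python
import tensorflow as tf

class Stack(tf.keras.Model):
  """Each layer in the list becomes a named checkpoint dependency."""

  def __init__(self):
    super(Stack, self).__init__()
    self.dense_layers = tf.contrib.checkpoint.List(
        [tf.keras.layers.Dense(8) for _ in range(3)])

  def call(self, inputs):
    for layer in self.dense_layers:
      inputs = layer(inputs)
    return inputs
```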
diff --git a/tensorflow/python/training/checkpointable/data_structures_base.py b/tensorflow/python/training/checkpointable/data_structures_base.py
deleted file mode 100644
index f1b2cf105b..0000000000
--- a/tensorflow/python/training/checkpointable/data_structures_base.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""A trivial base class to avoid circular imports for isinstance checks."""
-# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-
-from tensorflow.python.training.checkpointable import base as checkpointable_lib
-
-
-class CheckpointableDataStructureBase(checkpointable_lib.CheckpointableBase):
- """Base class for data structures which contain checkpointable objects."""
-
- pass
diff --git a/tensorflow/python/training/checkpointable/tracking.py b/tensorflow/python/training/checkpointable/tracking.py
new file mode 100644
index 0000000000..00e14ac982
--- /dev/null
+++ b/tensorflow/python/training/checkpointable/tracking.py
@@ -0,0 +1,103 @@
+"""Dependency tracking for checkpointable objects."""
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.training.checkpointable import base
+
+
+class NoDependency(object):
+ """Allows attribute assignment to `Checkpointable` objects with no dependency.
+
+ Example usage:
+ ```python
+ obj = Checkpointable()
+ obj.has_dependency = tf.Variable(0., name="dep")
+ obj.no_dependency = NoDependency(tf.Variable(1., name="nodep"))
+ assert obj.no_dependency.name == "nodep:0"
+ ```
+
+ `obj` in this example has a dependency on the variable "dep", and both
+ attributes contain un-wrapped `Variable` objects.
+
+ `NoDependency` also works with `tf.keras.Model`, but only for checkpoint
+ dependencies: wrapping a `Layer` in `NoDependency` will assign the (unwrapped)
+ `Layer` to the attribute without a checkpoint dependency, but the `Model` will
+ still track the `Layer` (so it will appear in `Model.layers`, and its
+ variables will appear in `Model.variables`).
+ """
+
+ def __init__(self, value):
+ self.value = value
+
+
+class NotCheckpointable(object):
+ """Marks instances of child classes as unsaveable using an object-based API.
+
+ Useful for marking objects which would otherwise look checkpointable because
+ of inheritance (e.g. through `Layer`) as not checkpointable. Inheriting from
+ `NotCheckpointable` does not prevent an object from being assigned to any
+ attributes, but will throw an error on save/restore.
+ """
+ pass
+
+
+class Checkpointable(base.CheckpointableBase):
+ """Manages dependencies on other objects.
+
+ `Checkpointable` objects may have dependencies: other `Checkpointable` objects
+ which should be saved if the object declaring the dependency is saved. A
+ correctly saveable program has a dependency graph such that if changing a
+ global variable affects an object (e.g. changes the behavior of any of its
+ methods) then there is a chain of dependencies from the influenced object to
+ the variable.
+
+ Dependency edges have names, and are created implicitly when a
+ `Checkpointable` object is assigned to an attribute of another
+ `Checkpointable` object. For example:
+
+ ```
+ obj = Checkpointable()
+ obj.v = ResourceVariable(0.)
+ ```
+
+ The `Checkpointable` object `obj` now has a dependency named "v" on a
+ variable.
+
+ `Checkpointable` objects may specify `Tensor`s to be saved and restored
+ directly (e.g. a `Variable` indicating how to save itself) rather than through
+ dependencies on other objects. See
+ `Checkpointable._gather_saveables_for_checkpoint` for details.
+ """
+
+ def __setattr__(self, name, value):
+ """Support self.foo = checkpointable syntax."""
+ # Perform the attribute assignment, and potentially call other __setattr__
+ # overrides such as that for tf.keras.Model.
+ no_dependency = isinstance(value, NoDependency)
+ if no_dependency:
+ value = value.value
+ super(Checkpointable, self).__setattr__(name, value)
+ if not no_dependency and isinstance(value, base.CheckpointableBase):
+ self._track_checkpointable(
+ value, name=name,
+ # Allow the user to switch the Checkpointable which is tracked by this
+ # name, since assigning a new variable to an attribute has
+ # historically been fine (e.g. Adam did this).
+ # TODO(allenl): Should this be a warning once Checkpointable save/load
+ # is usable?
+ overwrite=True)
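`tracking.py` now owns attribute-based dependency tracking while `base.py` keeps only the core `CheckpointableBase` protocol; that split removes the circular import `data_structures_base.py` existed to dodge. A hedged end-to-end sketch of the moved behavior (eager mode; the checkpoint prefix is an illustrative path):

```python
import tensorflow as tf
from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util

tf.enable_eager_execution()

root = tracking.Checkpointable()
root.v = tf.contrib.eager.Variable(1.0)      # Tracked as dependency "v".
root.scratch = tracking.NoDependency(
    tf.contrib.eager.Variable(2.0))          # Assigned, but never saved.
assert len(root._checkpoint_dependencies) == 1
save_path = util.Checkpoint(root=root).save("/tmp/tracking_demo")
```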
diff --git a/tensorflow/python/training/checkpointable/tracking_test.py b/tensorflow/python/training/checkpointable/tracking_test.py
new file mode 100644
index 0000000000..baf6f57efb
--- /dev/null
+++ b/tensorflow/python/training/checkpointable/tracking_test.py
@@ -0,0 +1,49 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.platform import test
+from tensorflow.python.training.checkpointable import tracking
+
+
+class InterfaceTests(test.TestCase):
+
+ def testMultipleAssignment(self):
+ root = tracking.Checkpointable()
+ root.leaf = tracking.Checkpointable()
+ root.leaf = root.leaf
+ duplicate_name_dep = tracking.Checkpointable()
+ with self.assertRaises(ValueError):
+ root._track_checkpointable(duplicate_name_dep, name="leaf")
+ # No error; we're overriding __setattr__, so we can't really stop people
+ # from doing this while maintaining backward compatibility.
+ root.leaf = duplicate_name_dep
+ root._track_checkpointable(duplicate_name_dep, name="leaf", overwrite=True)
+
+ def testNoDependency(self):
+ root = tracking.Checkpointable()
+ hasdep = tracking.Checkpointable()
+ root.hasdep = hasdep
+ nodep = tracking.Checkpointable()
+ root.nodep = tracking.NoDependency(nodep)
+ self.assertEqual(1, len(root._checkpoint_dependencies))
+ self.assertIs(root._checkpoint_dependencies[0].ref, root.hasdep)
+ self.assertIs(root.hasdep, hasdep)
+ self.assertIs(root.nodep, nodep)
+
+if __name__ == "__main__":
+ test.main()
diff --git a/tensorflow/python/training/checkpointable/util.py b/tensorflow/python/training/checkpointable/util.py
index 0608076e6d..e0f61137b1 100644
--- a/tensorflow/python/training/checkpointable/util.py
+++ b/tensorflow/python/training/checkpointable/util.py
@@ -39,7 +39,8 @@ from tensorflow.python.ops import variable_scope
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow.python.training import saveable_object as saveable_object_lib
from tensorflow.python.training import saver as saver_lib
-from tensorflow.python.training.checkpointable import base as checkpointable_lib
+from tensorflow.python.training.checkpointable import base
+from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.util import deprecation
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
@@ -114,7 +115,7 @@ class _CheckpointRestoreCoordinator(object):
# `node` refers to an `Optimizer`, since only these have slot variables.
self.slot_restorations.setdefault(
slot_reference.original_variable_node_id, []).append(
- checkpointable_lib._SlotVariableRestoration( # pylint: disable=protected-access
+ base._SlotVariableRestoration( # pylint: disable=protected-access
optimizer_id=node_index,
slot_variable_id=slot_reference.slot_variable_node_id,
slot_name=slot_reference.slot_name))
@@ -258,13 +259,13 @@ def object_metadata(save_path):
reader = pywrap_tensorflow.NewCheckpointReader(save_path)
try:
object_graph_string = reader.get_tensor(
- checkpointable_lib.OBJECT_GRAPH_PROTO_KEY)
+ base.OBJECT_GRAPH_PROTO_KEY)
except errors_impl.NotFoundError:
raise ValueError(
('The specified checkpoint "%s" does not appear to be object-based (it '
'is missing the key "%s"). Likely it was created with a name-based '
'saver and does not contain an object dependency graph.') % (
- save_path, checkpointable_lib.OBJECT_GRAPH_PROTO_KEY))
+ save_path, base.OBJECT_GRAPH_PROTO_KEY))
object_graph_proto = (
checkpointable_object_graph_pb2.CheckpointableObjectGraph())
object_graph_proto.ParseFromString(object_graph_string)
@@ -278,7 +279,7 @@ def _breadth_first_checkpointable_traversal(root_checkpointable):
path_to_root = {root_checkpointable: ()}
while to_visit:
current_checkpointable = to_visit.popleft()
- if isinstance(current_checkpointable, checkpointable_lib.NotCheckpointable):
+ if isinstance(current_checkpointable, tracking.NotCheckpointable):
raise NotImplementedError(
("The object %s does not support object-based saving. File a feature "
"request if this limitation bothers you. In the meantime, you can "
@@ -1038,11 +1039,11 @@ class CheckpointableSaver(object):
with ops.device("/cpu:0"):
object_graph_tensor = constant_op.constant(
graph_proto.SerializeToString(), dtype=dtypes.string)
- assert checkpointable_lib.OBJECT_GRAPH_PROTO_KEY not in named_variables
+ assert base.OBJECT_GRAPH_PROTO_KEY not in named_variables
named_variables.append(
_NoRestoreSaveable(
tensor=object_graph_tensor,
- name=checkpointable_lib.OBJECT_GRAPH_PROTO_KEY))
+ name=base.OBJECT_GRAPH_PROTO_KEY))
if (self._last_save_object_graph != graph_proto
# When executing eagerly, we need to re-create SaveableObjects each time
# save() is called so they pick up new Tensors passed to their
@@ -1132,7 +1133,7 @@ class CheckpointableSaver(object):
dtype_map = reader.get_variable_to_dtype_map()
try:
object_graph_string = reader.get_tensor(
- checkpointable_lib.OBJECT_GRAPH_PROTO_KEY)
+ base.OBJECT_GRAPH_PROTO_KEY)
except errors_impl.NotFoundError:
# The object graph proto does not exist in this checkpoint. Try the
# name-based compatibility mode.
@@ -1178,7 +1179,7 @@ class CheckpointableSaver(object):
"file a feature request if this limitation bothers you.")
self._last_restore_checkpoint = checkpoint
self._last_restore_object_graph = object_graph_proto
- checkpointable_lib._CheckpointPosition( # pylint: disable=protected-access
+ base._CheckpointPosition( # pylint: disable=protected-access
checkpoint=checkpoint, proto_id=0).restore(self._root_checkpointable)
load_status = CheckpointLoadStatus(
checkpoint,
@@ -1188,7 +1189,7 @@ class CheckpointableSaver(object):
@tf_export("train.Checkpoint")
-class Checkpoint(checkpointable_lib.Checkpointable):
+class Checkpoint(tracking.Checkpointable):
"""Groups checkpointable objects, saving and restoring them.
`Checkpoint`'s constructor accepts keyword arguments whose values are types
@@ -1290,7 +1291,7 @@ class Checkpoint(checkpointable_lib.Checkpointable):
"""
super(Checkpoint, self).__init__()
for k, v in sorted(kwargs.items(), key=lambda item: item[0]):
- if not isinstance(v, checkpointable_lib.CheckpointableBase):
+ if not isinstance(v, base.CheckpointableBase):
raise ValueError(
("`Checkpoint` was expecting a checkpointable object (an object "
"derived from `CheckpointableBase`), got %s. If you believe this "
@@ -1309,7 +1310,7 @@ class Checkpoint(checkpointable_lib.Checkpointable):
with ops.device("/cpu:0"):
# add_variable creates a dependency named "save_counter"; NoDependency
# prevents creating a second dependency named "_save_counter".
- self._save_counter = checkpointable_lib.NoDependency(
+ self._save_counter = tracking.NoDependency(
add_variable(self, name="save_counter", initializer=0,
dtype=dtypes.int64))
diff --git a/tensorflow/python/training/checkpointable/util_test.py b/tensorflow/python/training/checkpointable/util_test.py
index 9f479bcd1e..896ea47b97 100644
--- a/tensorflow/python/training/checkpointable/util_test.py
+++ b/tensorflow/python/training/checkpointable/util_test.py
@@ -44,11 +44,12 @@ from tensorflow.python.ops import variable_scope
from tensorflow.python.training import adam
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import training_util
-from tensorflow.python.training.checkpointable import base as checkpointable
+from tensorflow.python.training.checkpointable import base
+from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
-class NonLayerCheckpointable(checkpointable.Checkpointable):
+class NonLayerCheckpointable(tracking.Checkpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
@@ -136,7 +137,7 @@ class InterfaceTests(test.TestCase):
def testInitNotCalled(self):
- class NoInit(checkpointable.Checkpointable):
+ class NoInit(tracking.Checkpointable):
def __init__(self):
pass
@@ -145,7 +146,7 @@ class InterfaceTests(test.TestCase):
checkpointable_utils.add_variable(NoInit(), "var", shape=[])
def testShapeDtype(self):
- root = checkpointable.Checkpointable()
+ root = tracking.Checkpointable()
v1 = checkpointable_utils.add_variable(
root, name="v1", initializer=3., dtype=dtypes.float64)
self.assertEqual(dtypes.float64, v1.dtype)
@@ -177,7 +178,7 @@ class InterfaceTests(test.TestCase):
def testNotCheckpointable(self):
class CallsFunctionalStuff(
- checkpointable.NotCheckpointable, checkpointable.Checkpointable):
+ tracking.NotCheckpointable, tracking.Checkpointable):
pass
test_dir = self.get_temp_dir()
@@ -187,7 +188,7 @@ class InterfaceTests(test.TestCase):
checkpoint.save(prefix)
class CallsFunctionalStuffOtherMRO(
- checkpointable.Checkpointable, checkpointable.NotCheckpointable):
+ tracking.Checkpointable, tracking.NotCheckpointable):
pass
checkpoint_reversed = checkpointable_utils.Checkpoint(
@@ -217,7 +218,7 @@ class _MirroringSaveable(saver_lib.BaseSaverBuilder.SaveableObject):
self._mirrored_variable.assign(tensor))
-class _OwnsMirroredVariables(checkpointable.CheckpointableBase):
+class _OwnsMirroredVariables(base.CheckpointableBase):
"""A Checkpointable object which returns a more complex SaveableObject."""
def __init__(self):
@@ -232,7 +233,7 @@ class _OwnsMirroredVariables(checkpointable.CheckpointableBase):
primary_variable=self.non_dep_variable,
mirrored_variable=self.mirrored,
name=name)
- return {checkpointable.VARIABLE_VALUE_KEY: _saveable_factory}
+ return {base.VARIABLE_VALUE_KEY: _saveable_factory}
# The Saver sorts by name before parsing, so we need a name property.
@property
@@ -590,7 +591,7 @@ class CheckpointingTests(test.TestCase):
# pylint: enable=cell-var-from-loop
def _get_checkpoint_name(self, name):
- root = checkpointable.Checkpointable()
+ root = tracking.Checkpointable()
checkpointable_utils.add_variable(
root, name=name, shape=[1, 2], dtype=dtypes.float64)
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
@@ -611,8 +612,8 @@ class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testNumberedPath(self):
- root = checkpointable.Checkpointable()
- leaf = checkpointable.Checkpointable()
+ root = tracking.Checkpointable()
+ leaf = tracking.Checkpointable()
root.leaf = leaf
checkpointable_utils.add_variable(leaf, name="v", shape=[])
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
@@ -621,8 +622,8 @@ class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testLocalNameValidation(self):
- root = checkpointable.Checkpointable()
- leaf = checkpointable.Checkpointable()
+ root = tracking.Checkpointable()
+ leaf = tracking.Checkpointable()
# Dots are escaped, which avoids conflicts with reserved names.
root._track_checkpointable(leaf, name=".ATTRIBUTES")
checkpointable_utils.add_variable(checkpointable=leaf, name="a", shape=[])
@@ -663,13 +664,13 @@ class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testLateDependencyTracking(self):
- class Dependency(checkpointable.Checkpointable):
+ class Dependency(tracking.Checkpointable):
def build(self):
self.var = checkpointable_utils.add_variable(
self, "var", initializer=0.)
- class LateDependencies(checkpointable.Checkpointable):
+ class LateDependencies(tracking.Checkpointable):
def add_dep(self):
self.dep = Dependency()
@@ -695,13 +696,13 @@ class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testDepAfterVar(self):
- class Dependency(checkpointable.Checkpointable):
+ class Dependency(tracking.Checkpointable):
def build(self):
self.var = checkpointable_utils.add_variable(
self, "var", initializer=0.)
- class DepAfterVar(checkpointable.Checkpointable):
+ class DepAfterVar(tracking.Checkpointable):
def add_dep(self):
dep = Dependency()
@@ -728,7 +729,7 @@ class CheckpointingTests(test.TestCase):
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
- root = checkpointable.Checkpointable()
+ root = tracking.Checkpointable()
root.var = checkpointable_utils.add_variable(
root, name="var", initializer=0.)
optimizer = adam.AdamOptimizer(0.1)
@@ -751,7 +752,7 @@ class CheckpointingTests(test.TestCase):
14.))
slots_path = checkpointable_utils.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
- new_root = checkpointable.Checkpointable()
+ new_root = tracking.Checkpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
slot_status = checkpointable_utils.CheckpointableSaver(
@@ -792,8 +793,8 @@ class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testOverlappingRestores(self):
checkpoint_directory = self.get_temp_dir()
- save_root = checkpointable.Checkpointable()
- save_root.dep = checkpointable.Checkpointable()
+ save_root = tracking.Checkpointable()
+ save_root.dep = tracking.Checkpointable()
save_root.dep.var = checkpointable_utils.add_variable(
save_root.dep, name="var", initializer=0.)
self.evaluate(state_ops.assign(save_root.dep.var, 12.))
@@ -802,13 +803,13 @@ class CheckpointingTests(test.TestCase):
self.evaluate(state_ops.assign(save_root.dep.var, 13.))
second_path = saver.save(os.path.join(checkpoint_directory, "second"))
- first_root = checkpointable.Checkpointable()
- second_root = checkpointable.Checkpointable()
+ first_root = tracking.Checkpointable()
+ second_root = tracking.Checkpointable()
first_status = checkpointable_utils.CheckpointableSaver(
first_root).restore(first_path)
second_status = checkpointable_utils.CheckpointableSaver(
second_root).restore(second_path)
- load_dep = checkpointable.Checkpointable()
+ load_dep = tracking.Checkpointable()
load_dep.var = checkpointable_utils.add_variable(
load_dep, name="var", shape=[])
first_root.dep = load_dep
@@ -822,13 +823,13 @@ class CheckpointingTests(test.TestCase):
# Try again with the order of the restore() reversed. The last restore
# determines the final value.
- first_root = checkpointable.Checkpointable()
- second_root = checkpointable.Checkpointable()
+ first_root = tracking.Checkpointable()
+ second_root = tracking.Checkpointable()
second_status = checkpointable_utils.CheckpointableSaver(
second_root).restore(second_path)
first_status = checkpointable_utils.CheckpointableSaver(
first_root).restore(first_path)
- load_dep = checkpointable.Checkpointable()
+ load_dep = tracking.Checkpointable()
load_dep.var = checkpointable_utils.add_variable(
load_dep, name="var", shape=[])
first_root.dep = load_dep
@@ -844,23 +845,23 @@ class CheckpointingTests(test.TestCase):
def testAmbiguousLoad(self):
# Not OK to split one checkpoint object into two
checkpoint_directory = self.get_temp_dir()
- save_root = checkpointable.Checkpointable()
- save_root.dep_one = checkpointable.Checkpointable()
- save_root.dep_two = checkpointable.Checkpointable()
- dep_three = checkpointable.Checkpointable()
+ save_root = tracking.Checkpointable()
+ save_root.dep_one = tracking.Checkpointable()
+ save_root.dep_two = tracking.Checkpointable()
+ dep_three = tracking.Checkpointable()
save_root.dep_one.dep_three = dep_three
save_root.dep_two.dep_three = dep_three
checkpointable_utils.add_variable(dep_three, name="var", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(save_root))
save_path = checkpointable_utils.CheckpointableSaver(save_root).save(
os.path.join(checkpoint_directory, "ckpt"))
- load_root = checkpointable.Checkpointable()
+ load_root = tracking.Checkpointable()
status = checkpointable_utils.CheckpointableSaver(load_root).restore(
save_path)
- load_root.dep_one = checkpointable.Checkpointable()
- load_root.dep_two = checkpointable.Checkpointable()
- load_root.dep_one.dep_three = checkpointable.Checkpointable()
- load_root.dep_two.dep_three = checkpointable.Checkpointable()
+ load_root.dep_one = tracking.Checkpointable()
+ load_root.dep_two = tracking.Checkpointable()
+ load_root.dep_one.dep_three = tracking.Checkpointable()
+ load_root.dep_two.dep_three = tracking.Checkpointable()
checkpointable_utils.add_variable(
load_root.dep_one.dep_three, name="var", initializer=0.)
with self.assertRaises(AssertionError):
@@ -870,9 +871,9 @@ class CheckpointingTests(test.TestCase):
def testObjectsCombined(self):
# Currently fine to load two checkpoint objects into one Python object
checkpoint_directory = self.get_temp_dir()
- save_root = checkpointable.Checkpointable()
- save_root.dep_one = checkpointable.Checkpointable()
- save_root.dep_two = checkpointable.Checkpointable()
+ save_root = tracking.Checkpointable()
+ save_root.dep_one = tracking.Checkpointable()
+ save_root.dep_two = tracking.Checkpointable()
checkpointable_utils.add_variable(
save_root.dep_one, name="var1", initializer=32., dtype=dtypes.float64)
checkpointable_utils.add_variable(
@@ -880,8 +881,8 @@ class CheckpointingTests(test.TestCase):
self.evaluate(checkpointable_utils.gather_initializers(save_root))
save_path = checkpointable_utils.CheckpointableSaver(save_root).save(
os.path.join(checkpoint_directory, "ckpt"))
- load_root = checkpointable.Checkpointable()
- load_root.dep_one = checkpointable.Checkpointable()
+ load_root = tracking.Checkpointable()
+ load_root.dep_one = tracking.Checkpointable()
load_root.dep_two = load_root.dep_one
v1 = checkpointable_utils.add_variable(
load_root.dep_one, name="var1", shape=[], dtype=dtypes.float64)
@@ -897,8 +898,8 @@ class CheckpointingTests(test.TestCase):
def testDependencyLoop(self):
# Note: this test creates garbage during eager execution because it
# purposefully creates a reference cycle.
- first = checkpointable.Checkpointable()
- second = checkpointable.Checkpointable()
+ first = tracking.Checkpointable()
+ second = tracking.Checkpointable()
first.second = second
second.first = first
first.v = checkpointable_utils.add_variable(
@@ -911,10 +912,10 @@ class CheckpointingTests(test.TestCase):
os.path.join(checkpoint_directory, "ckpt"))
# Test deferred loading
- first_load = checkpointable.Checkpointable()
+ first_load = tracking.Checkpointable()
status = checkpointable_utils.CheckpointableSaver(
first_load).restore(save_path)
- second_load = checkpointable.Checkpointable()
+ second_load = tracking.Checkpointable()
first_load.second = second_load
second_load.first = first_load
with self.assertRaises(AssertionError):
@@ -945,7 +946,7 @@ class CheckpointingTests(test.TestCase):
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_graph = ops.Graph()
with save_graph.as_default(), self.test_session(save_graph):
- first = checkpointable.Checkpointable()
+ first = tracking.Checkpointable()
first.var1 = variable_scope.get_variable(
name="outside_var", initializer=0.)
first.var2 = variable_scope.get_variable(
@@ -956,7 +957,7 @@ class CheckpointingTests(test.TestCase):
checkpoint_prefix)
restore_graph = ops.Graph()
with restore_graph.as_default(), self.test_session(restore_graph):
- second = checkpointable.Checkpointable()
+ second = tracking.Checkpointable()
second.var2 = variable_scope.get_variable(
name="blah", initializer=0.)
status = checkpointable_utils.CheckpointableSaver(
@@ -978,7 +979,7 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.test_session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- obj = checkpointable.Checkpointable()
+ obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
@@ -993,7 +994,7 @@ class CheckpointingTests(test.TestCase):
def testCheckpointCleanup(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- obj = checkpointable.Checkpointable()
+ obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(obj))
saver = checkpointable_utils.Checkpoint(obj=obj)
@@ -1013,7 +1014,7 @@ class CheckpointingTests(test.TestCase):
def testCheckpointCleanupChangingVarList(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- obj = checkpointable.Checkpointable()
+ obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(obj))
checkpoint = checkpointable_utils.Checkpoint(obj=obj)
@@ -1062,7 +1063,7 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.test_session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
- obj = checkpointable.Checkpointable()
+ obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
@@ -1243,7 +1244,7 @@ class CheckpointingTests(test.TestCase):
self.assertEqual(42., self.evaluate(optimizer.variables()[0]))
-class _ManualScope(checkpointable.Checkpointable):
+class _ManualScope(tracking.Checkpointable):
def __call__(self):
with variable_scope.variable_scope("ManualScope") as vs:
diff --git a/tensorflow/python/training/saver_test.py b/tensorflow/python/training/saver_test.py
index 96963f55bd..f235300eb5 100644
--- a/tensorflow/python/training/saver_test.py
+++ b/tensorflow/python/training/saver_test.py
@@ -79,7 +79,8 @@ from tensorflow.python.training import saver as saver_module
from tensorflow.python.training import saver_test_utils
from tensorflow.python.training import training_util
from tensorflow.python.training.checkpoint_state_pb2 import CheckpointState
-from tensorflow.python.training.checkpointable import base as checkpointable
+from tensorflow.python.training.checkpointable import base as checkpointable_base
+from tensorflow.python.training.checkpointable import tracking as checkpointable_tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
from tensorflow.python.util import compat
@@ -2939,7 +2940,7 @@ class ScopedGraphTest(test.TestCase):
self.assertEqual(2.0, var_dict2["variable2:0"].eval())
-class _OwnsAVariableSimple(checkpointable.CheckpointableBase):
+class _OwnsAVariableSimple(checkpointable_base.CheckpointableBase):
"""A Checkpointable object which can be saved using a tf.train.Saver."""
def __init__(self):
@@ -2947,7 +2948,7 @@ class _OwnsAVariableSimple(checkpointable.CheckpointableBase):
name="non_dep_variable", initializer=6., use_resource=True)
def _gather_saveables_for_checkpoint(self):
- return {checkpointable.VARIABLE_VALUE_KEY: self.non_dep_variable}
+ return {checkpointable_base.VARIABLE_VALUE_KEY: self.non_dep_variable}
# The Saver sorts by name before parsing, so we need a name property.
@property
@@ -2972,7 +2973,7 @@ class _MirroringSaveable(
self._mirrored_variable.assign(tensor))
-class _OwnsMirroredVariables(checkpointable.CheckpointableBase):
+class _OwnsMirroredVariables(checkpointable_base.CheckpointableBase):
"""A Checkpointable object which returns a more complex SaveableObject."""
def __init__(self):
@@ -2987,7 +2988,7 @@ class _OwnsMirroredVariables(checkpointable.CheckpointableBase):
primary_variable=self.non_dep_variable,
mirrored_variable=self.mirrored,
name=name)
- return {checkpointable.VARIABLE_VALUE_KEY: _saveable_factory}
+ return {checkpointable_base.VARIABLE_VALUE_KEY: _saveable_factory}
# The Saver sorts by name before parsing, so we need a name property.
@property
@@ -2995,7 +2996,7 @@ class _OwnsMirroredVariables(checkpointable.CheckpointableBase):
return self.non_dep_variable.name
-class NonLayerCheckpointable(checkpointable.Checkpointable):
+class NonLayerCheckpointable(checkpointable_tracking.Checkpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
diff --git a/tensorflow/tools/api/golden/tensorflow.train.-checkpoint.pbtxt b/tensorflow/tools/api/golden/tensorflow.train.-checkpoint.pbtxt
index ddc553d7c9..2d067e4eff 100644
--- a/tensorflow/tools/api/golden/tensorflow.train.-checkpoint.pbtxt
+++ b/tensorflow/tools/api/golden/tensorflow.train.-checkpoint.pbtxt
@@ -1,7 +1,7 @@
path: "tensorflow.train.Checkpoint"
tf_class {
is_instance: "<class \'tensorflow.python.training.checkpointable.util.Checkpoint\'>"
- is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
+ is_instance: "<class \'tensorflow.python.training.checkpointable.tracking.Checkpointable\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<type \'object\'>"
member {
diff --git a/tensorflow/workspace.bzl b/tensorflow/workspace.bzl
index 779b6a3d9a..45a9347f34 100644
--- a/tensorflow/workspace.bzl
+++ b/tensorflow/workspace.bzl
@@ -452,11 +452,11 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
tf_http_archive(
name = "llvm",
urls = [
- "https://mirror.bazel.build/github.com/llvm-mirror/llvm/archive/19357eaea4f9599bcb228611719e0c5b8fc65298.tar.gz",
- "https://github.com/llvm-mirror/llvm/archive/19357eaea4f9599bcb228611719e0c5b8fc65298.tar.gz",
+ "https://mirror.bazel.build/github.com/llvm-mirror/llvm/archive/7f7cea53068238fca7b7e4299793a0c77bea7219.tar.gz",
+ "https://github.com/llvm-mirror/llvm/archive/7f7cea53068238fca7b7e4299793a0c77bea7219.tar.gz",
],
- sha256 = "c07971d102ae5353c4a22c15e82e75f4347a16260c52060187baf4b113161216",
- strip_prefix = "llvm-19357eaea4f9599bcb228611719e0c5b8fc65298",
+ sha256 = "b645507080e07c845607f212d45e4ee79253c3c9b762531f51fbaeceb6b47391",
+ strip_prefix = "llvm-7f7cea53068238fca7b7e4299793a0c77bea7219",
build_file = clean_dep("//third_party/llvm:llvm.autogenerated.BUILD"),
)
diff --git a/third_party/examples/eager/spinn/README.md b/third_party/examples/eager/spinn/README.md
index fbb1fde837..e2fd8009a0 100644
--- a/third_party/examples/eager/spinn/README.md
+++ b/third_party/examples/eager/spinn/README.md
@@ -22,7 +22,7 @@ Other eager execution examples can be found under [tensorflow/contrib/eager/pyth
- [`data.py`](../../../../tensorflow/contrib/eager/python/examples/spinn/data.py): Pipeline for loading and preprocessing the
[SNLI](https://nlp.stanford.edu/projects/snli/) data and
[GloVe](https://nlp.stanford.edu/projects/glove/) word embedding, written
- using the [`tf.data`](https://www.tensorflow.org/programmers_guide/datasets)
+ using the [`tf.data`](https://www.tensorflow.org/guide/datasets)
API.
- [`spinn.py`](./spinn.py): Model definition and training routines.
This example illustrates how one might perform the following actions with
diff --git a/third_party/llvm/llvm.autogenerated.BUILD b/third_party/llvm/llvm.autogenerated.BUILD
index e1c22c8151..4f645fa260 100644
--- a/third_party/llvm/llvm.autogenerated.BUILD
+++ b/third_party/llvm/llvm.autogenerated.BUILD
@@ -292,6 +292,20 @@ gentbl(
],
)
+gentbl(
+ name = "instcombine_transforms_gen",
+ tbl_outs = [(
+ "-gen-searchable-tables",
+ "lib/Transforms/InstCombine/InstCombineTables.inc",
+ )],
+ tblgen = ":llvm-tblgen",
+ td_file = "lib/Transforms/InstCombine/InstCombineTables.td",
+ td_srcs = glob([
+ "include/llvm/CodeGen/*.td",
+ "include/llvm/IR/Intrinsics*.td",
+ ]) + ["include/llvm/TableGen/SearchableTable.td"],
+)
+
# Binary targets used by Tensorflow.
cc_binary(
name = "llvm-tblgen",
@@ -1397,6 +1411,7 @@ cc_library(
":analysis",
":config",
":core",
+ ":instcombine_transforms_gen",
":support",
":transform_utils",
],