aboutsummaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authorGravatar Martin Wicke <martin.wicke@gmail.com>2016-06-28 21:29:08 -0700
committerGravatar GitHub <noreply@github.com>2016-06-28 21:29:08 -0700
commit896086b24f0431e18af378a0d71d0494c221b356 (patch)
tree463019c4f8950247b9c1940ac1499de16d1386eb
parent5d468f05d92abaa3f9d4a18518087067a3e78805 (diff)
parent990bc2c2a3d11522c21c1c54376611f28f5c55ed (diff)
Merge pull request #3096 from martinwicke/docs-update
Wide and Deep docs update
-rw-r--r--tensorflow/g3doc/images/wide_n_deep.svg1540
-rw-r--r--tensorflow/g3doc/tutorials/index.md121
-rw-r--r--tensorflow/g3doc/tutorials/leftnav_files14
-rw-r--r--tensorflow/g3doc/tutorials/linear/overview.md237
-rw-r--r--tensorflow/g3doc/tutorials/tflearn/index.md251
-rw-r--r--tensorflow/g3doc/tutorials/wide/index.md482
-rw-r--r--tensorflow/g3doc/tutorials/wide_and_deep/index.md275
7 files changed, 2873 insertions, 47 deletions
diff --git a/tensorflow/g3doc/images/wide_n_deep.svg b/tensorflow/g3doc/images/wide_n_deep.svg
new file mode 100644
index 0000000000..6dfe9e7f10
--- /dev/null
+++ b/tensorflow/g3doc/images/wide_n_deep.svg
@@ -0,0 +1,1540 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ version="1.1"
+ viewBox="0 0 952.2796 201.30696"
+ stroke-miterlimit="10"
+ id="svg4775"
+ inkscape:version="0.91 r13725"
+ sodipodi:docname="wide_n_deep_resized.svg"
+ width="952.2796"
+ height="201.30696"
+ style="fill:none;stroke:none;stroke-linecap:square;stroke-miterlimit:10">
+ <metadata
+ id="metadata5374">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <defs
+ id="defs5372" />
+ <sodipodi:namedview
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1"
+ objecttolerance="10"
+ gridtolerance="10"
+ guidetolerance="10"
+ inkscape:pageopacity="0"
+ inkscape:pageshadow="2"
+ inkscape:window-width="1421"
+ inkscape:window-height="797"
+ id="namedview5370"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:zoom="0.90138889"
+ inkscape:cx="430.75268"
+ inkscape:cy="135.99525"
+ inkscape:window-x="1"
+ inkscape:window-y="20"
+ inkscape:window-maximized="0"
+ inkscape:current-layer="g4780" />
+ <clipPath
+ id="p.0">
+ <path
+ d="M 0,0 960,0 960,720 0,720 0,0 Z"
+ id="path4778"
+ inkscape:connector-curvature="0"
+ style="clip-rule:nonzero" />
+ </clipPath>
+ <g
+ clip-path="url(#p.0)"
+ id="g4780"
+ transform="translate(-4.8713584,-250.31233)">
+ <path
+ d="m 0,0 960,0 0,720 -960,0 z"
+ id="path4782"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 2.6456692,433.43008 5.3765955,-12.23624 0,0 941.0262953,0 0,0 5.37659,12.23624 -5.37659,12.23621 0,0 -941.0262953,0 0,0 z"
+ id="path4784"
+ inkscape:connector-curvature="0"
+ style="fill:#efefef;fill-rule:nonzero" />
+ <path
+ d="m 393.94235,353.87927 562.7086,0 0,34.48819 -562.7086,0 z"
+ id="path4786"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 393.94235,353.87927 562.7086,0 0,34.48819 -562.7086,0 z"
+ id="path4788"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#b7b7b7;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-dasharray:4, 3" />
+ <path
+ d="m 86.80062,252.30708 773.41736,0 0,30.11024 -773.41736,0 z"
+ id="path4790"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 86.80062,252.30708 773.41736,0 0,30.11024 -773.41736,0 z"
+ id="path4792"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#b7b7b7;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-dasharray:4, 3" />
+ <path
+ d="m 430.66415,289.09183 484.09445,0 0,58.11023 -484.09445,0 z"
+ id="path4794"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 430.66415,289.09183 484.09445,0 0,58.11023 -484.09445,0 z"
+ id="path4796"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#b7b7b7;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-dasharray:4, 3" />
+ <path
+ d="m 4.8713584,391.71652 952.1575016,0 0,24.47244 -952.1575016,0 z"
+ id="path4798"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 4.8713584,391.71652 952.1575016,0 0,24.47244 -952.1575016,0 z"
+ id="path4800"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#b7b7b7;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-dasharray:4, 3" />
+ <path
+ d="m 8.301801,400.93906 0,0 c 0,-4.25455 3.448989,-7.70352 7.703532,-7.70352 l 0,0 c 2.043104,0 4.002527,0.81161 5.44722,2.25632 1.444693,1.44467 2.256311,3.40411 2.256311,5.4472 l 0,0 c 0,4.25455 -3.448988,7.70355 -7.703531,7.70355 l 0,0 c -4.254543,0 -7.7035319,-3.449 -7.7035319,-7.70355 z"
+ id="path4802"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 33.63098,400.94022 0,0 c 0,-4.25452 3.44899,-7.70352 7.703533,-7.70352 l 0,0 c 2.043102,0 4.002525,0.81161 5.44722,2.25632 1.444691,1.4447 2.256313,3.40411 2.256313,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70355 -7.703533,7.70355 l 0,0 c -4.254543,0 -7.703533,-3.449 -7.703533,-7.70355 z"
+ id="path4804"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 41.32435,393.228"
+ id="path4806"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 41.32435,393.228"
+ id="path4808"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 117.4221,273.73578 16.00975,393.228"
+ id="path4810"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 16.00975,393.228"
+ id="path4812"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 57.94737,400.93848 0,0 c 0,-4.25455 3.44899,-7.70352 7.703533,-7.70352 l 0,0 c 2.043106,0 4.002525,0.81161 5.44722,2.25632 1.444694,1.44467 2.25631,3.40411 2.25631,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70353,7.70352 l 0,0 c -4.254543,0 -7.703533,-3.44897 -7.703533,-7.70352 z"
+ id="path4814"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 82.263756,400.93848 0,0 c 0,-4.25455 3.44899,-7.70352 7.703529,-7.70352 l 0,0 c 2.043106,0 4.002533,0.81161 5.447228,2.25632 1.444687,1.44467 2.256309,3.40411 2.256309,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.703537,7.70352 l 0,0 c -4.254539,0 -7.703529,-3.44897 -7.703529,-7.70352 z"
+ id="path4816"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 108.60574,400.93848 0,0 c 0,-4.25455 3.44898,-7.70352 7.70353,-7.70352 l 0,0 c 2.0431,0 4.00252,0.81161 5.44722,2.25632 1.44469,1.44467 2.2563,3.40411 2.2563,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70352,7.70352 l 0,0 c -4.25455,0 -7.70353,-3.44897 -7.70353,-7.70352 z"
+ id="path4818"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 132.92212,400.93848 0,0 c 0,-4.25455 3.44899,-7.70352 7.70354,-7.70352 l 0,0 c 2.0431,0 4.00251,0.81161 5.44722,2.25632 1.44468,1.44467 2.2563,3.40411 2.2563,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70352,7.70352 l 0,0 c -4.25455,0 -7.70354,-3.44897 -7.70354,-7.70352 z"
+ id="path4820"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 157.23851,400.93848 0,0 c 0,-4.25455 3.44899,-7.70352 7.70352,-7.70352 l 0,0 c 2.04311,0 4.00253,0.81161 5.44722,2.25632 1.4447,1.44467 2.25632,3.40411 2.25632,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70354,7.70352 l 0,0 c -4.25453,0 -7.70352,-3.44897 -7.70352,-7.70352 z"
+ id="path4822"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 65.645405,393.228"
+ id="path4824"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 65.645405,393.228"
+ id="path4826"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 117.4221,273.73578 164.94471,393.228"
+ id="path4828"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 164.94471,393.228"
+ id="path4830"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 117.4221,273.73578 89.952465,393.228"
+ id="path4832"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 89.952465,393.228"
+ id="path4834"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 117.4221,273.73578 140.63765,393.228"
+ id="path4836"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 140.63765,393.228"
+ id="path4838"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 117.4221,273.73578 116.31659,393.228"
+ id="path4840"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 117.4221,273.73578 116.31659,393.228"
+ id="path4842"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 181.5549,399.8878 0,0 c 0,-4.25455 3.44897,-7.70352 7.70352,-7.70352 l 0,0 c 2.04311,0 4.00253,0.81161 5.44722,2.25628 1.4447,1.44471 2.25632,3.40415 2.25632,5.44724 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70354,7.70352 l 0,0 c -4.25455,0 -7.70352,-3.44897 -7.70352,-7.70352 z"
+ id="path4844"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 205.87128,399.8878 0,0 c 0,-4.25455 3.44899,-7.70352 7.70354,-7.70352 l 0,0 c 2.0431,0 4.00253,0.81161 5.44722,2.25628 1.44468,1.44471 2.25631,3.40415 2.25631,5.44724 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70353,7.70352 l 0,0 c -4.25455,0 -7.70354,-3.44897 -7.70354,-7.70352 z"
+ id="path4846"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 230.18767,399.8878 0,0 c 0,-4.25455 3.44899,-7.70352 7.70354,-7.70352 l 0,0 c 2.0431,0 4.00251,0.81161 5.44722,2.25628 1.44468,1.44471 2.2563,3.40415 2.2563,5.44724 l 0,0 c 0,4.25455 -3.44898,7.70352 -7.70352,7.70352 l 0,0 c -4.25455,0 -7.70354,-3.44897 -7.70354,-7.70352 z"
+ id="path4848"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 117.4221,273.73578 71.84365,118.4567"
+ id="path4850"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 117.4221,273.73578 71.84365,118.4567"
+ id="path4852"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 117.4221,273.73578 96.15072,118.4567"
+ id="path4854"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 117.4221,273.73578 96.15072,118.4567"
+ id="path4856"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 117.4221,273.73578 120.47176,118.4567"
+ id="path4858"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 117.4221,273.73578 120.47176,118.4567"
+ id="path4860"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 109.71856,266.03226 0,0 c 0,-4.25455 3.44899,-7.70352 7.70354,-7.70352 l 0,0 c 2.0431,0 4.00252,0.81161 5.44722,2.25632 1.44468,1.44467 2.25631,3.40411 2.25631,5.4472 l 0,0 c 0,4.25455 -3.44899,7.70352 -7.70353,7.70352 l 0,0 c -4.25455,0 -7.70354,-3.44897 -7.70354,-7.70352 z"
+ id="path4862"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 111.36923,270.73413 c 3.14159,0 4.71238,-2.35443 6.28317,-4.70886 1.5708,-2.35443 3.14159,-4.7089 6.28318,-4.7089"
+ id="path4864"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 111.36923,270.73413 c 3.14159,0 4.71238,-2.35443 6.28317,-4.70886 1.5708,-2.35443 3.14159,-4.7089 6.28318,-4.7089"
+ id="path4866"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 846.92847,363.60492 0,0 c 0,-4.9e-4 4.3e-4,-8.9e-4 9.2e-4,-8.9e-4 l 105.84033,8.9e-4 c 1.8e-4,0 4.2e-4,9e-5 6.1e-4,2.4e-4 1.2e-4,1.9e-4 2.4e-4,4e-4 2.4e-4,6.5e-4 l -8.5e-4,23.72979 c 0,4.9e-4 -4.3e-4,8.6e-4 -9.2e-4,8.6e-4 l -105.84033,-8.6e-4 0,0 c -4.9e-4,0 -8.5e-4,-3.9e-4 -8.5e-4,-8.8e-4 z"
+ id="path4868"
+ inkscape:connector-curvature="0"
+ style="fill:#cccccc;fill-rule:nonzero" />
+ <path
+ d="m 722.254,364.3828 0,0 c 0,-4.6e-4 3.7e-4,-8.2e-4 8.5e-4,-8.2e-4 l 98.01074,8.2e-4 c 1.9e-4,0 4.3e-4,9e-5 5.5e-4,2.4e-4 1.9e-4,1.6e-4 2.5e-4,3.7e-4 2.5e-4,5.8e-4 l -8e-4,23.72986 c 0,4.3e-4 -3.6e-4,8e-4 -8.5e-4,8e-4 l -98.01074,-8e-4 0,0 c -4.3e-4,0 -7.9e-4,-3.6e-4 -7.9e-4,-8.2e-4 z"
+ id="path4870"
+ inkscape:connector-curvature="0"
+ style="fill:#cccccc;fill-rule:nonzero" />
+ <path
+ d="m 731.6505,376.2962 0,0 c 0,-4.08316 3.31006,-7.39319 7.39319,-7.39319 l 0,0 c 1.96075,0 3.84125,0.77893 5.22772,2.16541 1.38654,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31,7.39316 -7.39313,7.39316 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4872"
+ inkscape:connector-curvature="0"
+ style="fill:#434343;fill-rule:nonzero" />
+ <path
+ d="m 754.547,376.2727 0,0 c 0,-4.08316 3.31006,-7.39319 7.39319,-7.39319 l 0,0 c 1.96081,0 3.84131,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31006,7.39316 -7.39319,7.39316 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4874"
+ inkscape:connector-curvature="0"
+ style="fill:#666666;fill-rule:nonzero" />
+ <path
+ d="m 775.6335,376.2727 0,0 c 0,-4.08316 3.31006,-7.39319 7.39319,-7.39319 l 0,0 c 1.96081,0 3.84125,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31006,7.39316 -7.39319,7.39316 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4876"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 798.9702,376.27158 0,0 c 0,-4.08313 3.31006,-7.39319 7.39319,-7.39319 l 0,0 c 1.96081,0 3.84125,0.77893 5.22778,2.16544 1.38648,1.38647 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08313 -3.31006,7.39316 -7.39319,7.39316 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4878"
+ inkscape:connector-curvature="0"
+ style="fill:#efefef;fill-rule:nonzero" />
+ <path
+ d="m 857.57184,375.3713 0,0 c 0,-4.08313 3.31006,-7.39316 7.39319,-7.39316 l 0,0 c 1.96081,0 3.84131,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08316 -3.31006,7.39319 -7.39319,7.39319 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39319 z"
+ id="path4880"
+ inkscape:connector-curvature="0"
+ style="fill:#434343;fill-rule:nonzero" />
+ <path
+ d="m 881.88055,375.37244 0,0 c 0,-4.08313 3.31006,-7.39316 7.39319,-7.39316 l 0,0 c 1.96081,0 3.84131,0.7789 5.22778,2.16541 1.38648,1.38647 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08313 -3.31006,7.39319 -7.39319,7.39319 l 0,0 c -4.08313,0 -7.39319,-3.31006 -7.39319,-7.39319 z"
+ id="path4882"
+ inkscape:connector-curvature="0"
+ style="fill:#666666;fill-rule:nonzero" />
+ <path
+ d="m 788.2263,328.63565 0,0 c 0,-4.08316 3.31,-7.39319 7.39313,-7.39319 l 0,0 c 1.96081,0 3.84131,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31006,7.39316 -7.39319,7.39316 l 0,0 c -4.08313,0 -7.39313,-3.31003 -7.39313,-7.39316 z"
+ id="path4884"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 -56.56683,32.87646"
+ id="path4886"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 -56.56683,32.87646"
+ id="path4888"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,336.0288 -33.66882,32.84958"
+ id="path4890"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 -33.66882,32.84958"
+ id="path4892"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,336.0288 93.67371,31.93637"
+ id="path4894"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 93.67371,31.93637"
+ id="path4896"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,336.0288 -12.59723,32.84958"
+ id="path4898"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 -12.59723,32.84958"
+ id="path4900"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,336.0288 69.35211,31.93637"
+ id="path4902"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 69.35211,31.93637"
+ id="path4904"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,336.0288 10.74396,32.84958"
+ id="path4906"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 10.74396,32.84958"
+ id="path4908"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 905.2173,375.37076 0,0 c 0,-4.08313 3.31006,-7.39316 7.39319,-7.39316 l 0,0 c 1.96081,0 3.84131,0.7789 5.22778,2.16541 1.38648,1.38647 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08313 -3.31006,7.39319 -7.39319,7.39319 l 0,0 c -4.08313,0 -7.39319,-3.31006 -7.39319,-7.39319 z"
+ id="path4910"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 928.554,375.37076 0,0 c 0,-4.08313 3.31006,-7.39316 7.39319,-7.39316 l 0,0 c 1.96081,0 3.84125,0.7789 5.22778,2.16541 1.38648,1.38647 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08313 -3.31006,7.39319 -7.39319,7.39319 l 0,0 c -4.08313,0 -7.39319,-3.31006 -7.39319,-7.39319 z"
+ id="path4912"
+ inkscape:connector-curvature="0"
+ style="fill:#efefef;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 116.97461,31.93637"
+ id="path4914"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 116.97461,31.93637"
+ id="path4916"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,336.0288 140.31586,31.93637"
+ id="path4918"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,336.0288 140.31586,31.93637"
+ id="path4920"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 826.32306,328.63565 0,0 c 0,-4.08316 3.31006,-7.39319 7.39319,-7.39319 l 0,0 c 1.96075,0 3.84125,0.77893 5.22772,2.16541 1.38654,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31,7.39316 -7.39313,7.39316 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4922"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 -94.68097,32.87646"
+ id="path4924"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 -94.68097,32.87646"
+ id="path4926"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 -27.34332,32.84958"
+ id="path4928"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 -27.34332,32.84958"
+ id="path4930"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 55.55957,31.93637"
+ id="path4932"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 55.55957,31.93637"
+ id="path4934"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 -71.78296,32.84958"
+ id="path4936"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 -71.78296,32.84958"
+ id="path4938"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 31.23798,31.93637"
+ id="path4940"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 31.23798,31.93637"
+ id="path4942"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 -45.4469,35.02524"
+ id="path4944"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 -45.4469,35.02524"
+ id="path4946"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 78.90076,31.93637"
+ id="path4948"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 78.90076,31.93637"
+ id="path4950"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,336.0288 102.242,31.93637"
+ id="path4952"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,336.0288 102.242,31.93637"
+ id="path4954"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 864.42096,328.63565 0,0 c 0,-4.08316 3.31,-7.39319 7.39319,-7.39319 l 0,0 c 1.96075,0 3.84125,0.77893 5.22772,2.16541 1.38648,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31,7.39316 -7.39313,7.39316 l 0,0 c -4.08319,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4956"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="M 871.81415,336.0288 739.05933,368.90526"
+ id="path4958"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 871.81415,336.0288 739.05933,368.90526"
+ id="path4960"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,336.0288 -65.44403,32.84958"
+ id="path4962"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,336.0288 -65.44403,32.84958"
+ id="path4964"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,336.0288 17.44544,31.93637"
+ id="path4966"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,336.0288 17.44544,31.93637"
+ id="path4968"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 871.81415,336.0288 761.95734,368.87838"
+ id="path4970"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 871.81415,336.0288 761.95734,368.87838"
+ id="path4972"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,336.0288 -6.83588,31.93637"
+ id="path4974"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,336.0288 -6.83588,31.93637"
+ id="path4976"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,336.0288 -88.78522,32.84958"
+ id="path4978"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,336.0288 -88.78522,32.84958"
+ id="path4980"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,336.0288 40.78662,31.93637"
+ id="path4982"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,336.0288 40.78662,31.93637"
+ id="path4984"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,336.0288 64.12787,31.93637"
+ id="path4986"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,336.0288 64.12787,31.93637"
+ id="path4988"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 788.2263,299.19727 0,0 c 0,-4.08316 3.31,-7.39319 7.39313,-7.39319 l 0,0 c 1.96081,0 3.84131,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31006,7.39316 -7.39319,7.39316 l 0,0 c -4.08313,0 -7.39313,-3.31003 -7.39313,-7.39316 z"
+ id="path4990"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 826.32306,299.19727 0,0 c 0,-4.08316 3.31006,-7.39319 7.39319,-7.39319 l 0,0 c 1.96075,0 3.84125,0.77893 5.22772,2.16541 1.38654,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31,7.39316 -7.39313,7.39316 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4992"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 864.42096,299.19727 0,0 c 0,-4.08316 3.31,-7.39319 7.39319,-7.39319 l 0,0 c 1.96075,0 3.84125,0.77893 5.22772,2.16541 1.38648,1.3865 2.16541,3.26696 2.16541,5.22778 l 0,0 c 0,4.08313 -3.31,7.39316 -7.39313,7.39316 l 0,0 c -4.08319,0 -7.39319,-3.31003 -7.39319,-7.39316 z"
+ id="path4994"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,306.59042 0,14.65204"
+ id="path4996"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,306.59042 0,14.65204"
+ id="path4998"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,306.59042 0,14.65204"
+ id="path5000"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,306.59042 0,14.65204"
+ id="path5002"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,306.59042 0,14.65204"
+ id="path5004"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,306.59042 0,14.65204"
+ id="path5006"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,306.59042 -76.18799,14.65204"
+ id="path5008"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,306.59042 -76.18799,14.65204"
+ id="path5010"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 871.81415,306.59042 -38.11414,14.65204"
+ id="path5012"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 871.81415,306.59042 -38.11414,14.65204"
+ id="path5014"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,306.59042 38.11407,14.65204"
+ id="path5016"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,306.59042 38.11407,14.65204"
+ id="path5018"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,306.59042 38.11414,14.65204"
+ id="path5020"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,306.59042 38.11414,14.65204"
+ id="path5022"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,306.59042 -38.11414,14.65204"
+ id="path5024"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,306.59042 -38.11414,14.65204"
+ id="path5026"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,273.60336 -38.11414,18.21097"
+ id="path5028"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,273.60336 -38.11414,18.21097"
+ id="path5030"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,273.60336 0,18.21097"
+ id="path5032"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,273.60336 0,18.21097"
+ id="path5034"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 833.71625,273.60336 38.11407,18.21097"
+ id="path5036"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 833.71625,273.60336 38.11407,18.21097"
+ id="path5038"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 763.86664,402.68573 0,0 c 0,-4.08313 3.31006,-7.39316 7.39319,-7.39316 l 0,0 c 1.96081,0 3.84125,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08316 -3.31006,7.39319 -7.39319,7.39319 l 0,0 c -4.08313,0 -7.39319,-3.31003 -7.39319,-7.39319 z"
+ id="path5040"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 771.2598,388.1135 0,7.185"
+ id="path5042"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 771.2598,388.1135 0,7.185"
+ id="path5044"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 892.45593,402.68573 0,0 c 0,-4.08313 3.31,-7.39316 7.39313,-7.39316 l 0,0 c 1.96081,0 3.84131,0.77893 5.22778,2.16541 1.38648,1.3865 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08316 -3.31006,7.39319 -7.39319,7.39319 l 0,0 c -4.08313,0 -7.39313,-3.31003 -7.39313,-7.39319 z"
+ id="path5046"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 899.84906,387.3356 0,7.96393"
+ id="path5048"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 899.84906,387.3356 0,7.96393"
+ id="path5050"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 788.3015,330.95367 8.12622,0 4.51245,-7.81622"
+ id="path5052"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 788.3015,330.95367 8.12622,0 4.51245,-7.81622"
+ id="path5054"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 826.3613,330.95367 8.12622,0 4.51251,-7.81622"
+ id="path5056"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 826.3613,330.95367 8.12622,0 4.51251,-7.81622"
+ id="path5058"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 864.42065,330.95367 8.12622,0 4.51245,-7.81622"
+ id="path5060"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 864.42065,330.95367 8.12622,0 4.51245,-7.81622"
+ id="path5062"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 864.42065,301.47162 8.12622,0 4.51245,-7.81619"
+ id="path5064"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 864.42065,301.47162 8.12622,0 4.51245,-7.81619"
+ id="path5066"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 826.62317,301.47162 8.12622,0 4.51245,-7.81619"
+ id="path5068"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 826.62317,301.47162 8.12622,0 4.51245,-7.81619"
+ id="path5070"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 788.3015,301.47162 8.12622,0 4.51245,-7.81619"
+ id="path5072"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 788.3015,301.47162 8.12622,0 4.51245,-7.81619"
+ id="path5074"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 826.32306,266.21017 0,0 c 0,-4.08313 3.31006,-7.39316 7.39319,-7.39316 l 0,0 c 1.96075,0 3.84125,0.7789 5.22772,2.16541 1.38654,1.38647 2.16541,3.26697 2.16541,5.22775 l 0,0 c 0,4.08313 -3.31,7.39319 -7.39313,7.39319 l 0,0 c -4.08313,0 -7.39319,-3.31006 -7.39319,-7.39319 z"
+ id="path5076"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 827.6896,270.72263 c 3.0083,0 4.51245,-2.25958 6.0166,-4.51917 1.50415,-2.25958 3.0083,-4.51916 6.0166,-4.51916"
+ id="path5078"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 827.6896,270.72263 c 3.0083,0 4.51245,-2.25958 6.0166,-4.51917 1.50415,-2.25958 3.0083,-4.51916 6.0166,-4.51916"
+ id="path5080"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 411.3028,272.46396 51.11435,20.4736"
+ id="path5082"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 411.3028,272.46396 51.11435,20.4736"
+ id="path5084"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 411.3028,272.46396 87.09213,20.4736"
+ id="path5086"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 411.3028,272.46396 87.09213,20.4736"
+ id="path5088"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 411.3028,272.46396 123.05725,20.4736"
+ id="path5090"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 411.3028,272.46396 123.05725,20.4736"
+ id="path5092"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 260.54385,400.8314 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5094"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 282.57254,400.8314 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5096"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 304.60123,400.8314 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5098"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 326.6299,400.83032 0,0 c 0,-3.85425 3.12451,-6.97876 6.97876,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5100"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 349.5761,400.83032 0,0 c 0,-3.85425 3.12451,-6.97876 6.97876,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5102"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 372.5223,400.8314 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5104"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 267.5184,393.84781"
+ id="path5106"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 267.5184,393.84781"
+ id="path5108"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 411.3028,272.46396 289.53864,393.84781"
+ id="path5110"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 289.53864,393.84781"
+ id="path5112"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 411.3028,272.46396 379.49579,393.84781"
+ id="path5114"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 379.49579,393.84781"
+ id="path5116"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 411.3028,272.46396 311.58423,393.84781"
+ id="path5118"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 311.58423,393.84781"
+ id="path5120"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 411.3028,272.46396 356.56281,393.84781"
+ id="path5122"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 356.56281,393.84781"
+ id="path5124"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 411.3028,272.46396 333.60447,393.84781"
+ id="path5126"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 411.3028,272.46396 333.60447,393.84781"
+ id="path5128"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 404.32404,265.48517 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5130"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 405.61658,269.7447 c 2.84918,0 4.27377,-2.12659 5.69839,-4.25317 1.4246,-2.12662 2.84919,-4.25321 5.69837,-4.25321"
+ id="path5132"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 405.61658,269.7447 c 2.84918,0 4.27377,-2.12659 5.69839,-4.25317 1.4246,-2.12662 2.84919,-4.25321 5.69837,-4.25321"
+ id="path5134"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.61996,365.02457 0,0 c 0,-4e-4 3.4e-4,-7.3e-4 7.3e-4,-7.3e-4 l 89.61417,7.3e-4 c 1.9e-4,0 3.7e-4,9e-5 4.9e-4,2.1e-4 1.2e-4,1.6e-4 2.4e-4,3.4e-4 2.4e-4,5.5e-4 l -7.3e-4,22.39978 c 0,4.3e-4 -3.6e-4,7.7e-4 -7.9e-4,7.7e-4 l -89.61411,-7.7e-4 0,0 c -4.2e-4,0 -7.6e-4,-3e-4 -7.6e-4,-7.3e-4 z"
+ id="path5136"
+ inkscape:connector-curvature="0"
+ style="fill:#d9d9d9;fill-rule:nonzero" />
+ <path
+ d="m 401.27518,364.6629 0,0 c 0,-4e-4 3.1e-4,-7.3e-4 7.3e-4,-7.3e-4 l 87.64917,7.3e-4 c 2.2e-4,0 4e-4,9e-5 5.2e-4,2.1e-4 1.5e-4,1.6e-4 2.1e-4,3.4e-4 2.1e-4,5.2e-4 l -7.3e-4,22.39984 c 0,4e-4 -3e-4,7.1e-4 -7.3e-4,7.1e-4 l -87.64917,-7.1e-4 0,0 c -4e-4,0 -7.3e-4,-3.3e-4 -7.3e-4,-7.3e-4 z"
+ id="path5138"
+ inkscape:connector-curvature="0"
+ style="fill:#d9d9d9;fill-rule:nonzero" />
+ <path
+ d="m 405.2727,375.9086 0,0 c 0,-3.85425 3.12451,-6.97876 6.97879,-6.97876 l 0,0 c 1.85086,0 3.62595,0.73526 4.93472,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97876,6.97879 l 0,0 c -3.85428,0 -6.97879,-3.12451 -6.97879,-6.97879 z"
+ id="path5140"
+ inkscape:connector-curvature="0"
+ style="fill:#434343;fill-rule:nonzero" />
+ <path
+ d="m 426.88586,375.8864 0,0 c 0,-3.85425 3.12448,-6.97876 6.97876,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85428,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5142"
+ inkscape:connector-curvature="0"
+ style="fill:#666666;fill-rule:nonzero" />
+ <path
+ d="m 446.7904,375.8864 0,0 c 0,-3.85425 3.12451,-6.97876 6.97879,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85428,0 -6.97879,-3.12451 -6.97879,-6.97879 z"
+ id="path5144"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 468.8191,375.88538 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97876 -6.97879,6.97876 l 0,0 c -3.85428,0 -6.97879,-3.12448 -6.97879,-6.97876 z"
+ id="path5146"
+ inkscape:connector-curvature="0"
+ style="fill:#b7b7b7;fill-rule:nonzero" />
+ <path
+ d="m 502.69196,375.76062 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62595,0.73526 4.93472,2.04404 1.30878,1.30877 2.04407,3.08386 2.04407,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97876 -6.97879,6.97876 l 0,0 c -3.85428,0 -6.97879,-3.12448 -6.97879,-6.97876 z"
+ id="path5148"
+ inkscape:connector-curvature="0"
+ style="fill:#b7b7b7;fill-rule:nonzero" />
+ <path
+ d="m 525.6382,375.76166 0,0 c 0,-3.85425 3.12451,-6.97876 6.97876,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04401,3.08386 2.04401,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97876,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5150"
+ inkscape:connector-curvature="0"
+ style="fill:#999999;fill-rule:nonzero" />
+ <path
+ d="m 455.45737,330.91946 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30875,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97879,6.97879 l 0,0 c -3.85428,0 -6.97879,-3.12451 -6.97879,-6.97879 z"
+ id="path5152"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 -50.17621,31.02103"
+ id="path5154"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 -50.17621,31.02103"
+ id="path5156"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,337.89825 -28.56165,31.02103"
+ id="path5158"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 -28.56165,31.02103"
+ id="path5160"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,337.89825 70.16809,30.89426"
+ id="path5162"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 70.16809,30.89426"
+ id="path5164"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,337.89825 -8.65851,31.02103"
+ id="path5166"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 -8.65851,31.02103"
+ id="path5168"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,337.89825 47.2478,30.89426"
+ id="path5170"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 47.2478,30.89426"
+ id="path5172"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,337.89825 13.34906,31.02103"
+ id="path5174"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 13.34906,31.02103"
+ id="path5176"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 547.6669,375.76007 0,0 c 0,-3.85425 3.12451,-6.97876 6.97876,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04401,3.08386 2.04401,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97876,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5178"
+ inkscape:connector-curvature="0"
+ style="fill:#434343;fill-rule:nonzero" />
+ <path
+ d="m 569.69556,375.76007 0,0 c 0,-3.85425 3.12451,-6.97876 6.97876,-6.97876 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04401,3.08386 2.04401,4.93472 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97876,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5180"
+ inkscape:connector-curvature="0"
+ style="fill:#666666;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 92.22638,30.89426"
+ id="path5182"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 92.22638,30.89426"
+ id="path5184"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,337.89825 114.23395,30.89426"
+ id="path5186"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,337.89825 114.23395,30.89426"
+ id="path5188"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 491.4188,330.91946 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62595,0.73526 4.93472,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12448,6.97879 -6.97876,6.97879 l 0,0 c -3.85428,0 -6.97879,-3.12451 -6.97879,-6.97879 z"
+ id="path5190"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 -86.14136,31.02103"
+ id="path5192"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 -86.14136,31.02103"
+ id="path5194"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 -22.61606,31.02103"
+ id="path5196"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 -22.61606,31.02103"
+ id="path5198"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 34.203,30.89426"
+ id="path5200"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 34.203,30.89426"
+ id="path5202"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 -64.53946,31.02103"
+ id="path5204"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 -64.53946,31.02103"
+ id="path5206"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 11.26999,30.89426"
+ id="path5208"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 11.26999,30.89426"
+ id="path5210"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 -39.67953,33.03668"
+ id="path5212"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 -39.67953,33.03668"
+ id="path5214"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 56.2486,30.89426"
+ id="path5216"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 56.2486,30.89426"
+ id="path5218"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,337.89825 78.2688,30.89426"
+ id="path5220"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,337.89825 78.2688,30.89426"
+ id="path5222"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 527.3813,330.91946 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04401,3.08386 2.04401,4.93475 l 0,0 c 0,3.85428 -3.12445,6.97879 -6.97876,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5224"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="M 534.36005,337.89825 412.2536,368.91928"
+ id="path5226"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 534.36005,337.89825 412.2536,368.91928"
+ id="path5228"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,337.89825 -58.55582,31.02103"
+ id="path5230"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,337.89825 -58.55582,31.02103"
+ id="path5232"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,337.89825 -1.73676,30.89426"
+ id="path5234"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,337.89825 -1.73676,30.89426"
+ id="path5236"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="M 534.36005,337.89825 433.8555,368.91928"
+ id="path5238"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="M 534.36005,337.89825 433.8555,368.91928"
+ id="path5240"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,337.89825 -24.6951,30.89426"
+ id="path5242"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,337.89825 -24.6951,30.89426"
+ id="path5244"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,337.89825 -80.60141,31.02103"
+ id="path5246"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,337.89825 -80.60141,31.02103"
+ id="path5248"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,337.89825 20.28345,30.89426"
+ id="path5250"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,337.89825 20.28345,30.89426"
+ id="path5252"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,337.89825 42.30371,30.89426"
+ id="path5254"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,337.89825 42.30371,30.89426"
+ id="path5256"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 455.45737,299.91113 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30875,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85425 -3.12451,6.97876 -6.97879,6.97876 l 0,0 c -3.85428,0 -6.97879,-3.12451 -6.97879,-6.97876 z"
+ id="path5258"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 491.4188,299.91113 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62595,0.73526 4.93472,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85425 -3.12448,6.97876 -6.97876,6.97876 l 0,0 c -3.85428,0 -6.97879,-3.12451 -6.97879,-6.97876 z"
+ id="path5260"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 527.3813,299.91113 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04401,3.08386 2.04401,4.93475 l 0,0 c 0,3.85425 -3.12445,6.97876 -6.97876,6.97876 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97876 z"
+ id="path5262"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,306.8899 0,17.03812"
+ id="path5264"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,306.8899 0,17.03812"
+ id="path5266"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,306.8899 0,17.03812"
+ id="path5268"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,306.8899 0,17.03812"
+ id="path5270"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,306.8899 0,17.03812"
+ id="path5272"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,306.8899 0,17.03812"
+ id="path5274"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,306.8899 -71.9429,17.03812"
+ id="path5276"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,306.8899 -71.9429,17.03812"
+ id="path5278"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 534.36005,306.8899 -35.96512,17.03812"
+ id="path5280"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 534.36005,306.8899 -35.96512,17.03812"
+ id="path5282"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,306.8899 35.96509,17.03812"
+ id="path5284"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,306.8899 35.96509,17.03812"
+ id="path5286"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 462.43616,306.8899 35.96512,17.03812"
+ id="path5288"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,306.8899 35.96512,17.03812"
+ id="path5290"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 498.39758,306.8899 -35.96512,17.03812"
+ id="path5292"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 498.39758,306.8899 -35.96512,17.03812"
+ id="path5294"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 455.4576,301.80026 7.67075,0 4.25952,-7.37811"
+ id="path5296"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 455.4576,301.80026 7.67075,0 4.25952,-7.37811"
+ id="path5298"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 491.41956,301.80026 7.67075,0 4.25952,-7.37811"
+ id="path5300"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 491.41956,301.80026 7.67075,0 4.25952,-7.37811"
+ id="path5302"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 527.97626,301.80026 7.67078,0 4.25952,-7.37811"
+ id="path5304"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 527.97626,301.80026 7.67078,0 4.25952,-7.37811"
+ id="path5306"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 491.78485,333.51288 7.67075,0 4.25952,-7.37811"
+ id="path5308"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 491.78485,333.51288 7.67075,0 4.25952,-7.37811"
+ id="path5310"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 527.61096,333.51288 7.67078,0 4.25952,-7.37811"
+ id="path5312"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 527.61096,333.51288 7.67078,0 4.25952,-7.37811"
+ id="path5314"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 455.8229,333.51288 7.67075,0 4.25952,-7.37811"
+ id="path5316"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 455.8229,333.51288 7.67075,0 4.25952,-7.37811"
+ id="path5318"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#434343;stroke-width:3;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 445.10013,387.06348 0,6.80762"
+ id="path5320"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 445.10013,387.06348 0,6.80762"
+ id="path5322"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 543.42737,387.4251 0,6.42734"
+ id="path5324"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 543.42737,387.4251 0,6.42734"
+ id="path5326"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 536.4481,400.8322 0,0 c 0,-3.85428 3.12451,-6.97879 6.97876,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73529 4.93475,2.04404 1.30878,1.30877 2.04407,3.08386 2.04407,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97879 -6.97882,6.97879 l 0,0 c -3.85425,0 -6.97876,-3.12451 -6.97876,-6.97879 z"
+ id="path5328"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 438.12134,400.8317 0,0 c 0,-3.85428 3.12451,-6.97879 6.97879,-6.97879 l 0,0 c 1.85089,0 3.62598,0.73526 4.93475,2.04404 1.30878,1.30877 2.04404,3.08386 2.04404,4.93475 l 0,0 c 0,3.85428 -3.12451,6.97876 -6.97879,6.97876 l 0,0 c -3.85428,0 -6.97879,-3.12448 -6.97879,-6.97876 z"
+ id="path5330"
+ inkscape:connector-curvature="0"
+ style="fill:#f1c232;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,306.8899 71.93701,17.03937"
+ id="path5332"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 462.43616,306.8899 71.93701,17.03937"
+ id="path5334"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 795.61945,306.59042 76.18896,14.64566"
+ id="path5336"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 795.61945,306.59042 76.18896,14.64566"
+ id="path5338"
+ inkscape:connector-curvature="0"
+ style="fill-rule:nonzero;stroke:#999999;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round" />
+ <path
+ d="m 25.223194,416.41708 182.582686,0 0,34.48819 -182.582686,0 z"
+ id="path5340"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 81.940315,436.93707 -2.046875,-8.59375 1.765625,0 1.296875,5.90625 1.578125,-5.90625 2.0625,0 1.5,6 1.3125,-6 1.75,0 -2.078125,8.59375 -1.84375,0 -1.71875,-6.42188 -1.703125,6.42188 -1.875,0 z m 10.091797,-7.0625 0,-1.53125 1.65625,0 0,1.53125 -1.65625,0 z m 0,7.0625 0,-6.21875 1.65625,0 0,6.21875 -1.65625,0 z m 9.037108,0 -1.515624,0 0,-0.92188 q -0.390625,0.54688 -0.90625,0.8125 -0.515625,0.25 -1.046875,0.25 -1.078125,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.734375,-2.39062 0.75,-0.82813 1.890625,-0.82813 1.03125,0 1.796875,0.85938 l 0,-3.09375 1.640624,0 0,8.59375 z m -4.390624,-3.25 q 0,1 0.28125,1.4375 0.390625,0.65625 1.109375,0.65625 0.5625,0 0.953125,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.390625,-1.53125 -0.375,-0.48438 -0.984375,-0.48438 -0.578125,0 -0.984375,0.46875 -0.390625,0.46875 -0.390625,1.39063 z m 9.626954,1.26562 1.64062,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70312,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04687,-0.875 1.39063,0 2.1875,0.92188 0.8125,0.90625 0.76563,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95312,0.40625 0.375,0 0.64063,-0.20313 0.26562,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85938,-0.375 -0.53125,0 -0.89062,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 6.28906,3.64063 0,-8.59375 2.60938,0 1.54687,5.85937 1.54688,-5.85937 2.59375,0 0,8.59375 -1.60938,0 0,-6.76563 -1.70312,6.76563 -1.67188,0 -1.6875,-6.76563 0,6.76563 -1.625,0 z m 9.63672,-3.20313 q 0,-0.8125 0.40625,-1.57812 0.40625,-0.78125 1.14063,-1.17188 0.73437,-0.40625 1.65625,-0.40625 1.40625,0 2.3125,0.92188 0.90624,0.90625 0.90624,2.3125 0,1.40625 -0.92187,2.34375 -0.90625,0.92187 -2.28125,0.92187 -0.85937,0 -1.64062,-0.39062 -0.76563,-0.39063 -1.17188,-1.125 -0.40625,-0.75 -0.40625,-1.82813 z m 1.6875,0.0937 q 0,0.92188 0.4375,1.42188 0.4375,0.48437 
1.07813,0.48437 0.65625,0 1.07812,-0.48437 0.4375,-0.5 0.4375,-1.4375 0,-0.90625 -0.4375,-1.39063 -0.42187,-0.5 -1.07812,-0.5 -0.64063,0 -1.07813,0.5 -0.4375,0.48438 -0.4375,1.40625 z m 11.7207,3.10938 -1.51563,0 0,-0.92188 q -0.39062,0.54688 -0.90625,0.8125 -0.51562,0.25 -1.04687,0.25 -1.07813,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73437,-2.39062 0.75,-0.82813 1.89063,-0.82813 1.03125,0 1.79687,0.85938 l 0,-3.09375 1.64063,0 0,8.59375 z m -4.39063,-3.25 q 0,1 0.28125,1.4375 0.39063,0.65625 1.10938,0.65625 0.5625,0 0.95312,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.39062,-1.53125 -0.375,-0.48438 -0.98438,-0.48438 -0.57812,0 -0.98437,0.46875 -0.39063,0.46875 -0.39063,1.39063 z m 9.62695,1.26562 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 2.97071,3.64063 0,-8.59375 1.65625,0 0,8.59375 -1.65625,0 z m 2.75586,-1.78125 1.65625,-0.25 q 0.10937,0.48437 0.42187,0.73437 0.32813,0.25 0.90625,0.25 0.64063,0 0.95313,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10938,-0.3125 -0.125,-0.125 -0.54687,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73438,-0.51562 -0.73438,-1.40625 0,-0.8125 0.625,-1.35937 0.64063,-0.54688 1.98438,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85937,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79687,-0.20312 -0.64063,0 -0.92188,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32813,0.29687 1.84375,0.73437 0.51563,0.4375 0.51563,1.21875 
0,0.85938 -0.71875,1.48438 -0.70313,0.60937 -2.10938,0.60937 -1.26562,0 -2.01562,-0.51562 -0.73438,-0.51563 -0.96875,-1.40625 z"
+ id="path5342"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 743.672,415.30432 182.5827,0 0,36.31497 -182.5827,0 z"
+ id="path5344"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 798.83057,427.23056 3.15625,0 q 1.07812,0 1.64062,0.17188 0.75,0.21875 1.28125,0.79687 0.54688,0.5625 0.82813,1.39063 0.28125,0.8125 0.28125,2.01562 0,1.0625 -0.25,1.82813 -0.32813,0.9375 -0.92188,1.51562 -0.45312,0.45313 -1.21875,0.6875 -0.57812,0.1875 -1.54687,0.1875 l -3.25,0 0,-8.59375 z m 1.73437,1.45313 0,5.6875 1.28125,0 q 0.73438,0 1.0625,-0.0781 0.42188,-0.10937 0.6875,-0.35937 0.28125,-0.25 0.45313,-0.82813 0.1875,-0.57812 0.1875,-1.57812 0,-0.98438 -0.1875,-1.51563 -0.17188,-0.54687 -0.48438,-0.84375 -0.3125,-0.29687 -0.79687,-0.40625 -0.375,-0.0781 -1.4375,-0.0781 l -0.76563,0 z m 10.5254,5.15625 1.64062,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70312,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14063 0,-1.54687 0.8125,-2.42187 0.8125,-0.875 2.04687,-0.875 1.39063,0 2.1875,0.92187 0.8125,0.90625 0.76563,2.79688 l -4.125,0 q 0.0312,0.73437 0.40625,1.14062 0.375,0.40625 0.95312,0.40625 0.375,0 0.64063,-0.20312 0.26562,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85938,-0.375 -0.53125,0 -0.89062,0.39062 -0.34375,0.40625 -0.34375,1.07813 l 2.46875,0 z m 6.58007,1.65625 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14063 0,-1.54687 0.8125,-2.42187 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92187 0.8125,0.90625 0.76562,2.79688 l -4.125,0 q 0.0312,0.73437 0.40625,1.14062 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20312 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39062 -0.34375,0.40625 -0.34375,1.07813 l 2.46875,0 z m 2.92383,-2.57813 1.53125,0 0,0.90625 q 0.3125,-0.46875 0.8125,-0.75 0.51563,-0.29687 1.14063,-0.29687 1.07812,0 1.82812,0.84375 0.76563,0.84375 0.76563,2.375 0,1.54687 -0.76563,2.42187 -0.76562,0.85938 -1.84375,0.85938 -0.51562,0 -0.9375,-0.20313 -0.42187,-0.20312 
-0.875,-0.70312 l 0,3.14062 -1.65625,0 0,-8.59375 z m 1.625,3 q 0,1.04688 0.42188,1.54688 0.42187,0.5 1.01562,0.5 0.57813,0 0.95313,-0.45313 0.375,-0.46875 0.375,-1.51562 0,-0.96875 -0.39063,-1.4375 -0.39062,-0.48438 -0.96875,-0.48438 -0.60937,0 -1.01562,0.46875 -0.39063,0.46875 -0.39063,1.375 z m 9.07031,3.21875 0,-8.59375 2.60938,0 1.54687,5.85938 1.54688,-5.85938 2.59375,0 0,8.59375 -1.60938,0 0,-6.76562 -1.70312,6.76562 -1.67188,0 -1.6875,-6.76562 0,6.76562 -1.625,0 z m 9.63672,-3.20312 q 0,-0.8125 0.40625,-1.57813 0.40625,-0.78125 1.14063,-1.17187 0.73437,-0.40625 1.65625,-0.40625 1.40625,0 2.3125,0.92187 0.90625,0.90625 0.90625,2.3125 0,1.40625 -0.92188,2.34375 -0.90625,0.92188 -2.28125,0.92188 -0.85937,0 -1.64062,-0.39063 -0.76563,-0.39062 -1.17188,-1.125 -0.40625,-0.75 -0.40625,-1.82812 z m 1.6875,0.0937 q 0,0.92187 0.4375,1.42187 0.4375,0.48438 1.07813,0.48438 0.65625,0 1.07812,-0.48438 0.4375,-0.5 0.4375,-1.4375 0,-0.90625 -0.4375,-1.39062 -0.42187,-0.5 -1.07812,-0.5 -0.64063,0 -1.07813,0.5 -0.4375,0.48437 -0.4375,1.40625 z m 11.72071,3.10937 -1.51563,0 0,-0.92187 q -0.39062,0.54687 -0.90625,0.8125 -0.51562,0.25 -1.04687,0.25 -1.07813,0 -1.84375,-0.85938 -0.75,-0.875 -0.75,-2.42187 0,-1.57813 0.73437,-2.39063 0.75,-0.82812 1.89063,-0.82812 1.03125,0 1.79687,0.85937 l 0,-3.09375 1.64063,0 0,8.59375 z m -4.39063,-3.25 q 0,1 0.28125,1.4375 0.39063,0.65625 1.10938,0.65625 0.5625,0 0.95312,-0.48437 0.40625,-0.48438 0.40625,-1.45313 0,-1.0625 -0.39062,-1.53125 -0.375,-0.48437 -0.98438,-0.48437 -0.57812,0 -0.98437,0.46875 -0.39063,0.46875 -0.39063,1.39062 z m 9.62695,1.26563 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14063 0,-1.54687 0.8125,-2.42187 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92187 0.8125,0.90625 0.76562,2.79688 l -4.125,0 q 0.0312,0.73437 0.40625,1.14062 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20312 0.26563,-0.21875 0.40625,-0.6875 z m 
0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39062 -0.34375,0.40625 -0.34375,1.07813 l 2.46875,0 z m 2.97071,3.64062 0,-8.59375 1.65625,0 0,8.59375 -1.65625,0 z m 2.75586,-1.78125 1.65625,-0.25 q 0.10937,0.48438 0.42187,0.73438 0.32813,0.25 0.90625,0.25 0.64063,0 0.95313,-0.23438 0.21875,-0.17187 0.21875,-0.4375 0,-0.1875 -0.10938,-0.3125 -0.125,-0.125 -0.54687,-0.21875 -2,-0.4375 -2.53125,-0.79687 -0.73438,-0.51563 -0.73438,-1.40625 0,-0.8125 0.625,-1.35938 0.64063,-0.54687 1.98438,-0.54687 1.28125,0 1.90625,0.42187 0.625,0.40625 0.85937,1.21875 l -1.5625,0.28125 q -0.0937,-0.35937 -0.375,-0.54687 -0.28125,-0.20313 -0.79687,-0.20313 -0.64063,0 -0.92188,0.1875 -0.1875,0.125 -0.1875,0.32813 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45312 1.32813,0.29688 1.84375,0.73438 0.51563,0.4375 0.51563,1.21875 0,0.85937 -0.71875,1.48437 -0.70313,0.60938 -2.10938,0.60938 -1.26562,0 -2.01562,-0.51563 -0.73438,-0.51562 -0.96875,-1.40625 z"
+ id="path5346"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 356.7665,416.4171 182.58267,0 0,34.48819 -182.58267,0 z"
+ id="path5348"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 391.14474,436.9371 -2.04687,-8.59375 1.76562,0 1.29688,5.90625 1.57812,-5.90625 2.0625,0 1.5,6 1.3125,-6 1.75,0 -2.07812,8.59375 -1.84375,0 -1.71875,-6.42188 -1.70313,6.42188 -1.875,0 z m 10.0918,-7.0625 0,-1.53125 1.65625,0 0,1.53125 -1.65625,0 z m 0,7.0625 0,-6.21875 1.65625,0 0,6.21875 -1.65625,0 z m 9.03711,0 -1.51563,0 0,-0.92188 q -0.39062,0.54688 -0.90625,0.8125 -0.51562,0.25 -1.04687,0.25 -1.07813,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73437,-2.39062 0.75,-0.82813 1.89063,-0.82813 1.03125,0 1.79687,0.85938 l 0,-3.09375 1.64063,0 0,8.59375 z m -4.39063,-3.25 q 0,1 0.28125,1.4375 0.39063,0.65625 1.10938,0.65625 0.5625,0 0.95312,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.39062,-1.53125 -0.375,-0.48438 -0.98438,-0.48438 -0.57812,0 -0.98437,0.46875 -0.39063,0.46875 -0.39063,1.39063 z m 9.62695,1.26562 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 13.91407,2.59375 -0.98438,1.26563 q -0.73437,-0.35938 -1.40625,-0.98438 -0.53125,0.46875 -1.125,0.70313 -0.57812,0.21875 -1.39062,0.21875 -1.59375,0 -2.40625,-0.90625 -0.625,-0.70313 -0.625,-1.60938 0,-0.82812 0.48437,-1.48437 0.5,-0.65625 1.46875,-1.14063 -0.4375,-0.53125 -0.65625,-1 -0.21875,-0.46875 -0.21875,-0.89062 0,-0.78125 0.625,-1.3125 0.625,-0.54688 1.78125,-0.54688 1.10938,0 1.73438,0.57813 0.625,0.5625 0.625,1.375 0,0.51562 -0.3125,0.98437 -0.3125,0.46875 -1.25,1.0625 l 1.1875,1.57813 q 0.21875,-0.375 0.375,-0.96875 l 1.48437,0.32812 q -0.21875,0.79688 -0.39062,1.17188 
-0.15625,0.35937 -0.34375,0.60937 0.26562,0.25 0.70312,0.5625 0.4375,0.29688 0.64063,0.40625 z m -4.48438,-4.67187 0.45313,-0.34375 q 0.48437,-0.375 0.48437,-0.75 0,-0.3125 -0.23437,-0.53125 -0.23438,-0.23438 -0.64063,-0.23438 -0.39062,0 -0.60937,0.20313 -0.21875,0.1875 -0.21875,0.45312 0,0.29688 0.375,0.73438 l 0.39062,0.46875 z m -0.64062,1.78125 q -0.5625,0.29687 -0.84375,0.70312 -0.28125,0.39063 -0.28125,0.8125 0,0.54688 0.34375,0.89063 0.34375,0.32812 0.9375,0.32812 0.39062,0 0.73437,-0.15625 0.35938,-0.15625 0.78125,-0.5 l -1.67187,-2.07812 z m 9.53125,-4.65625 3.15625,0 q 1.07812,0 1.64062,0.17187 0.75,0.21875 1.28125,0.79688 0.54688,0.5625 0.82813,1.39062 0.28125,0.8125 0.28125,2.01563 0,1.0625 -0.25,1.82812 -0.32813,0.9375 -0.92188,1.51563 -0.45312,0.45312 -1.21875,0.6875 -0.57812,0.1875 -1.54687,0.1875 l -3.25,0 0,-8.59375 z m 1.73437,1.45312 0,5.6875 1.28125,0 q 0.73438,0 1.0625,-0.0781 0.42188,-0.10938 0.6875,-0.35938 0.28125,-0.25 0.45313,-0.82812 0.1875,-0.57813 0.1875,-1.57813 0,-0.98437 -0.1875,-1.51562 -0.17188,-0.54688 -0.48438,-0.84375 -0.3125,-0.29688 -0.79687,-0.40625 -0.375,-0.0781 -1.4375,-0.0781 l -0.76563,0 z m 10.52539,5.15625 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 6.58008,1.65625 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 
0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 2.92383,-2.57812 1.53125,0 0,0.90625 q 0.3125,-0.46875 0.8125,-0.75 0.51562,-0.29688 1.14062,-0.29688 1.07813,0 1.82813,0.84375 0.76562,0.84375 0.76562,2.375 0,1.54688 -0.76562,2.42188 -0.76563,0.85937 -1.84375,0.85937 -0.51563,0 -0.9375,-0.20312 -0.42188,-0.20313 -0.875,-0.70313 l 0,3.14063 -1.65625,0 0,-8.59375 z m 1.625,3 q 0,1.04687 0.42187,1.54687 0.42188,0.5 1.01563,0.5 0.57812,0 0.95312,-0.45312 0.375,-0.46875 0.375,-1.51563 0,-0.96875 -0.39062,-1.4375 -0.39063,-0.48437 -0.96875,-0.48437 -0.60938,0 -1.01563,0.46875 -0.39062,0.46875 -0.39062,1.375 z m 9.07031,3.21875 0,-8.59375 2.60938,0 1.54687,5.85937 1.54688,-5.85937 2.59375,0 0,8.59375 -1.60938,0 0,-6.76563 -1.70312,6.76563 -1.67188,0 -1.6875,-6.76563 0,6.76563 -1.625,0 z m 9.63672,-3.20313 q 0,-0.8125 0.40625,-1.57812 0.40625,-0.78125 1.14063,-1.17188 0.73437,-0.40625 1.65625,-0.40625 1.40625,0 2.3125,0.92188 0.90625,0.90625 0.90625,2.3125 0,1.40625 -0.92188,2.34375 -0.90625,0.92187 -2.28125,0.92187 -0.85937,0 -1.64062,-0.39062 -0.76563,-0.39063 -1.17188,-1.125 -0.40625,-0.75 -0.40625,-1.82813 z m 1.6875,0.0937 q 0,0.92188 0.4375,1.42188 0.4375,0.48437 1.07813,0.48437 0.65625,0 1.07812,-0.48437 0.4375,-0.5 0.4375,-1.4375 0,-0.90625 -0.4375,-1.39063 -0.42187,-0.5 -1.07812,-0.5 -0.64063,0 -1.07813,0.5 -0.4375,0.48438 -0.4375,1.40625 z m 11.7207,3.10938 -1.51562,0 0,-0.92188 q -0.39063,0.54688 -0.90625,0.8125 -0.51563,0.25 -1.04688,0.25 -1.07812,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73438,-2.39062 0.75,-0.82813 1.89062,-0.82813 1.03125,0 1.79688,0.85938 l 0,-3.09375 1.64062,0 0,8.59375 z m -4.39062,-3.25 q 0,1 0.28125,1.4375 0.39062,0.65625 1.10937,0.65625 0.5625,0 0.95313,-0.48438 0.40625,-0.48437 
0.40625,-1.45312 0,-1.0625 -0.39063,-1.53125 -0.375,-0.48438 -0.98437,-0.48438 -0.57813,0 -0.98438,0.46875 -0.39062,0.46875 -0.39062,1.39063 z m 9.62695,1.26562 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 2.9707,3.64063 0,-8.59375 1.65625,0 0,8.59375 -1.65625,0 z m 2.75586,-1.78125 1.65625,-0.25 q 0.10938,0.48437 0.42188,0.73437 0.32812,0.25 0.90625,0.25 0.64062,0 0.95312,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10937,-0.3125 -0.125,-0.125 -0.54688,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73437,-0.51562 -0.73437,-1.40625 0,-0.8125 0.625,-1.35937 0.64062,-0.54688 1.98437,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85938,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79688,-0.20312 -0.64062,0 -0.92187,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32812,0.29687 1.84375,0.73437 0.51562,0.4375 0.51562,1.21875 0,0.85938 -0.71875,1.48438 -0.70312,0.60937 -2.10937,0.60937 -1.26563,0 -2.01563,-0.51562 -0.73437,-0.51563 -0.96875,-1.40625 z"
+ id="path5350"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 601.66156,300.39102 120.66144,0 0,30.11023 -120.66144,0 z"
+ id="path5352"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 621.5206,320.911 0,-8.59375 1.73437,0 0,3.39062 3.40625,0 0,-3.39062 1.73438,0 0,8.59375 -1.73438,0 0,-3.75 -3.40625,0 0,3.75 -1.73437,0 z m 8.65039,-7.0625 0,-1.53125 1.65625,0 0,1.53125 -1.65625,0 z m 0,7.0625 0,-6.21875 1.65625,0 0,6.21875 -1.65625,0 z m 9.03711,0 -1.51563,0 0,-0.92188 q -0.39062,0.54688 -0.90625,0.8125 -0.51562,0.25 -1.04687,0.25 -1.07813,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73437,-2.39062 0.75,-0.82813 1.89063,-0.82813 1.03125,0 1.79687,0.85938 l 0,-3.09375 1.64063,0 0,8.59375 z m -4.39063,-3.25 q 0,1 0.28125,1.4375 0.39063,0.65625 1.10938,0.65625 0.5625,0 0.95312,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.39062,-1.53125 -0.375,-0.48438 -0.98438,-0.48438 -0.57812,0 -0.98437,0.46875 -0.39063,0.46875 -0.39063,1.39063 z m 11.72071,3.25 -1.51563,0 0,-0.92188 q -0.39062,0.54688 -0.90625,0.8125 -0.51562,0.25 -1.04687,0.25 -1.07813,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73437,-2.39062 0.75,-0.82813 1.89063,-0.82813 1.03125,0 1.79687,0.85938 l 0,-3.09375 1.64063,0 0,8.59375 z m -4.39063,-3.25 q 0,1 0.28125,1.4375 0.39063,0.65625 1.10938,0.65625 0.5625,0 0.95312,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.39062,-1.53125 -0.375,-0.48438 -0.98438,-0.48438 -0.57812,0 -0.98437,0.46875 -0.39063,0.46875 -0.39063,1.39063 z m 9.62696,1.26562 1.64062,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70312,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04687,-0.875 1.39063,0 2.1875,0.92188 0.8125,0.90625 0.76563,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95312,0.40625 0.375,0 0.64063,-0.20313 0.26562,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85938,-0.375 -0.53125,0 -0.89062,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 8.62695,3.64063 -1.64063,0 0,-3.17188 q 0,-1.01562 -0.10937,-1.3125 -0.0937,-0.29687 -0.34375,-0.45312 
-0.23438,-0.17188 -0.5625,-0.17188 -0.4375,0 -0.78125,0.23438 -0.32813,0.23437 -0.45313,0.625 -0.125,0.39062 -0.125,1.4375 l 0,2.8125 -1.65625,0 0,-6.21875 1.53125,0 0,0.90625 q 0.8125,-1.04688 2.0625,-1.04688 0.53125,0 0.98438,0.20313 0.45312,0.1875 0.6875,0.5 0.23437,0.29687 0.3125,0.6875 0.0937,0.375 0.0937,1.09375 l 0,3.875 z m 5.07031,0 0,-8.51563 1.73438,0 0,7.0625 4.3125,0 0,1.45313 -6.04688,0 z m 8.50195,-4.32813 -1.5,-0.26562 q 0.25,-0.90625 0.85938,-1.32813 0.625,-0.4375 1.84375,-0.4375 1.09375,0 1.625,0.26563 0.54687,0.25 0.76562,0.65625 0.21875,0.39062 0.21875,1.46875 l -0.0156,1.92187 q 0,0.82813 0.0781,1.21875 0.0781,0.375 0.29688,0.82813 l -1.625,0 q -0.0625,-0.17188 -0.15625,-0.48438 -0.0469,-0.15625 -0.0625,-0.20312 -0.42188,0.42187 -0.90625,0.625 -0.46875,0.20312 -1.01563,0.20312 -0.96875,0 -1.53125,-0.51562 -0.54687,-0.53125 -0.54687,-1.32813 0,-0.53125 0.25,-0.9375 0.26562,-0.40625 0.71875,-0.625 0.45312,-0.23437 1.3125,-0.39062 1.14062,-0.21875 1.59375,-0.40625 l 0,-0.15625 q 0,-0.48438 -0.23438,-0.6875 -0.23437,-0.20313 -0.89062,-0.20313 -0.4375,0 -0.6875,0.17188 -0.23438,0.17187 -0.39063,0.60937 z m 2.20313,1.34375 q -0.3125,0.0937 -1,0.25 -0.6875,0.14063 -0.90625,0.28125 -0.3125,0.23438 -0.3125,0.57813 0,0.34375 0.25,0.60937 0.26562,0.25 0.65625,0.25 0.45312,0 0.85937,-0.29687 0.29688,-0.21875 0.39063,-0.54688 0.0625,-0.20312 0.0625,-0.79687 l 0,-0.32813 z m 2.45508,-3.23437 1.75,0 1.5,4.40625 1.45312,-4.40625 1.70313,0 -2.20313,5.98437 -0.39062,1.07813 q -0.21875,0.54687 -0.42188,0.82812 -0.1875,0.29688 -0.45312,0.46875 -0.25,0.1875 -0.625,0.28125 -0.35938,0.10938 -0.82813,0.10938 -0.48437,0 -0.9375,-0.10938 l -0.14062,-1.28125 q 0.39062,0.0781 0.6875,0.0781 0.57812,0 0.84375,-0.34375 0.28125,-0.32813 0.4375,-0.85938 l -2.375,-6.23437 z m 11.06445,4.23437 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 
2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 4.54883,3.64063 -1.64063,0 0,-6.21875 1.53125,0 0,0.875 q 0.39063,-0.625 0.70313,-0.8125 0.3125,-0.20313 0.70312,-0.20313 0.5625,0 1.09375,0.3125 l -0.51562,1.42188 q -0.42188,-0.26563 -0.76563,-0.26563 -0.35937,0 -0.59375,0.20313 -0.23437,0.1875 -0.375,0.6875 -0.14062,0.48437 -0.14062,2.07812 l 0,1.92188 z m 2.51367,-1.78125 1.65625,-0.25 q 0.10938,0.48437 0.42188,0.73437 0.32812,0.25 0.90625,0.25 0.64062,0 0.95312,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10937,-0.3125 -0.125,-0.125 -0.54688,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73437,-0.51562 -0.73437,-1.40625 0,-0.8125 0.625,-1.35937 0.64062,-0.54688 1.98437,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85938,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79688,-0.20312 -0.64062,0 -0.92187,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32812,0.29687 1.84375,0.73437 0.51562,0.4375 0.51562,1.21875 0,0.85938 -0.71875,1.48438 -0.70312,0.60937 -2.10937,0.60937 -1.26563,0 -2.01563,-0.51562 -0.73437,-0.51563 -0.96875,-1.40625 z"
+ id="path5354"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 605.00006,385.7558 120.66144,0 0,30.11026 -120.66144,0 z"
+ id="path5356"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 619.0778,403.47894 1.6875,-0.15625 q 0.15625,0.84375 0.60938,1.25 0.46875,0.39063 1.26562,0.39063 0.82813,0 1.25,-0.34375 0.4375,-0.35938 0.4375,-0.84375 0,-0.29688 -0.1875,-0.51563 -0.17187,-0.21875 -0.625,-0.375 -0.29687,-0.10937 -1.375,-0.375 -1.40625,-0.34375 -1.96875,-0.84375 -0.78125,-0.71875 -0.78125,-1.73437 0,-0.65625 0.35938,-1.21875 0.375,-0.57813 1.07812,-0.875 0.70313,-0.29688 1.6875,-0.29688 1.625,0 2.4375,0.71875 0.8125,0.70313 0.85938,1.875 l -1.73438,0.0781 q -0.10937,-0.65625 -0.48437,-0.9375 -0.35938,-0.29688 -1.09375,-0.29688 -0.75,0 -1.1875,0.3125 -0.26563,0.1875 -0.26563,0.53125 0,0.29688 0.25,0.51563 0.32813,0.28125 1.59375,0.57812 1.26563,0.29688 1.875,0.625 0.60938,0.3125 0.95313,0.875 0.34375,0.54688 0.34375,1.35938 0,0.73437 -0.42188,1.39062 -0.40625,0.64063 -1.15625,0.95313 -0.75,0.3125 -1.85937,0.3125 -1.64063,0 -2.51563,-0.75 -0.85937,-0.76563 -1.03125,-2.20313 z m 8.37891,-3.42187 1.53125,0 0,0.90625 q 0.3125,-0.46875 0.8125,-0.75 0.51562,-0.29688 1.14062,-0.29688 1.07813,0 1.82813,0.84375 0.76562,0.84375 0.76562,2.375 0,1.54688 -0.76562,2.42188 -0.76563,0.85937 -1.84375,0.85937 -0.51563,0 -0.9375,-0.20312 -0.42188,-0.20313 -0.875,-0.70313 l 0,3.14063 -1.65625,0 0,-8.59375 z m 1.625,3 q 0,1.04687 0.42187,1.54687 0.42188,0.5 1.01563,0.5 0.57812,0 0.95312,-0.45312 0.375,-0.46875 0.375,-1.51563 0,-0.96875 -0.39062,-1.4375 -0.39063,-0.48437 -0.96875,-0.48437 -0.60938,0 -1.01563,0.46875 -0.39062,0.46875 -0.39062,1.375 z m 6.98632,-1.10938 -1.5,-0.26562 q 0.25,-0.90625 0.85938,-1.32813 0.625,-0.4375 1.84375,-0.4375 1.09375,0 1.625,0.26563 0.54687,0.25 0.76562,0.65625 0.21875,0.39062 0.21875,1.46875 l -0.0156,1.92187 q 0,0.82813 0.0781,1.21875 0.0781,0.375 0.29688,0.82813 l -1.625,0 q -0.0625,-0.17188 -0.15625,-0.48438 -0.0469,-0.15625 -0.0625,-0.20312 -0.42188,0.42187 -0.90625,0.625 -0.46875,0.20312 -1.01563,0.20312 -0.96875,0 -1.53125,-0.51562 -0.54687,-0.53125 -0.54687,-1.32813 0,-0.53125 0.25,-0.9375 0.26562,-0.40625 
0.71875,-0.625 0.45312,-0.23437 1.3125,-0.39062 1.14062,-0.21875 1.59375,-0.40625 l 0,-0.15625 q 0,-0.48438 -0.23438,-0.6875 -0.23437,-0.20313 -0.89062,-0.20313 -0.4375,0 -0.6875,0.17188 -0.23438,0.17187 -0.39063,0.60937 z m 2.20313,1.34375 q -0.3125,0.0937 -1,0.25 -0.6875,0.14063 -0.90625,0.28125 -0.3125,0.23438 -0.3125,0.57813 0,0.34375 0.25,0.60937 0.26562,0.25 0.65625,0.25 0.45312,0 0.85937,-0.29687 0.29688,-0.21875 0.39063,-0.54688 0.0625,-0.20312 0.0625,-0.79687 l 0,-0.32813 z m 4.81445,2.98438 -1.64062,0 0,-6.21875 1.53125,0 0,0.875 q 0.39062,-0.625 0.70312,-0.8125 0.3125,-0.20313 0.70313,-0.20313 0.5625,0 1.09375,0.3125 l -0.51563,1.42188 q -0.42187,-0.26563 -0.76562,-0.26563 -0.35938,0 -0.59375,0.20313 -0.23438,0.1875 -0.375,0.6875 -0.14063,0.48437 -0.14063,2.07812 l 0,1.92188 z m 2.51367,-1.78125 1.65625,-0.25 q 0.10938,0.48437 0.42188,0.73437 0.32812,0.25 0.90625,0.25 0.64062,0 0.95312,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10937,-0.3125 -0.125,-0.125 -0.54688,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73437,-0.51562 -0.73437,-1.40625 0,-0.8125 0.625,-1.35937 0.64062,-0.54688 1.98437,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85938,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79688,-0.20312 -0.64062,0 -0.92187,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32812,0.29687 1.84375,0.73437 0.51562,0.4375 0.51562,1.21875 0,0.85938 -0.71875,1.48438 -0.70312,0.60937 -2.10937,0.60937 -1.26563,0 -2.01563,-0.51562 -0.73437,-0.51563 -0.96875,-1.40625 z m 10.86133,-0.20313 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 
0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 6.33594,3.64063 0,-8.59375 5.89062,0 0,1.45312 -4.15625,0 0,2.03125 3.57813,0 0,1.45313 -3.57813,0 0,3.65625 -1.73437,0 z m 10.9082,-1.98438 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 4.20508,-0.6875 -1.5,-0.26562 q 0.25,-0.90625 0.85938,-1.32813 0.625,-0.4375 1.84375,-0.4375 1.09375,0 1.625,0.26563 0.54687,0.25 0.76562,0.65625 0.21875,0.39062 0.21875,1.46875 l -0.0156,1.92187 q 0,0.82813 0.0781,1.21875 0.0781,0.375 0.29688,0.82813 l -1.625,0 q -0.0625,-0.17188 -0.15625,-0.48438 -0.0469,-0.15625 -0.0625,-0.20312 -0.42188,0.42187 -0.90625,0.625 -0.46875,0.20312 -1.01563,0.20312 -0.96875,0 -1.53125,-0.51562 -0.54687,-0.53125 -0.54687,-1.32813 0,-0.53125 0.25,-0.9375 0.26562,-0.40625 0.71875,-0.625 0.45312,-0.23437 1.3125,-0.39062 1.14062,-0.21875 1.59375,-0.40625 l 0,-0.15625 q 0,-0.48438 -0.23438,-0.6875 -0.23437,-0.20313 -0.89062,-0.20313 -0.4375,0 -0.6875,0.17188 -0.23438,0.17187 -0.39063,0.60937 z m 2.20313,1.34375 q -0.3125,0.0937 -1,0.25 -0.6875,0.14063 -0.90625,0.28125 -0.3125,0.23438 -0.3125,0.57813 0,0.34375 0.25,0.60937 0.26562,0.25 0.65625,0.25 0.45312,0 0.85937,-0.29687 0.29688,-0.21875 0.39063,-0.54688 0.0625,-0.20312 0.0625,-0.79687 l 0,-0.32813 z m 6.0957,-3.23437 0,1.3125 -1.125,0 0,2.5 q 0,0.76562 0.0312,0.89062 0.0312,0.125 0.14062,0.21875 0.125,0.0781 0.28125,0.0781 0.23438,0 0.65625,-0.17188 l 
0.14063,1.28125 q -0.5625,0.25 -1.29688,0.25 -0.4375,0 -0.79687,-0.14062 -0.35938,-0.15625 -0.53125,-0.39063 -0.15625,-0.25 -0.23438,-0.64062 -0.0469,-0.29688 -0.0469,-1.17188 l 0,-2.70312 -0.75,0 0,-1.3125 0.75,0 0,-1.23438 1.65625,-0.96875 0,2.20313 1.125,0 z m 5.23047,6.21875 0,-0.9375 q -0.32813,0.5 -0.89063,0.79687 -0.54687,0.28125 -1.17187,0.28125 -0.625,0 -1.125,-0.26562 -0.5,-0.28125 -0.71875,-0.78125 -0.21875,-0.5 -0.21875,-1.375 l 0,-3.9375 1.64062,0 0,2.85937 q 0,1.3125 0.0937,1.60938 0.0937,0.29687 0.32813,0.46875 0.25,0.17187 0.60937,0.17187 0.42188,0 0.75,-0.23437 0.34375,-0.23438 0.46875,-0.57813 0.125,-0.34375 0.125,-1.67187 l 0,-2.625 1.64063,0 0,6.21875 -1.53125,0 z m 4.81445,0 -1.64062,0 0,-6.21875 1.53125,0 0,0.875 q 0.39062,-0.625 0.70312,-0.8125 0.3125,-0.20313 0.70313,-0.20313 0.5625,0 1.09375,0.3125 l -0.51563,1.42188 q -0.42187,-0.26563 -0.76562,-0.26563 -0.35938,0 -0.59375,0.20313 -0.23438,0.1875 -0.375,0.6875 -0.14063,0.48437 -0.14063,2.07812 l 0,1.92188 z m 6.70117,-1.98438 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 2.39258,1.85938 1.65625,-0.25 q 0.10938,0.48437 0.42188,0.73437 0.32812,0.25 0.90625,0.25 0.64062,0 0.95312,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10937,-0.3125 -0.125,-0.125 -0.54688,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73437,-0.51562 -0.73437,-1.40625 0,-0.8125 0.625,-1.35937 0.64062,-0.54688 1.98437,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85938,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 
-0.375,-0.54688 -0.28125,-0.20312 -0.79688,-0.20312 -0.64062,0 -0.92187,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32812,0.29687 1.84375,0.73437 0.51562,0.4375 0.51562,1.21875 0,0.85938 -0.71875,1.48438 -0.70312,0.60937 -2.10937,0.60937 -1.26563,0 -2.01563,-0.51562 -0.73437,-0.51563 -0.96875,-1.40625 z"
+ id="path5358"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 601.66156,250.31233 120.66144,0 0,30.11024 -120.66144,0 z"
+ id="path5360"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 626.18274,266.58234 q 0,-1.3125 0.40625,-2.20313 0.28125,-0.65625 0.78125,-1.17187 0.51562,-0.51563 1.125,-0.76563 0.79687,-0.34375 1.84375,-0.34375 1.90625,0 3.04687,1.1875 1.14063,1.17188 1.14063,3.26563 0,2.07812 -1.14063,3.25 -1.125,1.17187 -3.01562,1.17187 -1.92188,0 -3.0625,-1.15625 -1.125,-1.17187 -1.125,-3.23437 z m 1.79687,-0.0469 q 0,1.45313 0.67188,2.20313 0.67187,0.75 1.70312,0.75 1.04688,0 1.70313,-0.73438 0.67187,-0.75 0.67187,-2.25 0,-1.46875 -0.65625,-2.1875 -0.64062,-0.73437 -1.71875,-0.73437 -1.0625,0 -1.71875,0.73437 -0.65625,0.73438 -0.65625,2.21875 z m 11.97461,4.29688 0,-0.9375 q -0.32812,0.5 -0.89062,0.79687 -0.54688,0.28125 -1.17188,0.28125 -0.625,0 -1.125,-0.26562 -0.5,-0.28125 -0.71875,-0.78125 -0.21875,-0.5 -0.21875,-1.375 l 0,-3.9375 1.64063,0 0,2.85937 q 0,1.3125 0.0937,1.60938 0.0937,0.29687 0.32812,0.46875 0.25,0.17187 0.60938,0.17187 0.42187,0 0.75,-0.23437 0.34375,-0.23438 0.46875,-0.57813 0.125,-0.34375 0.125,-1.67187 l 0,-2.625 1.64062,0 0,6.21875 -1.53125,0 z m 6.09571,-6.21875 0,1.3125 -1.125,0 0,2.5 q 0,0.76562 0.0312,0.89062 0.0312,0.125 0.14062,0.21875 0.125,0.0781 0.28125,0.0781 0.23438,0 0.65625,-0.17188 l 0.14063,1.28125 q -0.5625,0.25 -1.29688,0.25 -0.4375,0 -0.79687,-0.14062 -0.35938,-0.15625 -0.53125,-0.39063 -0.15625,-0.25 -0.23438,-0.64062 -0.0469,-0.29688 -0.0469,-1.17188 l 0,-2.70312 -0.75,0 0,-1.3125 0.75,0 0,-1.23438 1.65625,-0.96875 0,2.20313 1.125,0 z m 1.08984,0 1.53125,0 0,0.90625 q 0.3125,-0.46875 0.8125,-0.75 0.51563,-0.29688 1.14063,-0.29688 1.07812,0 1.82812,0.84375 0.76563,0.84375 0.76563,2.375 0,1.54688 -0.76563,2.42188 -0.76562,0.85937 -1.84375,0.85937 -0.51562,0 -0.9375,-0.20312 -0.42187,-0.20313 -0.875,-0.70313 l 0,3.14063 -1.65625,0 0,-8.59375 z m 1.625,3 q 0,1.04687 0.42188,1.54687 0.42187,0.5 1.01562,0.5 0.57813,0 0.95313,-0.45312 0.375,-0.46875 0.375,-1.51563 0,-0.96875 -0.39063,-1.4375 -0.39062,-0.48437 -0.96875,-0.48437 -0.60937,0 -1.01562,0.46875 -0.39063,0.46875 -0.39063,1.375 z m 
9.8457,3.21875 0,-0.9375 q -0.32812,0.5 -0.89062,0.79687 -0.54688,0.28125 -1.17188,0.28125 -0.625,0 -1.125,-0.26562 -0.5,-0.28125 -0.71875,-0.78125 -0.21875,-0.5 -0.21875,-1.375 l 0,-3.9375 1.64063,0 0,2.85937 q 0,1.3125 0.0937,1.60938 0.0937,0.29687 0.32812,0.46875 0.25,0.17187 0.60938,0.17187 0.42187,0 0.75,-0.23437 0.34375,-0.23438 0.46875,-0.57813 0.125,-0.34375 0.125,-1.67187 l 0,-2.625 1.64062,0 0,6.21875 -1.53125,0 z m 6.09571,-6.21875 0,1.3125 -1.125,0 0,2.5 q 0,0.76562 0.0312,0.89062 0.0312,0.125 0.14062,0.21875 0.125,0.0781 0.28125,0.0781 0.23438,0 0.65625,-0.17188 l 0.14063,1.28125 q -0.5625,0.25 -1.29688,0.25 -0.4375,0 -0.79687,-0.14062 -0.35938,-0.15625 -0.53125,-0.39063 -0.15625,-0.25 -0.23438,-0.64062 -0.0469,-0.29688 -0.0469,-1.17188 l 0,-2.70312 -0.75,0 0,-1.3125 0.75,0 0,-1.23438 1.65625,-0.96875 0,2.20313 1.125,0 z m 4.4707,-2.375 1.73437,0 0,4.65625 q 0,1.10937 0.0625,1.4375 0.10938,0.53125 0.53125,0.84375 0.42188,0.3125 1.15625,0.3125 0.73438,0 1.10938,-0.29688 0.375,-0.29687 0.45312,-0.73437 0.0781,-0.4375 0.0781,-1.46875 l 0,-4.75 1.73437,0 0,4.51562 q 0,1.54688 -0.14062,2.1875 -0.14063,0.64063 -0.53125,1.07813 -0.375,0.4375 -1.01563,0.70312 -0.625,0.25 -1.64062,0.25 -1.23438,0 -1.875,-0.28125 -0.625,-0.28125 -1,-0.73437 -0.35938,-0.45313 -0.48438,-0.95313 -0.17187,-0.73437 -0.17187,-2.1875 l 0,-4.57812 z m 14.32227,8.59375 -1.64063,0 0,-3.17188 q 0,-1.01562 -0.10937,-1.3125 -0.0937,-0.29687 -0.34375,-0.45312 -0.23438,-0.17188 -0.5625,-0.17188 -0.4375,0 -0.78125,0.23438 -0.32813,0.23437 -0.45313,0.625 -0.125,0.39062 -0.125,1.4375 l 0,2.8125 -1.65625,0 0,-6.21875 1.53125,0 0,0.90625 q 0.8125,-1.04688 2.0625,-1.04688 0.53125,0 0.98438,0.20313 0.45312,0.1875 0.6875,0.5 0.23437,0.29687 0.3125,0.6875 0.0937,0.375 0.0937,1.09375 l 0,3.875 z m 1.67382,-7.0625 0,-1.53125 1.65625,0 0,1.53125 -1.65625,0 z m 0,7.0625 0,-6.21875 1.65625,0 0,6.21875 -1.65625,0 z m 6.19336,-6.21875 0,1.3125 -1.125,0 0,2.5 q 0,0.76562 0.0312,0.89062 0.0312,0.125 
0.14063,0.21875 0.125,0.0781 0.28125,0.0781 0.23437,0 0.65625,-0.17188 l 0.14062,1.28125 q -0.5625,0.25 -1.29687,0.25 -0.4375,0 -0.79688,-0.14062 -0.35937,-0.15625 -0.53125,-0.39063 -0.15625,-0.25 -0.23437,-0.64062 -0.0469,-0.29688 -0.0469,-1.17188 l 0,-2.70312 -0.75,0 0,-1.3125 0.75,0 0,-1.23438 1.65625,-0.96875 0,2.20313 1.125,0 z m 0.5586,4.4375 1.65625,-0.25 q 0.10937,0.48437 0.42187,0.73437 0.32813,0.25 0.90625,0.25 0.64063,0 0.95313,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10938,-0.3125 -0.125,-0.125 -0.54687,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73438,-0.51562 -0.73438,-1.40625 0,-0.8125 0.625,-1.35937 0.64063,-0.54688 1.98438,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85937,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79687,-0.20312 -0.64063,0 -0.92188,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32813,0.29687 1.84375,0.73437 0.51563,0.4375 0.51563,1.21875 0,0.85938 -0.71875,1.48438 -0.70313,0.60937 -2.10938,0.60937 -1.26562,0 -2.01562,-0.51562 -0.73438,-0.51563 -0.96875,-1.40625 z"
+ id="path5362"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 601.6615,345.60638 120.66144,0 0,42.86615 -120.66144,0 z"
+ id="path5364"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:0;fill-rule:nonzero" />
+ <path
+ d="m 644.85846,357.53265 3.15625,0 q 1.07813,0 1.64063,0.17187 0.75,0.21875 1.28125,0.79688 0.54687,0.5625 0.82812,1.39062 0.28125,0.8125 0.28125,2.01563 0,1.0625 -0.25,1.82812 -0.32812,0.9375 -0.92187,1.51563 -0.45313,0.45312 -1.21875,0.6875 -0.57813,0.1875 -1.54688,0.1875 l -3.25,0 0,-8.59375 z m 1.73438,1.45312 0,5.6875 1.28125,0 q 0.73437,0 1.0625,-0.0781 0.42187,-0.10938 0.6875,-0.35938 0.28125,-0.25 0.45312,-0.82812 0.1875,-0.57813 0.1875,-1.57813 0,-0.98437 -0.1875,-1.51562 -0.17187,-0.54688 -0.48437,-0.84375 -0.3125,-0.29688 -0.79688,-0.40625 -0.375,-0.0781 -1.4375,-0.0781 l -0.76562,0 z m 10.52539,5.15625 1.64062,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70312,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04687,-0.875 1.39063,0 2.1875,0.92188 0.8125,0.90625 0.76563,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95312,0.40625 0.375,0 0.64063,-0.20313 0.26562,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85938,-0.375 -0.53125,0 -0.89062,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 8.62695,3.64063 -1.64063,0 0,-3.17188 q 0,-1.01562 -0.10937,-1.3125 -0.0937,-0.29687 -0.34375,-0.45312 -0.23438,-0.17188 -0.5625,-0.17188 -0.4375,0 -0.78125,0.23438 -0.32813,0.23437 -0.45313,0.625 -0.125,0.39062 -0.125,1.4375 l 0,2.8125 -1.65625,0 0,-6.21875 1.53125,0 0,0.90625 q 0.8125,-1.04688 2.0625,-1.04688 0.53125,0 0.98438,0.20313 0.45312,0.1875 0.6875,0.5 0.23437,0.29687 0.3125,0.6875 0.0937,0.375 0.0937,1.09375 l 0,3.875 z m 1.0957,-1.78125 1.65625,-0.25 q 0.10938,0.48437 0.42188,0.73437 0.32812,0.25 0.90625,0.25 0.64062,0 0.95312,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10937,-0.3125 -0.125,-0.125 -0.54688,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73437,-0.51562 -0.73437,-1.40625 0,-0.8125 0.625,-1.35937 0.64062,-0.54688 1.98437,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85938,1.21875 l 
-1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79688,-0.20312 -0.64062,0 -0.92187,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32812,0.29687 1.84375,0.73437 0.51562,0.4375 0.51562,1.21875 0,0.85938 -0.71875,1.48438 -0.70312,0.60937 -2.10937,0.60937 -1.26563,0 -2.01563,-0.51562 -0.73437,-0.51563 -0.96875,-1.40625 z m 10.86133,-0.20313 1.64063,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70313,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04688,-0.875 1.39062,0 2.1875,0.92188 0.8125,0.90625 0.76562,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95313,0.40625 0.375,0 0.64062,-0.20313 0.26563,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85937,-0.375 -0.53125,0 -0.89063,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z"
+ id="path5366"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ <path
+ d="m 626.8643,380.1264 0,-8.59375 6.375,0 0,1.45312 -4.64063,0 0,1.90625 4.3125,0 0,1.45313 -4.3125,0 0,2.32812 4.79688,0 0,1.45313 -6.53125,0 z m 7.86328,-6.21875 1.51563,0 0,0.84375 q 0.82812,-0.98438 1.95312,-0.98438 0.59375,0 1.03125,0.25 0.4375,0.23438 0.71875,0.73438 0.40625,-0.5 0.875,-0.73438 0.48438,-0.25 1.03125,-0.25 0.67188,0 1.14063,0.28125 0.48437,0.26563 0.71875,0.8125 0.17187,0.39063 0.17187,1.28125 l 0,3.98438 -1.64062,0 0,-3.5625 q 0,-0.92188 -0.17188,-1.1875 -0.23437,-0.35938 -0.70312,-0.35938 -0.34375,0 -0.65625,0.21875 -0.29688,0.20313 -0.4375,0.60938 -0.14063,0.40625 -0.14063,1.29687 l 0,2.98438 -1.64062,0 0,-3.40625 q 0,-0.90625 -0.0937,-1.17188 -0.0781,-0.26562 -0.26563,-0.39062 -0.1875,-0.14063 -0.5,-0.14063 -0.375,0 -0.6875,0.21875 -0.29687,0.20313 -0.4375,0.59375 -0.125,0.375 -0.125,1.26563 l 0,3.03125 -1.65625,0 0,-6.21875 z m 10.73242,6.21875 0,-8.59375 1.64063,0 0,3.09375 q 0.76562,-0.85938 1.8125,-0.85938 1.125,0 1.875,0.82813 0.75,0.8125 0.75,2.35937 0,1.59375 -0.76563,2.45313 -0.76562,0.85937 -1.84375,0.85937 -0.53125,0 -1.04687,-0.26562 -0.51563,-0.26563 -0.89063,-0.79688 l 0,0.92188 -1.53125,0 z m 1.625,-3.25 q 0,0.96875 0.3125,1.4375 0.42188,0.65625 1.14063,0.65625 0.53125,0 0.92187,-0.46875 0.39063,-0.46875 0.39063,-1.46875 0,-1.0625 -0.39063,-1.53125 -0.39062,-0.48438 -1,-0.48438 -0.57812,0 -0.98437,0.46875 -0.39063,0.45313 -0.39063,1.39063 z m 9.37696,1.26562 1.64062,0.28125 q -0.3125,0.90625 -1,1.375 -0.6875,0.46875 -1.70312,0.46875 -1.625,0 -2.40625,-1.0625 -0.625,-0.84375 -0.625,-2.14062 0,-1.54688 0.8125,-2.42188 0.8125,-0.875 2.04687,-0.875 1.39063,0 2.1875,0.92188 0.8125,0.90625 0.76563,2.79687 l -4.125,0 q 0.0312,0.73438 0.40625,1.14063 0.375,0.40625 0.95312,0.40625 0.375,0 0.64063,-0.20313 0.26562,-0.21875 0.40625,-0.6875 z m 0.0937,-1.65625 q -0.0156,-0.71875 -0.375,-1.09375 -0.34375,-0.375 -0.85938,-0.375 -0.53125,0 -0.89062,0.39063 -0.34375,0.40625 -0.34375,1.07812 l 2.46875,0 z m 8.67382,3.64063 -1.51562,0 
0,-0.92188 q -0.39063,0.54688 -0.90625,0.8125 -0.51563,0.25 -1.04688,0.25 -1.07812,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73438,-2.39062 0.75,-0.82813 1.89062,-0.82813 1.03125,0 1.79688,0.85938 l 0,-3.09375 1.64062,0 0,8.59375 z m -4.39062,-3.25 q 0,1 0.28125,1.4375 0.39062,0.65625 1.10937,0.65625 0.5625,0 0.95313,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.39063,-1.53125 -0.375,-0.48438 -0.98437,-0.48438 -0.57813,0 -0.98438,0.46875 -0.39062,0.46875 -0.39062,1.39063 z m 11.7207,3.25 -1.51562,0 0,-0.92188 q -0.39063,0.54688 -0.90625,0.8125 -0.51563,0.25 -1.04688,0.25 -1.07812,0 -1.84375,-0.85937 -0.75,-0.875 -0.75,-2.42188 0,-1.57812 0.73438,-2.39062 0.75,-0.82813 1.89062,-0.82813 1.03125,0 1.79688,0.85938 l 0,-3.09375 1.64062,0 0,8.59375 z m -4.39062,-3.25 q 0,1 0.28125,1.4375 0.39062,0.65625 1.10937,0.65625 0.5625,0 0.95313,-0.48438 0.40625,-0.48437 0.40625,-1.45312 0,-1.0625 -0.39063,-1.53125 -0.375,-0.48438 -0.98437,-0.48438 -0.57813,0 -0.98438,0.46875 -0.39062,0.46875 -0.39062,1.39063 z m 6.01758,-3.8125 0,-1.53125 1.65625,0 0,1.53125 -1.65625,0 z m 0,7.0625 0,-6.21875 1.65625,0 0,6.21875 -1.65625,0 z m 8.99023,0 -1.64063,0 0,-3.17188 q 0,-1.01562 -0.10937,-1.3125 -0.0937,-0.29687 -0.34375,-0.45312 -0.23438,-0.17188 -0.5625,-0.17188 -0.4375,0 -0.78125,0.23438 -0.32813,0.23437 -0.45313,0.625 -0.125,0.39062 -0.125,1.4375 l 0,2.8125 -1.65625,0 0,-6.21875 1.53125,0 0,0.90625 q 0.8125,-1.04688 2.0625,-1.04688 0.53125,0 0.98438,0.20313 0.45312,0.1875 0.6875,0.5 0.23437,0.29687 0.3125,0.6875 0.0937,0.375 0.0937,1.09375 l 0,3.875 z m 1.51758,0.40625 1.89062,0.23437 q 0.0469,0.32813 0.21875,0.45313 0.23438,0.17187 0.73438,0.17187 0.64062,0 0.96875,-0.1875 0.21875,-0.14062 0.32812,-0.42187 0.0781,-0.20313 0.0781,-0.75 l 0,-0.92188 q -0.75,1.01563 -1.875,1.01563 -1.25,0 -1.98438,-1.0625 -0.5625,-0.84375 -0.5625,-2.07813 0,-1.57812 0.75,-2.39062 0.75,-0.82813 1.875,-0.82813 1.14063,0 1.89063,1.01563 l 0,-0.875 1.54687,0 0,5.57812 q 
0,1.10938 -0.1875,1.64063 -0.17187,0.54687 -0.5,0.85937 -0.32812,0.3125 -0.875,0.48438 -0.54687,0.1875 -1.39062,0.1875 -1.57813,0 -2.25,-0.54688 -0.65625,-0.54687 -0.65625,-1.375 0,-0.0781 0,-0.20312 z m 1.48437,-3.64063 q 0,0.98438 0.375,1.45313 0.39063,0.45312 0.95313,0.45312 0.59375,0 1.01562,-0.46875 0.42188,-0.48437 0.42188,-1.40625 0,-0.96875 -0.40625,-1.4375 -0.39063,-0.46875 -1,-0.46875 -0.59375,0 -0.98438,0.46875 -0.375,0.45313 -0.375,1.40625 z m 5.42383,1.45313 1.65625,-0.25 q 0.10938,0.48437 0.42188,0.73437 0.32812,0.25 0.90625,0.25 0.64062,0 0.95312,-0.23437 0.21875,-0.17188 0.21875,-0.4375 0,-0.1875 -0.10937,-0.3125 -0.125,-0.125 -0.54688,-0.21875 -2,-0.4375 -2.53125,-0.79688 -0.73437,-0.51562 -0.73437,-1.40625 0,-0.8125 0.625,-1.35937 0.64062,-0.54688 1.98437,-0.54688 1.28125,0 1.90625,0.42188 0.625,0.40625 0.85938,1.21875 l -1.5625,0.28125 q -0.0937,-0.35938 -0.375,-0.54688 -0.28125,-0.20312 -0.79688,-0.20312 -0.64062,0 -0.92187,0.1875 -0.1875,0.125 -0.1875,0.32812 0,0.1875 0.15625,0.3125 0.21875,0.15625 1.53125,0.45313 1.32812,0.29687 1.84375,0.73437 0.51562,0.4375 0.51562,1.21875 0,0.85938 -0.71875,1.48438 -0.70312,0.60937 -2.10937,0.60937 -1.26563,0 -2.01563,-0.51562 -0.73437,-0.51563 -0.96875,-1.40625 z"
+ id="path5368"
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-rule:nonzero" />
+ </g>
+</svg>
diff --git a/tensorflow/g3doc/tutorials/index.md b/tensorflow/g3doc/tutorials/index.md
index ccb05ab5f0..292596837d 100644
--- a/tensorflow/g3doc/tutorials/index.md
+++ b/tensorflow/g3doc/tutorials/index.md
@@ -1,7 +1,8 @@
# Tutorials
+## Basic Neural Networks
-## MNIST For ML Beginners
+### MNIST For ML Beginners
If you're new to machine learning, we recommend starting here. You'll learn
about a classic problem, handwritten digit classification (MNIST), and get a
@@ -10,33 +11,75 @@ gentle introduction to multiclass classification.
[View Tutorial](../tutorials/mnist/beginners/index.md)
-## Deep MNIST for Experts
+### Deep MNIST for Experts
If you're already familiar with other deep learning software packages, and are
-already familiar with MNIST, this tutorial will give you a very brief primer on
-TensorFlow.
+already familiar with MNIST, this tutorial will give you a very brief primer
+on TensorFlow.
[View Tutorial](../tutorials/mnist/pros/index.md)
-
-## TensorFlow Mechanics 101
+### TensorFlow Mechanics 101
This is a technical tutorial, where we walk you through the details of using
-TensorFlow infrastructure to train models at scale. We again use MNIST as the
+TensorFlow infrastructure to train models at scale. We use MNIST as the
example.
[View Tutorial](../tutorials/mnist/tf/index.md)
+### MNIST Data Download
+
+Details about downloading the MNIST handwritten digits data set. Exciting
+stuff.
+
+[View Tutorial](../tutorials/mnist/download/index.md)
+
+
+## Easy ML with tf.contrib.learn
+
+### tf.contrib.learn Quickstart
+
+A quick introduction to tf.contrib.learn, a high-level API for TensorFlow.
+Build, train, and evaluate a neural network with just a few lines of
+code.
+
+[View Tutorial](../tutorials/tflearn/index.md)
+
+### Overview of Linear Models with tf.contrib.learn
+
+An overview of tf.contrib.learn's rich set of tools for working with linear
+models in TensorFlow.
+
+[View Tutorial](../tutorials/linear/overview.md)
+
+### Linear Model Tutorial
+
+This tutorial walks you through the code for building a linear model using
+tf.contrib.learn.
+
+[View Tutorial](../tutorials/wide/index.md)
+
+### Wide and Deep Learning Tutorial
+
+This tutorial shows you how to use tf.contrib.learn to jointly train a linear
+model and a deep neural net to harness the advantages of each type of model.
+
+[View Tutorial](../tutorials/wide_and_deep/index.md)
+
## TensorFlow Serving
+### TensorFlow Serving
+
An introduction to TensorFlow Serving, a flexible, high-performance system for
serving machine learning models, designed for production environments.
[View Tutorial](../tutorials/tfserve/index.md)
-## Convolutional Neural Networks
+## Image Processing
+
+### Convolutional Neural Networks
An introduction to convolutional neural networks using the CIFAR-10 data set.
Convolutional neural nets are particularly tailored to images, since they
@@ -45,8 +88,25 @@ representations of visual content.
[View Tutorial](../tutorials/deep_cnn/index.md)
+### Image Recognition
+
+How to run object recognition using a convolutional neural network
+trained on ImageNet Challenge data and label set.
+
+[View Tutorial](../tutorials/image_recognition/index.md)
+
+### Deep Dream Visual Hallucinations
+
+Building on the Inception recognition model, we will release a TensorFlow
+version of the [Deep Dream](https://github.com/google/deepdream) neural network
+visual hallucination software.
+
+[View Tutorial](https://www.tensorflow.org/code/tensorflow/examples/tutorials/deepdream/deepdream.ipynb)
+
+
+## Language and Sequence Processing
-## Vector Representations of Words
+### Vector Representations of Words
This tutorial motivates why it is useful to learn to represent words as vectors
(called *word embeddings*). It introduces the word2vec model as an efficient
@@ -56,16 +116,14 @@ embeddings).
[View Tutorial](../tutorials/word2vec/index.md)
-
-## Recurrent Neural Networks
+### Recurrent Neural Networks
An introduction to RNNs, wherein we train an LSTM network to predict the next
word in an English sentence. (A task sometimes called language modeling.)
[View Tutorial](../tutorials/recurrent/index.md)
-
-## Sequence-to-Sequence Models
+### Sequence-to-Sequence Models
A follow on to the RNN tutorial, where we assemble a sequence-to-sequence model
for machine translation. You will learn to build your own English-to-French
@@ -73,8 +131,7 @@ translator, entirely machine learned, end-to-end.
[View Tutorial](../tutorials/seq2seq/index.md)
-
-## SyntaxNet: Neural Models of Syntax
+### SyntaxNet: Neural Models of Syntax
An introduction to SyntaxNet, a Natural Language Processing framework for
TensorFlow.
@@ -82,44 +139,18 @@ TensorFlow.
[View Tutorial](../tutorials/syntaxnet/index.md)
-## Mandelbrot Set
+## Non-ML Applications
+
+### Mandelbrot Set
TensorFlow can be used for computation that has nothing to do with machine
learning. Here's a naive implementation of Mandelbrot set visualization.
[View Tutorial](../tutorials/mandelbrot/index.md)
-
-## Partial Differential Equations
+### Partial Differential Equations
As another example of non-machine learning computation, we offer an example of
a naive PDE simulation of raindrops landing on a pond.
[View Tutorial](../tutorials/pdes/index.md)
-
-
-## MNIST Data Download
-
-Details about downloading the MNIST handwritten digits data set. Exciting
-stuff.
-
-[View Tutorial](../tutorials/mnist/download/index.md)
-
-
-## Image Recognition
-
-How to run object recognition using a convolutional neural network
-trained on ImageNet Challenge data and label set.
-
-[View Tutorial](../tutorials/image_recognition/index.md)
-
-We will soon be releasing code for training a state-of-the-art Inception model.
-
-
-## Deep Dream Visual Hallucinations
-
-Building on the Inception recognition model, we will release a TensorFlow
-version of the [Deep Dream](https://github.com/google/deepdream) neural network
-visual hallucination software.
-
-[View Tutorial](https://www.tensorflow.org/code/tensorflow/examples/tutorials/deepdream/deepdream.ipynb)
diff --git a/tensorflow/g3doc/tutorials/leftnav_files b/tensorflow/g3doc/tutorials/leftnav_files
index c35a936995..09cd084b49 100644
--- a/tensorflow/g3doc/tutorials/leftnav_files
+++ b/tensorflow/g3doc/tutorials/leftnav_files
@@ -1,13 +1,23 @@
+### Basic Neural Networks
mnist/beginners/index.md
mnist/pros/index.md
mnist/tf/index.md
+mnist/download/index.md
+### Easy ML with tf.contrib.learn
+tflearn/index.md
+linear/overview.md
+wide/index.md
+wide_and_deep/index.md
+### TensorFlow Serving
tfserve/index.md
+### Image Processing
deep_cnn/index.md
+image_recognition/index.md
+### Language and Sequence Processing
word2vec/index.md
recurrent/index.md
seq2seq/index.md
syntaxnet/index.md
+### Non-ML Applications
mandelbrot/index.md
pdes/index.md
-mnist/download/index.md
-image_recognition/index.md \ No newline at end of file
diff --git a/tensorflow/g3doc/tutorials/linear/overview.md b/tensorflow/g3doc/tutorials/linear/overview.md
new file mode 100644
index 0000000000..8614011290
--- /dev/null
+++ b/tensorflow/g3doc/tutorials/linear/overview.md
@@ -0,0 +1,237 @@
+# Large-scale Linear Models with TensorFlow
+
+The tf.learn API provides (among other things) a rich set of tools for working
+with linear models in TensorFlow. This document provides an overview of those
+tools. It explains:
+
+ * what a linear model is.
+ * why you might want to use a linear model.
+ * how tf.learn makes it easy to build linear models in TensorFlow.
+ * how you can use tf.learn to combine linear models with
+ deep learning to get the advantages of both.
+
+Read this overview to decide whether the tf.learn linear model tools might be
+useful to you. Then do the [Linear Models tutorial](wide/) to
+give it a try. This overview uses code samples from the tutorial, but the
+tutorial walks through the code in greater detail.
+
+To understand this overview it will help to have some familiarity
+with basic machine learning concepts, and also with
+[tf.learn](../tflearn/).
+
+[TOC]
+
+## What is a linear model?
+
+A *linear model* uses a single weighted sum of features to make a prediction.
+For example, if you have [data](https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.names)
+on age, years of education, and weekly hours of
+work for a population, you can learn weights for each of those numbers so that
+their weighted sum estimates a person's salary. You can also use linear models
+for classification.
+
+Some linear models transform the weighted sum into a more convenient form. For
+example, *logistic regression* plugs the weighted sum into the logistic
+function to turn the output into a value between 0 and 1. But you still just
+have one weight for each input feature.
+
+## Why would you want to use a linear model?
+
+Why would you want to use so simple a model when recent research has
+demonstrated the power of more complex neural networks with many layers?
+
+Linear models:
+
+ * train quickly, compared to deep neural nets.
+ * can work well on very large feature sets.
+ * can be trained with algorithms that don't require a lot of fiddling
+ with learning rates, etc.
+ * can be interpreted and debugged more easily than neural nets.
+ You can examine the weights assigned to each feature to figure out what's
+ having the biggest impact on a prediction.
+ * provide an excellent starting point for learning about machine learning.
+ * are widely used in industry.
+
+## How does tf.learn help you build linear models?
+
+You can build a linear model from scratch in TensorFlow without the help of a
+special API. But tf.learn provides some tools that make it easier to build
+effective large-scale linear models.
+
+### Feature columns and transformations
+
+Much of the work of designing a linear model consists of transforming raw data
+into suitable input features. tf.learn uses the `FeatureColumn` abstraction to
+enable these transformations.
+
+A `FeatureColumn` represents a single feature in your data. A `FeatureColumn`
+may represent a quantity like 'height', or it may represent a category like
+'eye_color' where the value is drawn from a set of discrete possibilities like {'blue', 'brown', 'green'}.
+
+In the case of both *continuous features* like 'height' and *categorical
+features* like 'eye_color', a single value in the data might get transformed
+into a sequence of numbers before it is input into the model. The
+`FeatureColumn` abstraction lets you manipulate the feature as a single
+semantic unit in spite of this fact. You can specify transformations and
+select features to include without dealing with specific indices in the
+tensors you feed into the model.
+
+#### Sparse columns
+
+Categorical features in linear models are typically translated into a sparse
+vector in which each possible value has a corresponding index or id. For
+example, if there are only three possible eye colors you can represent
+'eye_color' as a length 3 vector: 'brown' would become [1, 0, 0], 'blue' would
+become [0, 1, 0] and 'green' would become [0, 0, 1]. These vectors are called
+"sparse" because they may be very long, with many zeros, when the set of
+possible values is very large (such as all English words).
+
+While you don't need to use sparse columns to use tf.learn linear models, one
+of the strengths of linear models is their ability to deal with large sparse
+vectors. Sparse features are a primary use case for the tf.learn linear model
+tools.
+
+##### Encoding sparse columns
+
+`FeatureColumn` handles the conversion of categorical values into vectors
+automatically, with code like this:
+
+```python
+eye_color = tf.contrib.layers.sparse_column_with_keys(
+ column_name="eye_color", keys=["blue", "brown", "green"])
+```
+
+where `eye_color` is the name of a column in your source data.
+
+You can also generate `FeatureColumn`s for categorical features for which you
+don't know all possible values. For this case you would use
+`sparse_column_with_hash_bucket()`, which uses a hash function to assign
+indices to feature values.
+
+```python
+education = tf.contrib.layers.sparse_column_with_hash_bucket(\
+ "education", hash_bucket_size=1000)
+```
+
+##### Feature Crosses
+
+Because linear models assign independent weights to separate features, they
+can't learn the relative importance of specific combinations of feature
+values. If you have a feature 'favorite_sport' and a feature 'home_city' and
+you're trying to predict whether a person likes to wear red, your linear model
+won't be able to learn that baseball fans from St. Louis especially like to
+wear red.
+
+You can get around this limitation by creating a new feature
+'favorite_sport_x_home_city'. The value of this feature for a given person is
+just the concatenation of the values of the two source features:
+'baseball_x_stlouis', for example. This sort of combination feature is called
+a *feature cross*.
+
+The `crossed_column()` method makes it easy to set up feature crosses:
+
+```python
+sport = tf.contrib.layers.sparse_column_with_hash_bucket(\
+ "sport", hash_bucket_size=1000)
+city = tf.contrib.layers.sparse_column_with_hash_bucket(\
+ "city", hash_bucket_size=1000)
+sport_x_city = tf.contrib.layers.crossed_column(
+ [sport, city], hash_bucket_size=int(1e4))
+```
+
+#### Continuous columns
+
+You can specify a continuous feature like so:
+
+```python
+age = tf.contrib.layers.real_valued_column("age")
+```
+
+Although, as a single real number, a continuous feature can often be input
+directly into the model, tf.learn offers useful transformations for this sort
+of column as well.
+
+##### Bucketization
+
+*Bucketization* turns a continuous column into a categorical column. This
+transformation lets you use continuous features in feature crosses, or learn
+cases where specific value ranges have particular importance.
+
+Bucketization divides the range of possible values into subranges called
+buckets:
+
+```python
+age_buckets = tf.contrib.layers.bucketized_column(
+ age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
+```
+
+The bucket into which a value falls becomes the categorical label for
+that value.
+
+#### Input function
+
+`FeatureColumn`s provide a specification for the input data for your model,
+indicating how to represent and transform the data. But they do not provide
+the data itself. You provide the data through an input function.
+
+The input function must return a dictionary of tensors. Each key corresponds
+to the name of a `FeatureColumn`. Each key's value is a tensor containing the
+values of that feature for all data instances. See `input_fn` in the [linear
+models tutorial code](
+https://www.tensorflow.org/code/tensorflow/examples/learn/wide_n_deep_tutorial.py?l=160)
+for an example of an input function.
+
+The input function is passed to the `fit()` and `evaluate()` calls that
+initiate training and testing, as described in the next section.
+
+### Linear estimators
+
+tf.learn's estimator classes provide a unified training and evaluation harness
+for regression and classification models. They take care of the details of the
+training and evaluation loops and allow the user to focus on model inputs and
+architecture.
+
+To build a linear estimator, you can use either the
+`tf.contrib.learn.LinearClassifier` estimator or the
+`tf.contrib.learn.LinearRegressor` estimator, for classification and
+regression respectively.
+
+As with all tf.learn estimators, to run the estimator you just:
+
+ 1. Instantiate the estimator class. For the two linear estimator classes,
+ you pass a list of `FeatureColumn`s to the constructor.
+ 2. Call the estimator's `fit()` method to train it.
+ 3. Call the estimator's `evaluate()` method to see how it does.
+
+For example:
+
+```python
+e = tf.contrib.learn.LinearClassifier(feature_columns=[
+ native_country, education, occupation, workclass, marital_status,
+ race, age_buckets, education_x_occupation, age_buckets_x_race_x_occupation],
+ model_dir=YOUR_MODEL_DIRECTORY)
+e.fit(input_fn=input_fn_train, steps=200)
+# Evaluate for one step (one pass through the test data).
+results = e.evaluate(input_fn=input_fn_test, steps=1)
+
+# Print the stats for the evaluation.
+for key in sorted(results):
+ print "%s: %s" % (key, results[key])
+```
+
+### Wide and deep learning
+
+The tf.learn API also provides an estimator class that lets you jointly train
+a linear model and a deep neural network. This novel approach combines the
+ability of linear models to "memorize" key features with the generalization
+ability of neural nets. Use `tf.contrib.learn.DNNLinearCombinedClassifier` to
+create this sort of "wide and deep" model:
+
+```python
+e = tf.contrib.learn.DNNLinearCombinedClassifier(
+ model_dir=YOUR_MODEL_DIR,
+ linear_feature_columns=wide_columns,
+ dnn_feature_columns=deep_columns,
+ dnn_hidden_units=[100, 50])
+```
+For more information, see the [Wide and Deep Learning tutorial](../wide_and_deep/).
diff --git a/tensorflow/g3doc/tutorials/tflearn/index.md b/tensorflow/g3doc/tutorials/tflearn/index.md
new file mode 100644
index 0000000000..0a228baf08
--- /dev/null
+++ b/tensorflow/g3doc/tutorials/tflearn/index.md
@@ -0,0 +1,251 @@
+## tf.contrib.learn Quickstart
+
+TensorFlow’s high-level machine learning API (tf.contrib.learn) makes it easy
+to configure, train, and evaluate a variety of machine learning models. In
+this quickstart tutorial, you’ll use tf.contrib.learn to construct a [neural
+network](https://en.wikipedia.org/wiki/Artificial_neural_network) classifier
+and train it on [Fisher’s Iris data
+set](https://en.wikipedia.org/wiki/Iris_flower_data_set) to predict flower
+species based on sepal/petal geometry. You’ll perform the following five
+steps:
+
+1. Load CSVs containing Iris training/test data into a TensorFlow `Dataset`
+2. Construct a [neural network classifier](
+../../api_docs/python/contrib.learn.html#DNNClassifier)
+3. Fit the model using the training data
+4. Evaluate the accuracy of the model
+5. Classify new samples
+
+## Get Started
+
+Remember to [install TensorFlow on your
+machine](../../get_started/os_setup.html#download-and-setup) before getting
+started with this tutorial.
+
+Here is the full code for our neural network:
+
+```python
+import tensorflow as tf
+import numpy as np
+
+# Data sets
+IRIS_TRAINING = "iris_training.csv"
+IRIS_TEST = "iris_test.csv"
+
+# Load datasets.
+training_set = tf.contrib.learn.datasets.base.load_csv(filename=IRIS_TRAINING, target_dtype=np.int)
+test_set = tf.contrib.learn.datasets.base.load_csv(filename=IRIS_TEST, target_dtype=np.int)
+
+x_train, x_test, y_train, y_test = training_set.data, test_set.data, \
+ training_set.target, test_set.target
+
+# Build 3 layer DNN with 10, 20, 10 units respectively.
+classifier = tf.contrib.learn.DNNClassifier(hidden_units=[10, 20, 10], n_classes=3)
+
+# Fit model.
+classifier.fit(x=x_train, y=y_train, steps=200)
+
+# Evaluate accuracy.
+accuracy_score = classifier.evaluate(x=x_test, y=y_test)["accuracy"]
+print('Accuracy: {0:f}'.format(accuracy_score))
+
+# Classify two new flower samples.
+new_samples = np.array(
+ [[6.4, 3.2, 4.5, 1.5], [5.8, 3.1, 5.0, 1.7]], dtype=float)
+y = classifier.predict(new_samples)
+print ('Predictions: {}'.format(str(y)))
+```
+
+The following sections walk through the code in detail.
+
+## Load the Iris CSV data to TensorFlow
+
+The [Iris data set](https://en.wikipedia.org/wiki/Iris_flower_data_set)
+contains 150 rows of data, comprising 50 samples from each of three related
+Iris species: *Iris setosa*, *Iris virginica*, and *Iris versicolor*. Each row
+contains the following data for each flower sample: [sepal](https://en.wikipedia.org/wiki/Sepal)
+length, sepal width, [petal](https://en.wikipedia.org/wiki/Petal) length, petal width,
+and flower species. Flower species are represented as integers, with 0 denoting *Iris
+setosa*, 1 denoting *Iris versicolor*, and 2 denoting *Iris virginica*.
+
+Sepal Length | Sepal Width | Petal Length | Petal Width | Species
+:----------- | :---------- | :----------- | :---------- | :------
+5.1 | 3.5 | 1.4 | 0.2 | 0
+4.9 | 3.0 | 1.4 | 0.2 | 0
+4.7 | 3.2 | 1.3 | 0.2 | 0
+&hellip; | &hellip; | &hellip; | &hellip; | &hellip;
+7.0 | 3.2 | 4.7 | 1.4 | 1
+6.4 | 3.2 | 4.5 | 1.5 | 1
+6.9 | 3.1 | 4.9 | 1.5 | 1
+&hellip; | &hellip; | &hellip; | &hellip; | &hellip;
+6.5 | 3.0 | 5.2 | 2.0 | 2
+6.2 | 3.4 | 5.4 | 2.3 | 2
+5.9 | 3.0 | 5.1 | 1.8 | 2
+
+<!-- TODO: The rest of this section presumes that CSVs will live in same
+directory as tutorial examples; if not, update links and code --> For this
+tutorial, the Iris data has been randomized and split into two separate CSVs:
+a training set of 120 samples
+([iris_training.csv](http://download.tensorflow.org/data/iris_training.csv))
+and a test set of 30 samples
+([iris_test.csv](http://download.tensorflow.org/data/iris_test.csv)).
+
+To get started, first import TensorFlow and numpy:
+
+```python
+import tensorflow as tf
+import numpy as np
+```
+
+Next, load the training and test sets into `Dataset`s using the [`load_csv()`]
+(https://www.tensorflow.org/code/tensorflow/contrib/learn/python/learn/datasets/base.py) method in `learn.datasets.base`. The
+`load_csv()` method has two required arguments:
+
+* `filename`, which takes the filepath to the CSV file, and
+* `target_dtype`, which takes the [`numpy` datatype](http://docs.scipy.org/doc/numpy/user/basics.types.html) of the dataset's target value.
+
+Here, the target (the value you're training the model to predict) is flower
+species, which is an integer from 0&ndash;2, so the appropriate `numpy`
+datatype is `np.int`:
+
+```python
+# Data sets
+IRIS_TRAINING = "iris_training.csv"
+IRIS_TEST = "iris_test.csv"
+
+# Load datasets.
+training_set = tf.contrib.learn.datasets.base.load_csv(filename=IRIS_TRAINING, target_dtype=np.int)
+test_set = tf.contrib.learn.datasets.base.load_csv(filename=IRIS_TEST, target_dtype=np.int)
+```
+
+Next, assign variables to the feature data and target values: `x_train` for
+training-set feature data, `x_test` for test-set feature data, `y_train` for
+training-set target values, and `y_test` for test-set target values. `Dataset`s
+in tf.contrib.learn are [named tuples](https://docs.python.org/2/library/collections.html#collections.namedtuple),
+and you can access feature data and target values
+via the `data` and `target` fields, respectively:
+
+```python
+x_train, x_test, y_train, y_test = training_set.data, test_set.data, \
+ training_set.target, test_set.target
+```
+
+Later on, in "Fit the DNNClassifier to the Iris Training Data," you'll use
+`x_train` and `y_train` to train your model, and in "Evaluate Model
+Accuracy", you'll use `x_test` and `y_test`. But first, you'll construct your
+model in the next section.
+
+## Construct a Deep Neural Network Classifier
+
+tf.contrib.learn offers a variety of predefined models, called [`Estimator`s
+](../../api_docs/python/contrib.learn.html#estimators), which you can use "out
+of the box" to run training and evaluation operations on your data. Here,
+you'll configure a Deep Neural Network Classifier model to fit the Iris data.
+Using tf.contrib.learn, you can instantiate your
+[`DNNClassifier`](../../api_docs/python/contrib.learn.html#DNNClassifier) with
+just one line of code:
+
+```python
+# Build 3 layer DNN with 10, 20, 10 units respectively.
+classifier = tf.contrib.learn.DNNClassifier(hidden_units=[10, 20, 10], n_classes=3)
+```
+
+The code above creates a `DNNClassifier` model with three [hidden layers](http://stats.stackexchange.com/questions/181/how-to-choose-the-number-of-hidden-layers-and-nodes-in-a-feedforward-neural-netw),
+containing 10, 20, and 10 neurons, respectively (`hidden_units=[10, 20, 10]`), and three target
+classes (`n_classes=3`).
+
+
+## Fit the DNNClassifier to the Iris Training Data
+
+Now that you've configured your DNN `classifier` model, you can fit it to the Iris training data
+using the [`fit`](../../api_docs/python/contrib.learn.html#BaseEstimator.fit)
+method. Pass as arguments your feature data (`x_train`), target values
+(`y_train`), and the number of steps to train (here, 200):
+
+```python
+# Fit model
+classifier.fit(x=x_train, y=y_train, steps=200)
+```
+
+<!-- Style the below (up to the next section) as an aside (note?) -->
+
+<!-- Pretty sure the following is correct, but maybe a SWE could verify? -->
+The state of the model is preserved in the `classifier`, which means you can train iteratively if
+you like. For example, the above is equivalent to the following:
+
+```python
+classifier.fit(x=x_train, y=y_train, steps=100)
+classifier.fit(x=x_train, y=y_train, steps=100)
+```
+
+<!-- TODO: When tutorial exists for monitoring, link to it here -->
+However, if you're looking to track the model while it trains, you'll likely
+want to instead use a TensorFlow [`monitor`](https://www.tensorflow.org/code/tensorflow/contrib/learn/python/learn/monitors.py)
+to perform logging operations.
+
+## Evaluate Model Accuracy
+
+You've fit your `DNNClassifier` model on the Iris training data; now, you can
+check its accuracy on the Iris test data using the [`evaluate`
+](../../api_docs/python/contrib.learn.html#BaseEstimator.evaluate) method.
+Like `fit`, `evaluate` takes feature data and target values as
+arguments, and returns a `dict` with the evaluation results. The following
+code passes the Iris test data&mdash;`x_test` and `y_test`&mdash;to `evaluate`
+and prints the `accuracy` from the results:
+
+```python
+accuracy_score = classifier.evaluate(x=x_test, y=y_test)["accuracy"]
+print('Accuracy: {0:f}'.format(accuracy_score))
+```
+
+Run the full script, and check the accuracy results. You should get:
+
+```
+Accuracy: 0.933333
+```
+
+Not bad for a relatively small data set!
+
+## Classify New Samples
+
+Use the estimator's `predict()` method to classify new samples. For example,
+say you have these two new flower samples:
+
+Sepal Length | Sepal Width | Petal Length | Petal Width
+:----------- | :---------- | :----------- | :----------
+6.4 | 3.2 | 4.5 | 1.5
+5.8 | 3.1 | 5.0 | 1.7
+
+You can predict their species with the following code:
+
+```python
+# Classify two new flower samples.
+new_samples = np.array(
+ [[6.4, 3.2, 4.5, 1.5], [5.8, 3.1, 5.0, 1.7]], dtype=float)
+y = classifier.predict(new_samples)
+print ('Predictions: {}'.format(str(y)))
+```
+
+The `predict()` method returns an array of predictions, one for each sample:
+
+```python
+Predictions: [1 2]
+```
+
+The model thus predicts that the first sample is *Iris versicolor*, and the
+second sample is *Iris virginica*.
+
+## Additional Resources
+
+* For further reference materials on tf.contrib.learn, see the official
+[API docs](../../api_docs/python/contrib.learn.md).
+
+<!-- David, will the below be live when this tutorial is released? -->
+* To learn more about using tf.contrib.learn to create linear models, see
+[Large-scale Linear Models with TensorFlow](../linear/).
+
+* To experiment with neural network modeling and visualization in the browser,
+check out [Deep Playground](http://playground.tensorflow.org/).
+
+* For more advanced tutorials on neural networks, see [Convolutional Neural
+Networks](../deep_cnn/) and [Recurrent Neural Networks](../recurrent/).
diff --git a/tensorflow/g3doc/tutorials/wide/index.md b/tensorflow/g3doc/tutorials/wide/index.md
new file mode 100644
index 0000000000..5dd409f4e4
--- /dev/null
+++ b/tensorflow/g3doc/tutorials/wide/index.md
@@ -0,0 +1,482 @@
+# TensorFlow Linear Model Tutorial
+
+In this tutorial, we will use the TF.Learn API in TensorFlow to solve a binary
+classification problem: Given census data about a person such as age, gender,
+education and occupation (the features), we will try to predict whether or not
+the person earns more than 50,000 dollars a year (the target label). We will
+train a **logistic regression** model, and given an individual's information our
+model will output a number between 0 and 1, which can be interpreted as the
+probability that the individual has an annual income of over 50,000 dollars.
+
+## Setup
+
+To try the code for this tutorial:
+
+1. [Install TensorFlow](../../get_started/os_setup.md) if you haven't
+already.
+
+2. Download [the tutorial code](
+https://www.tensorflow.org/code/tensorflow/examples/learn/wide_n_deep_tutorial.py).
+
+3. Install the pandas data analysis library. tf.learn doesn't require pandas, but it does support it, and this tutorial uses pandas. To install pandas:
+ 1. Get `pip`:
+
+ ```shell
+ # Ubuntu/Linux 64-bit
+ $ sudo apt-get install python-pip python-dev
+
+ # Mac OS X
+ $ sudo easy_install pip
+ $ sudo easy_install --upgrade six
+ ```
+
+ 2. Use `pip` to install pandas:
+
+ ```shell
+ $ sudo pip install pandas
+ ```
+
+ If you have trouble installing pandas, consult the [instructions]
+(http://pandas.pydata.org/pandas-docs/stable/install.html) on the pandas site.
+
+4. Execute the tutorial code with the following command to train the linear
+model described in this tutorial:
+
+ ```shell
+ $ python wide_n_deep_tutorial.py --model_type=wide
+ ```
+
+Read on to find out how this code builds its linear model.
+
+## Reading The Census Data
+
+The dataset we'll be using is the [Census Income Dataset]
+(https://archive.ics.uci.edu/ml/datasets/Census+Income). You can download the
+[training data]
+(https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data) and
+[test data]
+(https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test)
+manually or use code like this:
+
+```python
+import tempfile
+import urllib
+train_file = tempfile.NamedTemporaryFile()
+test_file = tempfile.NamedTemporaryFile()
+urllib.urlretrieve("https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data", train_file.name)
+urllib.urlretrieve("https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test", test_file.name)
+```
+
+Once the CSV files are downloaded, let's read them into [Pandas]
+(http://pandas.pydata.org/) dataframes.
+
+```python
+import pandas as pd
+COLUMNS = ["age", "workclass", "fnlwgt", "education", "education_num",
+ "marital_status", "occupation", "relationship", "race", "gender",
+ "capital_gain", "capital_loss", "hours_per_week", "native_country",
+ "income_bracket"]
+df_train = pd.read_csv(train_file, names=COLUMNS, skipinitialspace=True)
+df_test = pd.read_csv(test_file, names=COLUMNS, skipinitialspace=True, skiprows=1)
+```
+
+Since the task is a binary classification problem, we'll construct a label
+column named "label" whose value is 1 if the income is over 50K, and 0
+otherwise.
+
+```python
+LABEL_COLUMN = "label"
+df_train[LABEL_COLUMN] = (df_train["income_bracket"].apply(lambda x: ">50K" in x)).astype(int)
+df_test[LABEL_COLUMN] = (df_test["income_bracket"].apply(lambda x: ">50K" in x)).astype(int)
+```
+
+Next, let's take a look at the dataframe and see which columns we can use to
+predict the target label. The columns can be grouped into two types—categorical
+and continuous columns:
+
+* A column is called **categorical** if its value can only be one of the
+ categories in a finite set. For example, the native country of a person
+ (U.S., India, Japan, etc.) or the education level (high school, college,
+ etc.) are categorical columns.
+* A column is called **continuous** if its value can be any numerical value in
+ a continuous range. For example, the capital gain of a person (e.g. $14,084)
+ is a continuous column.
+
+```python
+CATEGORICAL_COLUMNS = ["workclass", "education", "marital_status", "occupation",
+ "relationship", "race", "gender", "native_country"]
+CONTINUOUS_COLUMNS = ["age", "education_num", "capital_gain", "capital_loss", "hours_per_week"]
+```
+
+Here's a list of columns available in the Census Income dataset:
+
+| Column Name | Type | Description | {.sortable}
+| -------------- | ----------- | --------------------------------- |
+| age | Continuous | The age of the individual |
+| workclass | Categorical | The type of employer the |
+: : : individual has (government, :
+: : : military, private, etc.). :
+| fnlwgt | Continuous | The number of people the census |
+: : : takers believe that observation :
+: : : represents (sample weight). This :
+: : : variable will not be used. :
+| education | Categorical | The highest level of education |
+: : : achieved for that individual. :
+| education_num | Continuous | The highest level of education in |
+: : : numerical form. :
+| marital_status | Categorical | Marital status of the individual. |
+| occupation | Categorical | The occupation of the individual. |
+| relationship | Categorical | Wife, Own-child, Husband, |
+: : : Not-in-family, Other-relative, :
+: : : Unmarried. :
+| race | Categorical | White, Asian-Pac-Islander, |
+: : : Amer-Indian-Eskimo, Other, Black. :
+| gender | Categorical | Female, Male. |
+| capital_gain | Continuous | Capital gains recorded. |
+| capital_loss | Continuous | Capital Losses recorded. |
+| hours_per_week | Continuous | Hours worked per week. |
+| native_country | Categorical | Country of origin of the |
+: : : individual. :
+| income_bracket | Categorical | ">50K" or "<=50K", meaning     |
+: : : whether the person makes more :
+: : : than \$50,000 annually. :
+
+## Converting Data into Tensors
+
+When building a TF.Learn model, the input data is specified by means of an Input
+Builder function. This builder function will not be called until it is later
+passed to TF.Learn methods such as `fit` and `evaluate`. The purpose of this
+function is to construct the input data, which is represented in the form of
+[Tensors]
+(https://www.tensorflow.org/versions/r0.9/api_docs/python/framework.html#Tensor)
+or [SparseTensors]
+(https://www.tensorflow.org/versions/r0.9/api_docs/python/sparse_ops.html#SparseTensor).
+In more detail, the Input Builder function returns the following as a pair:
+
+1. `feature_cols`: A dict from feature column names to `Tensors` or
+ `SparseTensors`.
+2. `label`: A `Tensor` containing the label column.
+
+The keys of the `feature_cols` will be used to construct columns in the
+next section. Because we want to call the `fit` and `evaluate` methods with
+different data, we define two different input builder functions,
+`train_input_fn` and `test_input_fn` which are identical except that they pass
+different data to `input_fn`. Note that `input_fn` will be called while
+constructing the TensorFlow graph, not while running the graph. What it is
+returning is a representation of the input data as the fundamental unit of
+TensorFlow computations, a `Tensor` (or `SparseTensor`).
+
+Our model represents the input data as *constant* tensors, meaning that the
+tensor represents a constant value, in this case the values of a particular
+column of `df_train` or `df_test`. This is the simplest way to pass data into
+TensorFlow. Another more advanced way to represent input data would be to
+construct an [Input Reader]
+(https://www.tensorflow.org/versions/r0.9/api_docs/python/io_ops.html#inputs-and-readers)
+that represents a file or other data source, and iterates through the file as
+TensorFlow runs the graph. Each continuous column in the train or test dataframe
+will be converted into a `Tensor`, which in general is a good format to
+represent dense data. For categorical data, we must represent the data as a
+`SparseTensor`. This data format is good for representing sparse data.
+
+```python
+import tensorflow as tf
+
+def input_fn(df):
+ # Creates a dictionary mapping from each continuous feature column name (k) to
+ # the values of that column stored in a constant Tensor.
+ continuous_cols = {k: tf.constant(df[k].values)
+ for k in CONTINUOUS_COLUMNS}
+ # Creates a dictionary mapping from each categorical feature column name (k)
+ # to the values of that column stored in a tf.SparseTensor.
+ categorical_cols = {k: tf.SparseTensor(
+ indices=[[i, 0] for i in range(df[k].size)],
+ values=df[k].values,
+ shape=[df[k].size, 1])
+ for k in CATEGORICAL_COLUMNS}
+ # Merges the two dictionaries into one.
+ feature_cols = dict(continuous_cols.items() + categorical_cols.items())
+ # Converts the label column into a constant Tensor.
+ label = tf.constant(df[LABEL_COLUMN].values)
+ # Returns the feature columns and the label.
+ return feature_cols, label
+
+def train_input_fn():
+ return input_fn(df_train)
+
+def eval_input_fn():
+ return input_fn(df_test)
+```
+
+## Selecting and Engineering Features for the Model
+
+Selecting and crafting the right set of feature columns is key to learning an
+effective model. A **feature column** can be either one of the raw columns in
+the original dataframe (let's call them **base feature columns**), or any new
+columns created based on some transformations defined over one or multiple base
+columns (let's call them **derived feature columns**). Basically, "feature
+column" is an abstract concept of any raw or derived variable that can be used
+to predict the target label.
+
+### Base Categorical Feature Columns
+
+To define a feature column for a categorical feature, we can create a
+`SparseColumn` using the TF.Learn API. If you know the set of all possible
+feature values of a column and there are only a few of them, you can use
+`sparse_column_with_keys`. Each key in the list will get assigned an
+auto-incremental ID starting from 0. For example, for the `gender` column we can
+assign the feature string "female" to an integer ID of 0 and "male" to 1 by
+doing:
+
+```python
+gender = tf.contrib.layers.sparse_column_with_keys(
+ column_name="gender", keys=["female", "male"])
+```
+
+What if we don't know the set of possible values in advance? Not a problem. We
+can use `sparse_column_with_hash_bucket` instead:
+
+```python
+education = tf.contrib.layers.sparse_column_with_hash_bucket("education", hash_bucket_size=1000)
+```
+
+What will happen is that each possible value in the feature column `education`
+will be hashed to an integer ID as we encounter them in training. See an example
+illustration below:
+
+ID | Feature
+--- | -------------
+... |
+9 | `"Bachelors"`
+... |
+103 | `"Doctorate"`
+... |
+375 | `"Masters"`
+... |
+
+No matter which way we choose to define a `SparseColumn`, each feature string
+will be mapped into an integer ID by looking up a fixed mapping or by hashing.
+Note that hashing collisions are possible, but may not significantly impact the
+model quality. Under the hood, the `LinearModel` class is responsible for
+managing the mapping and creating `tf.Variable` to store the model parameters
+(also known as model weights) for each feature ID. The model parameters will be
+learned through the model training process we'll go through later.
+
+We'll do a similar trick to define the other categorical features:
+
+```python
+race = tf.contrib.layers.sparse_column_with_keys(column_name="race", keys=[
+ "Amer-Indian-Eskimo", "Asian-Pac-Islander", "Black", "Other", "White"])
+marital_status = tf.contrib.layers.sparse_column_with_hash_bucket("marital_status", hash_bucket_size=100)
+relationship = tf.contrib.layers.sparse_column_with_hash_bucket("relationship", hash_bucket_size=100)
+workclass = tf.contrib.layers.sparse_column_with_hash_bucket("workclass", hash_bucket_size=100)
+occupation = tf.contrib.layers.sparse_column_with_hash_bucket("occupation", hash_bucket_size=1000)
+native_country = tf.contrib.layers.sparse_column_with_hash_bucket("native_country", hash_bucket_size=1000)
+```
+
+### Base Continuous Feature Columns
+
+Similarly, we can define a `RealValuedColumn` for each continuous feature column
+that we want to use in the model:
+
+```python
+age = tf.contrib.layers.real_valued_column("age")
+education_num = tf.contrib.layers.real_valued_column("education_num")
+capital_gain = tf.contrib.layers.real_valued_column("capital_gain")
+capital_loss = tf.contrib.layers.real_valued_column("capital_loss")
+hours_per_week = tf.contrib.layers.real_valued_column("hours_per_week")
+```
+
+### Making Continuous Features Categorical through Bucketization
+
+Sometimes the relationship between a continuous feature and the label is not
+linear. As a hypothetical example, a person's income may grow with age in the
+early stage of one's career, then the growth may slow at some point, and finally
+the income decreases after retirement. In this scenario, using the raw `age` as
+a real-valued feature column might not be a good choice because the model can
+only learn one of the three cases:
+
+1. Income always increases at some rate as age grows (positive correlation),
+1. Income always decreases at some rate as age grows (negative correlation), or
+1. Income stays the same no matter at what age (no correlation)
+
+If we want to learn the fine-grained correlation between income and each age
+group separately, we can leverage **bucketization**. Bucketization is a process
+of dividing the entire range of a continuous feature into a set of consecutive
+bins/buckets, and then converting the original numerical feature into a bucket
+ID (as a categorical feature) depending on which bucket that value falls into.
+So, we can define a `bucketized_column` over `age` as:
+
+```python
+age_buckets = tf.contrib.layers.bucketized_column(age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
+```
+
+where the `boundaries` is a list of bucket boundaries. In this case, there are
+10 boundaries, resulting in 11 age group buckets (from age 17 and below, 18-24,
+25-29, ..., to 65 and over).
+
+### Intersecting Multiple Columns with CrossedColumn
+
+Using each base feature column separately may not be enough to explain the data.
+For example, the correlation between education and the label (earning > 50,000
+dollars) may be different for different occupations. Therefore, if we only learn
+a single model weight for `education="Bachelors"` and `education="Masters"`, we
+won't be able to capture every single education-occupation combination (e.g.
+distinguishing between `education="Bachelors" AND occupation="Exec-managerial"`
+and `education="Bachelors" AND occupation="Craft-repair"`). To learn the
+differences between different feature combinations, we can add **crossed feature
+columns** to the model.
+
+```python
+education_x_occupation = tf.contrib.layers.crossed_column([education, occupation], hash_bucket_size=int(1e4))
+```
+
+We can also create a `CrossedColumn` over more than two columns. Each
+constituent column can be either a base feature column that is categorical
+(`SparseColumn`), a bucketized real-valued feature column (`BucketizedColumn`),
+or even another `CrossedColumn`. Here's an example:
+
+```python
+age_buckets_x_race_x_occupation = tf.contrib.layers.crossed_column(
+ [age_buckets, race, occupation], hash_bucket_size=int(1e6))
+```
+
+## Defining The Logistic Regression Model
+
+After processing the input data and defining all the feature columns, we're now
+ready to put them all together and build a Logistic Regression model. In the
+previous section we've seen several types of base and derived feature columns,
+including:
+
+* `SparseColumn`
+* `RealValuedColumn`
+* `BucketizedColumn`
+* `CrossedColumn`
+
+All of these are subclasses of the abstract `FeatureColumn` class, and can be
+added to the `feature_columns` field of a model:
+
+```python
+model_dir = tempfile.mkdtemp()
+m = tf.contrib.learn.LinearClassifier(feature_columns=[
+ gender, native_country, education, occupation, workclass, marital_status, race,
+ age_buckets, education_x_occupation, age_buckets_x_race_x_occupation],
+ model_dir=model_dir)
+```
+
+The model also automatically learns a bias term, which controls the prediction
+one would make without observing any features (see the section "How Logistic
+Regression Works" for more explanations). The learned model files will be stored
+in `model_dir`.
+
+## Training and Evaluating Our Model
+
+After adding all the features to the model, now let's look at how to actually
+train the model. Training a model is just a one-liner using the TF.Learn API:
+
+```python
+m.fit(input_fn=train_input_fn, steps=200)
+```
+
+After the model is trained, we can evaluate how good our model is at predicting
+the labels of the holdout data:
+
+```python
+results = m.evaluate(input_fn=eval_input_fn, steps=1)
+for key in sorted(results):
+ print "%s: %s" % (key, results[key])
+```
+
+The first line of the output should be something like `accuracy: 0.83557522`,
+which means the accuracy is 83.6%. Feel free to try more features and
+transformations and see if you can do even better!
+
+If you'd like to see a working end-to-end example, you can download our [example
+code]
+(https://www.tensorflow.org/code/tensorflow/examples/learn/wide_n_deep_tutorial.py)
+and set the `model_type` flag to `wide`.
+
+## Adding Regularization to Prevent Overfitting
+
+Regularization is a technique used to avoid **overfitting**. Overfitting happens
+when your model does well on the data it is trained on, but worse on test data
+that the model has not seen before, such as live traffic. Overfitting generally
+occurs when a model is excessively complex, such as having too many parameters
+relative to the number of observed training data. Regularization allows for you
+to control your model's complexity and makes the model more generalizable to
+unseen data.
+
+In the Linear Model library, you can add L1 and L2 regularizations to the model
+as:
+
+```
+m = tf.contrib.learn.LinearClassifier(feature_columns=[
+ gender, native_country, education, occupation, workclass, marital_status, race,
+ age_buckets, education_x_occupation, age_buckets_x_race_x_occupation],
+ optimizer=tf.train.FtrlOptimizer(
+ learning_rate=0.1,
+ l1_regularization_strength=1.0,
+ l2_regularization_strength=1.0),
+ model_dir=model_dir)
+```
+
+One important difference between L1 and L2 regularization is that L1
+regularization tends to make model weights stay at zero, creating sparser
+models, whereas L2 regularization also tries to make the model weights closer to
+zero but not necessarily zero. Therefore, if you increase the strength of L1
+regularization, you will have a smaller model size because many of the model
+weights will be zero. This is often desirable when the feature space is very
+large but sparse, and when there are resource constraints that prevent you from
+serving a model that is too large.
+
+In practice, you should try various combinations of L1, L2 regularization
+strengths and find the best parameters that best control overfitting and give
+you a desirable model size.
+
+## How Logistic Regression Works
+
+Finally, let's take a minute to talk about what the Logistic Regression model
+actually looks like in case you're not already familiar with it. We'll denote
+the label as $$Y$$, and the set of observed features as a feature vector
+$$\mathbf{x}=[x_1, x_2, ..., x_d]$$. We define $$Y=1$$ if an individual earned >
+50,000 dollars and $$Y=0$$ otherwise. In Logistic Regression, the probability of
+the label being positive ($$Y=1$$) given the features $$\mathbf{x}$$ is given
+as:
+
+$$ P(Y=1|\mathbf{x}) = \frac{1}{1+\exp(-(\mathbf{w}^T\mathbf{x}+b))}$$
+
+where $$\mathbf{w}=[w_1, w_2, ..., w_d]$$ are the model weights for the features
+$$\mathbf{x}=[x_1, x_2, ..., x_d]$$. $$b$$ is a constant that is often called
+the **bias** of the model. The equation consists of two parts—a linear model and
+a logistic function:
+
+* **Linear Model**: First, we can see that $$\mathbf{w}^T\mathbf{x}+b = b +
+ w_1x_1 + ... +w_dx_d$$ is a linear model where the output is a linear
+ function of the input features $$\mathbf{x}$$. The bias $$b$$ is the
+ prediction one would make without observing any features. The model weight
+ $$w_i$$ reflects how the feature $$x_i$$ is correlated with the positive
+ label. If $$x_i$$ is positively correlated with the positive label, the
+ weight $$w_i$$ increases, and the probability $$P(Y=1|\mathbf{x})$$ will be
+ closer to 1. On the other hand, if $$x_i$$ is negatively correlated with the
+ positive label, then the weight $$w_i$$ decreases and the probability
+ $$P(Y=1|\mathbf{x})$$ will be closer to 0.
+
+* **Logistic Function**: Second, we can see that there's a logistic function
+ (also known as the sigmoid function) $$S(t) = 1/(1+\exp(-t))$$ being applied
+ to the linear model. The logistic function is used to convert the output of
+ the linear model $$\mathbf{w}^T\mathbf{x}+b$$ from any real number into the
+ range of $$[0, 1]$$, which can be interpreted as a probability.
+
+Model training is an optimization problem: The goal is to find a set of model
+weights (i.e. model parameters) to minimize a **loss function** defined over the
+training data, such as logistic loss for Logistic Regression models. The loss
+function measures the discrepancy between the ground-truth label and the model's
+prediction. If the prediction is very close to the ground-truth label, the loss
+value will be low; if the prediction is very far from the label, then the loss
+value would be high.
+
+## Learn Deeper
+
+If you're interested in learning more, check out our [Wide & Deep Learning
+Tutorial](../wide_and_deep/) where we'll show you how to combine
+the strengths of linear models and deep neural networks by jointly training them
+using the TF.Learn API.
diff --git a/tensorflow/g3doc/tutorials/wide_and_deep/index.md b/tensorflow/g3doc/tutorials/wide_and_deep/index.md
new file mode 100644
index 0000000000..910e91e1d0
--- /dev/null
+++ b/tensorflow/g3doc/tutorials/wide_and_deep/index.md
@@ -0,0 +1,275 @@
+# TensorFlow Wide & Deep Learning Tutorial
+
+In the previous [TensorFlow Linear Model Tutorial](../wide/),
+we trained a logistic regression model to predict the probability that the
+individual has an annual income of over 50,000 dollars using the [Census Income
+Dataset](https://archive.ics.uci.edu/ml/datasets/Census+Income). TensorFlow is
+great for training deep neural networks too, and you might be wondering which one
+you should choose—Well, why not both? Would it be possible to combine the
+strengths of both in one model?
+
+In this tutorial, we'll introduce how to use the TF.Learn API to jointly train a
+wide linear model and a deep feed-forward neural network. This approach combines
+the strengths of memorization and generalization. It's useful for generic
+large-scale regression and classification problems with sparse input features
+(e.g., categorical features with a large number of possible feature values). If
+you're interested in learning more about how Wide & Deep Learning works, please
+check out our [research paper](http://arxiv.org/abs/1606.07792).
+
+![Wide & Deep Spectrum of Models](../../images/wide_n_deep.svg "Wide & Deep")
+
+The figure above shows a comparison of a wide model (logistic regression with
+sparse features and transformations), a deep model (feed-forward neural network
+with an embedding layer and several hidden layers), and a Wide & Deep model
+(joint training of both). At a high level, there are only 3 steps to configure a
+wide, deep, or Wide & Deep model using the TF.Learn API:
+
+1. Select features for the wide part: Choose the sparse base columns and
+ crossed columns you want to use.
+1. Select features for the deep part: Choose the continuous columns, the
+ embedding dimension for each categorical column, and the hidden layer sizes.
+1. Put them all together in a Wide & Deep model
+ (`DNNLinearCombinedClassifier`).
+
+And that's it! Let's go through a simple example.
+
+## Setup
+
+To try the code for this tutorial:
+
+1. [Install TensorFlow](../../get_started/os_setup.md) if you haven't
+already.
+
+2. Download [the tutorial code](
+https://www.tensorflow.org/code/tensorflow/examples/learn/wide_n_deep_tutorial.py).
+
+3. Install the pandas data analysis library. tf.learn doesn't require pandas, but it does support it, and this tutorial uses pandas. To install pandas:
+ 1. Get `pip`:
+
+ ```shell
+ # Ubuntu/Linux 64-bit
+ $ sudo apt-get install python-pip python-dev
+
+ # Mac OS X
+ $ sudo easy_install pip
+ $ sudo easy_install --upgrade six
+ ```
+
+ 2. Use `pip` to install pandas:
+
+ ```shell
+ $ sudo pip install pandas
+ ```
+
+  If you have trouble installing pandas, consult the
+[instructions](http://pandas.pydata.org/pandas-docs/stable/install.html) on the pandas site.
+
+4. Execute the tutorial code with the following command to train the Wide & Deep
+model described in this tutorial:
+
+ ```shell
+ $ python wide_n_deep_tutorial.py --model_type=wide_n_deep
+ ```
+
+Read on to find out how this code builds its Wide & Deep model.
+
+
+## Define Base Feature Columns
+
+First, let's define the base categorical and continuous feature columns that
+we'll use. These base columns will be the building blocks used by both the wide
+part and the deep part of the model.
+
+```python
+import tensorflow as tf
+
+# Categorical base columns.
+gender = tf.contrib.layers.sparse_column_with_keys(column_name="gender", keys=["female", "male"])
+race = tf.contrib.layers.sparse_column_with_keys(column_name="race", keys=[
+ "Amer-Indian-Eskimo", "Asian-Pac-Islander", "Black", "Other", "White"])
+education = tf.contrib.layers.sparse_column_with_hash_bucket("education", hash_bucket_size=1000)
+marital_status = tf.contrib.layers.sparse_column_with_hash_bucket("marital_status", hash_bucket_size=100)
+relationship = tf.contrib.layers.sparse_column_with_hash_bucket("relationship", hash_bucket_size=100)
+workclass = tf.contrib.layers.sparse_column_with_hash_bucket("workclass", hash_bucket_size=100)
+occupation = tf.contrib.layers.sparse_column_with_hash_bucket("occupation", hash_bucket_size=1000)
+native_country = tf.contrib.layers.sparse_column_with_hash_bucket("native_country", hash_bucket_size=1000)
+
+# Continuous base columns.
+age = tf.contrib.layers.real_valued_column("age")
+age_buckets = tf.contrib.layers.bucketized_column(age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
+education_num = tf.contrib.layers.real_valued_column("education_num")
+capital_gain = tf.contrib.layers.real_valued_column("capital_gain")
+capital_loss = tf.contrib.layers.real_valued_column("capital_loss")
+hours_per_week = tf.contrib.layers.real_valued_column("hours_per_week")
+```
+
+## The Wide Model: Linear Model with Crossed Feature Columns
+
+The wide model is a linear model with a wide set of sparse and crossed feature
+columns:
+
+```python
+wide_columns = [
+ gender, native_country, education, occupation, workclass, marital_status, relationship, age_buckets,
+ tf.contrib.layers.crossed_column([education, occupation], hash_bucket_size=int(1e4)),
+ tf.contrib.layers.crossed_column([native_country, occupation], hash_bucket_size=int(1e4)),
+ tf.contrib.layers.crossed_column([age_buckets, race, occupation], hash_bucket_size=int(1e6))]
+```
+
+Wide models with crossed feature columns can memorize sparse interactions
+between features effectively. That being said, one limitation of crossed feature
+columns is that they do not generalize to feature combinations that have not
+appeared in the training data. Let's add a deep model with embeddings to fix
+that.
+
+## The Deep Model: Neural Network with Embeddings
+
+The deep model is a feed-forward neural network, as shown in the previous
+figure. Each of the sparse, high-dimensional categorical features are first
+converted into a low-dimensional and dense real-valued vector, often referred to
+as an embedding vector. These low-dimensional dense embedding vectors are
+concatenated with the continuous features, and then fed into the hidden layers
+of a neural network in the forward pass. The embedding values are initialized
+randomly, and are trained along with all other model parameters to minimize the
+training loss. If you're interested in learning more about embeddings, check out
+the TensorFlow tutorial on
+[Vector Representations of Words](https://www.tensorflow.org/versions/r0.9/tutorials/word2vec/index.html), or
+[Word Embedding](https://en.wikipedia.org/wiki/Word_embedding) on Wikipedia.
+
+We'll configure the embeddings for the categorical columns using
+`embedding_column`, and concatenate them with the continuous columns:
+
+```python
+deep_columns = [
+ tf.contrib.layers.embedding_column(workclass, dimension=8),
+ tf.contrib.layers.embedding_column(education, dimension=8),
+ tf.contrib.layers.embedding_column(marital_status, dimension=8),
+ tf.contrib.layers.embedding_column(gender, dimension=8),
+ tf.contrib.layers.embedding_column(relationship, dimension=8),
+ tf.contrib.layers.embedding_column(race, dimension=8),
+ tf.contrib.layers.embedding_column(native_country, dimension=8),
+ tf.contrib.layers.embedding_column(occupation, dimension=8),
+ age, education_num, capital_gain, capital_loss, hours_per_week]
+```
+
+The higher the `dimension` of the embedding is, the more degrees of freedom the
+model will have to learn the representations of the features. For simplicity, we
+set the dimension to 8 for all feature columns here. Empirically, a more
+informed decision for the number of dimensions is to start with a value on the
+order of $$k\log_2(n)$$ or $$k\sqrt[4]{n}$$, where $$n$$ is the number of unique
+features in a feature column and $$k$$ is a small constant (usually smaller than
+10).
+
+Through dense embeddings, deep models can generalize better and make predictions
+on feature pairs that were previously unseen in the training data. However, it
+is difficult to learn effective low-dimensional representations for feature
+columns when the underlying interaction matrix between two feature columns is
+sparse and high-rank. In such cases, the interaction between most feature pairs
+should be zero except for a few, but dense embeddings will lead to nonzero
+predictions for all feature pairs, and thus can over-generalize. On the other
+hand, linear models with crossed features can memorize these “exception rules”
+effectively with fewer model parameters.
+
+Now, let's see how to jointly train wide and deep models and allow them to
+complement each other’s strengths and weaknesses.
+
+## Combining Wide and Deep Models into One
+
+The wide models and deep models are combined by summing up their final output
+log odds as the prediction, then feeding the prediction to a logistic loss
+function. All the graph definition and variable allocations have already been
+handled for you under the hood, so you simply need to create a
+`DNNLinearCombinedClassifier`:
+
+```python
+import tempfile
+model_dir = tempfile.mkdtemp()
+m = tf.contrib.learn.DNNLinearCombinedClassifier(
+ model_dir=model_dir,
+ linear_feature_columns=wide_columns,
+ dnn_feature_columns=deep_columns,
+ dnn_hidden_units=[100, 50])
+```
+
+## Training and Evaluating The Model
+
+Before we train the model, let's read in the Census dataset as we did in the
+[TensorFlow Linear Model tutorial](../wide/). The code for
+input data processing is provided here again for your convenience:
+
+```python
+import pandas as pd
+import urllib
+
+# Define the column names for the data sets.
+COLUMNS = ["age", "workclass", "fnlwgt", "education", "education_num",
+ "marital_status", "occupation", "relationship", "race", "gender",
+ "capital_gain", "capital_loss", "hours_per_week", "native_country", "income_bracket"]
+LABEL_COLUMN = 'label'
+CATEGORICAL_COLUMNS = ["workclass", "education", "marital_status", "occupation",
+ "relationship", "race", "gender", "native_country"]
+CONTINUOUS_COLUMNS = ["age", "education_num", "capital_gain", "capital_loss",
+ "hours_per_week"]
+
+# Download the training and test data to temporary files.
+# Alternatively, you can download them yourself and change train_file and
+# test_file to your own paths.
+train_file = tempfile.NamedTemporaryFile()
+test_file = tempfile.NamedTemporaryFile()
+urllib.urlretrieve("https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data", train_file.name)
+urllib.urlretrieve("https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test", test_file.name)
+
+# Read the training and test data sets into Pandas dataframe.
+df_train = pd.read_csv(train_file, names=COLUMNS, skipinitialspace=True)
+df_test = pd.read_csv(test_file, names=COLUMNS, skipinitialspace=True, skiprows=1)
+df_train[LABEL_COLUMN] = (df_train['income_bracket'].apply(lambda x: '>50K' in x)).astype(int)
+df_test[LABEL_COLUMN] = (df_test['income_bracket'].apply(lambda x: '>50K' in x)).astype(int)
+
+def input_fn(df):
+ # Creates a dictionary mapping from each continuous feature column name (k) to
+ # the values of that column stored in a constant Tensor.
+ continuous_cols = {k: tf.constant(df[k].values)
+ for k in CONTINUOUS_COLUMNS}
+ # Creates a dictionary mapping from each categorical feature column name (k)
+ # to the values of that column stored in a tf.SparseTensor.
+ categorical_cols = {k: tf.SparseTensor(
+ indices=[[i, 0] for i in range(df[k].size)],
+ values=df[k].values,
+ shape=[df[k].size, 1])
+ for k in CATEGORICAL_COLUMNS}
+ # Merges the two dictionaries into one.
+ feature_cols = dict(continuous_cols.items() + categorical_cols.items())
+ # Converts the label column into a constant Tensor.
+ label = tf.constant(df[LABEL_COLUMN].values)
+ # Returns the feature columns and the label.
+ return feature_cols, label
+
+def train_input_fn():
+ return input_fn(df_train)
+
+def eval_input_fn():
+ return input_fn(df_test)
+```
+
+After reading in the data, you can train and evaluate the model:
+
+```python
+m.fit(input_fn=train_input_fn, steps=200)
+results = m.evaluate(input_fn=eval_input_fn, steps=1)
+for key in sorted(results):
+ print "%s: %s" % (key, results[key])
+```
+
+The first line of the output should be something like `accuracy: 0.84429705`. We
+can see that the accuracy was improved from about 83.6% using a wide-only linear
+model to about 84.4% using a Wide & Deep model. If you'd like to see a working
+end-to-end example, you can download our
+[example code](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/learn/wide_n_deep_tutorial.py).
+
+Note that this tutorial is just a quick example on a small dataset to get you
+familiar with the API. Wide & Deep Learning will be even more powerful if you
+try it on a large dataset with many sparse feature columns that have a large
+number of possible feature values. Again, feel free to take a look at our
+[research paper](http://arxiv.org/abs/1606.07792) for more ideas about how to
+apply Wide & Deep Learning in real-world large-scale machine learning problems.