<?xml version="1.0" ?>
<net batch="1" name="road-segmentation-adas-0001" version="5">
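<!-- OpenVINO Inference Engine IR (version 5) graph for road-segmentation-adas-0001.
     Each <blobs> entry below gives a byte offset and size into the companion .bin
     weights file; all tensors are FP32 (4 bytes per value) and shapes are NCHW. -->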
<layers>
<layer id="0" name="data" precision="FP32" type="Input">
<output>
<port id="0">
<dim>1</dim>
<dim>3</dim>
<dim>512</dim>
<dim>896</dim>
</port>
</output>
</layer>
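<!-- Stem: 3x3 stride-2 convolution, 3 -> 16 channels, 512x896 -> 256x448
     (weights blob: 16*3*3*3 FP32 values = 1728 bytes). The "_BN" suffix on the
     Conv2d layers suggests batch-norm parameters were folded into the
     convolution weights and biases during export. -->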
<layer id="1" name="L0000_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>3</dim>
<dim>512</dim>
<dim>896</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="0" size="1728"/>
<biases offset="1728" size="64"/>
</blobs>
</layer>
<layer id="2" name="L0001_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
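<!-- Residual bottleneck pattern used throughout the backbone: 1x1 reduce
     (16 -> 8) -> 3x3 depthwise convolution (group equals channel count) ->
     1x1 expand (8 -> 16), followed by an Eltwise sum with the block input
     and a ReLU. -->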
<layer id="3" name="L0002_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="8" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="1792" size="512"/>
<biases offset="2304" size="32"/>
</blobs>
</layer>
<layer id="4" name="L0003_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="5" name="L0004_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="8" kernel="3,3" output="8" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="2336" size="288"/>
<biases offset="2624" size="32"/>
</blobs>
</layer>
<layer id="6" name="L0005_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="7" name="L0006_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="2656" size="512"/>
<biases offset="3168" size="64"/>
</blobs>
</layer>
<layer id="8" name="L0007_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="9" name="L0008_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="10" name="L0009_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="8" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="3232" size="512"/>
<biases offset="3744" size="32"/>
</blobs>
</layer>
<layer id="11" name="L0010_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="12" name="L0011_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="8" kernel="3,3" output="8" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="3776" size="288"/>
<biases offset="4064" size="32"/>
</blobs>
</layer>
<layer id="13" name="L0012_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
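<!-- Channel-expansion block: the main branch expands 8 -> 24 (layer 14) while
     layer 15 projects the 16-channel skip connection to 24 channels so the
     Eltwise sum in layer 16 has matching shapes. -->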
<layer id="14" name="L0013_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="4096" size="768"/>
<biases offset="4864" size="96"/>
</blobs>
</layer>
<layer id="15" name="L0014_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="4960" size="1536"/>
<biases offset="6496" size="96"/>
</blobs>
</layer>
<layer id="16" name="L0015_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="17" name="L0016_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="18" name="L0017_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="8" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="6592" size="768"/>
<biases offset="7360" size="32"/>
</blobs>
</layer>
<layer id="19" name="L0018_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="20" name="L0019_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="8" kernel="3,3" output="8" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="7392" size="288"/>
<biases offset="7680" size="32"/>
</blobs>
</layer>
<layer id="21" name="L0020_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="22" name="L0021_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="7712" size="768"/>
<biases offset="8480" size="96"/>
</blobs>
</layer>
<layer id="23" name="L0022_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="24" name="L0023_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="25" name="L0024_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="8" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="8576" size="768"/>
<biases offset="9344" size="32"/>
</blobs>
</layer>
<layer id="26" name="L0025_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="27" name="L0026_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="8" kernel="3,3" output="8" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="9376" size="288"/>
<biases offset="9664" size="32"/>
</blobs>
</layer>
<layer id="28" name="L0027_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="29" name="L0028_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="9696" size="1024"/>
<biases offset="10720" size="128"/>
</blobs>
</layer>
<layer id="30" name="L0029_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="10848" size="3072"/>
<biases offset="13920" size="128"/>
</blobs>
</layer>
<layer id="31" name="L0030_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="32" name="L0031_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="33" name="L0032_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="8" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="14048" size="1024"/>
<biases offset="15072" size="32"/>
</blobs>
</layer>
<layer id="34" name="L0033_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="35" name="L0034_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="8" kernel="3,3" output="8" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="15104" size="288"/>
<biases offset="15392" size="32"/>
</blobs>
</layer>
<layer id="36" name="L0035_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="37" name="L0036_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>8</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="15424" size="1024"/>
<biases offset="16448" size="128"/>
</blobs>
</layer>
<layer id="38" name="L0037_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="39" name="L0038_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
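<!-- Downsampling stage: learned 2x2 stride-2 convolution, 32 -> 16 channels,
     256x448 -> 128x224 (weights blob: 16*32*2*2 FP32 values = 8192 bytes). -->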
<layer id="40" name="L0039_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="2,2" output="16" pads_begin="0,0" pads_end="0,0" strides="2,2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="16576" size="8192"/>
<biases offset="24768" size="64"/>
</blobs>
</layer>
<layer id="41" name="L0040_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="42" name="L0041_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="16" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="24832" size="576"/>
<biases offset="25408" size="64"/>
</blobs>
</layer>
<layer id="43" name="L0042_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="44" name="L0043_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="25472" size="3072"/>
<biases offset="28544" size="192"/>
</blobs>
</layer>
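<!-- Shortcut branch for the downsampling stage: 2x2 stride-2 max-pool of the
     32-channel input, projected to 48 channels by layer 46 and merged with
     the convolutional branch in the Eltwise sum of layer 47. -->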
<layer id="45" name="L0044_MaxPool2d" precision="FP32" type="Pooling">
<data exclude-pad="true" kernel="2,2" pads_begin="0,0" pads_end="0,0" pool-method="max" rounding_type="ceil" strides="2,2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="46" name="L0045_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="28736" size="6144"/>
<biases offset="34880" size="192"/>
</blobs>
</layer>
<layer id="47" name="L0046_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="48" name="L0047_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="49" name="L0048_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="35072" size="3072"/>
<biases offset="38144" size="64"/>
</blobs>
</layer>
<layer id="50" name="L0049_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="51" name="L0050_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="16" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="38208" size="576"/>
<biases offset="38784" size="64"/>
</blobs>
</layer>
<layer id="52" name="L0051_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="53" name="L0052_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="38848" size="3072"/>
<biases offset="41920" size="192"/>
</blobs>
</layer>
<layer id="54" name="L0053_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="55" name="L0054_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="56" name="L0055_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="42112" size="3072"/>
<biases offset="45184" size="64"/>
</blobs>
</layer>
<layer id="57" name="L0056_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="58" name="L0057_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="16" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="45248" size="576"/>
<biases offset="45824" size="64"/>
</blobs>
</layer>
<layer id="59" name="L0058_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="60" name="L0059_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="45888" size="3072"/>
<biases offset="48960" size="192"/>
</blobs>
</layer>
<layer id="61" name="L0060_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="62" name="L0061_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="63" name="L0062_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="49152" size="3072"/>
<biases offset="52224" size="64"/>
</blobs>
</layer>
<layer id="64" name="L0063_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
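<!-- From here the depthwise 3x3 convolutions cycle through dilation rates
     2, 3, 1 across successive blocks (layers 65, 72, 79, ...), growing the
     receptive field while resolution stays at 128x224. -->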
<layer id="65" name="L0064_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="2,2" group="16" kernel="3,3" output="16" pads_begin="2,2" pads_end="2,2" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="52288" size="576"/>
<biases offset="52864" size="64"/>
</blobs>
</layer>
<layer id="66" name="L0065_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="67" name="L0066_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="52928" size="3072"/>
<biases offset="56000" size="192"/>
</blobs>
</layer>
<layer id="68" name="L0067_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="69" name="L0068_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="70" name="L0069_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="56192" size="3072"/>
<biases offset="59264" size="64"/>
</blobs>
</layer>
<layer id="71" name="L0070_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="72" name="L0071_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="3,3" group="16" kernel="3,3" output="16" pads_begin="3,3" pads_end="3,3" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="59328" size="576"/>
<biases offset="59904" size="64"/>
</blobs>
</layer>
<layer id="73" name="L0072_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="74" name="L0073_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="59968" size="3072"/>
<biases offset="63040" size="192"/>
</blobs>
</layer>
<layer id="75" name="L0074_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="76" name="L0075_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="77" name="L0076_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="63232" size="3072"/>
<biases offset="66304" size="64"/>
</blobs>
</layer>
<layer id="78" name="L0077_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="79" name="L0078_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="16" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="66368" size="576"/>
<biases offset="66944" size="64"/>
</blobs>
</layer>
<layer id="80" name="L0079_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="81" name="L0080_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="67008" size="3072"/>
<biases offset="70080" size="192"/>
</blobs>
</layer>
<layer id="82" name="L0081_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="83" name="L0082_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="84" name="L0083_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="70272" size="3072"/>
<biases offset="73344" size="64"/>
</blobs>
</layer>
<layer id="85" name="L0084_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="86" name="L0085_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="2,2" group="16" kernel="3,3" output="16" pads_begin="2,2" pads_end="2,2" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="73408" size="576"/>
<biases offset="73984" size="64"/>
</blobs>
</layer>
<layer id="87" name="L0086_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="88" name="L0087_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="74048" size="3072"/>
<biases offset="77120" size="192"/>
</blobs>
</layer>
<layer id="89" name="L0088_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="90" name="L0089_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="91" name="L0090_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="77312" size="3072"/>
<biases offset="80384" size="64"/>
</blobs>
</layer>
<layer id="92" name="L0091_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="93" name="L0092_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="3,3" group="16" kernel="3,3" output="16" pads_begin="3,3" pads_end="3,3" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="80448" size="576"/>
<biases offset="81024" size="64"/>
</blobs>
</layer>
<layer id="94" name="L0093_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="95" name="L0094_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="81088" size="3072"/>
<biases offset="84160" size="192"/>
</blobs>
</layer>
<layer id="96" name="L0095_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="97" name="L0096_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="98" name="L0097_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="84352" size="3072"/>
<biases offset="87424" size="64"/>
</blobs>
</layer>
<layer id="99" name="L0098_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="100" name="L0099_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="16" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="87488" size="576"/>
<biases offset="88064" size="64"/>
</blobs>
</layer>
<layer id="101" name="L0100_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="102" name="L0101_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="88128" size="3072"/>
<biases offset="91200" size="192"/>
</blobs>
</layer>
<layer id="103" name="L0102_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="104" name="L0103_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
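<!-- Second learned downsampling: 2x2 stride-2 convolution, 48 -> 32 channels,
     128x224 -> 64x112 (weights blob: 32*48*2*2 FP32 values = 24576 bytes). -->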
<layer id="105" name="L0104_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="2,2" output="32" pads_begin="0,0" pads_end="0,0" strides="2,2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="91392" size="24576"/>
<biases offset="115968" size="128"/>
</blobs>
</layer>
<layer id="106" name="L0105_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="107" name="L0106_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="32" kernel="3,3" output="32" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="116096" size="1152"/>
<biases offset="117248" size="128"/>
</blobs>
</layer>
<layer id="108" name="L0107_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="109" name="L0108_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="117376" size="8192"/>
<biases offset="125568" size="256"/>
</blobs>
</layer>
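<!-- Matching shortcut branch: 2x2 stride-2 max-pool of the 48-channel input,
     projected to 64 channels by layer 111 and summed with the convolutional
     branch in layer 112. -->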
<layer id="110" name="L0109_MaxPool2d" precision="FP32" type="Pooling">
<data exclude-pad="true" kernel="2,2" pads_begin="0,0" pads_end="0,0" pool-method="max" rounding_type="ceil" strides="2,2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="111" name="L0110_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="125824" size="12288"/>
<biases offset="138112" size="256"/>
</blobs>
</layer>
<layer id="112" name="L0111_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="113" name="L0112_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="114" name="L0113_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="138368" size="8192"/>
<biases offset="146560" size="128"/>
</blobs>
</layer>
<layer id="115" name="L0114_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="116" name="L0115_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="32" kernel="3,3" output="32" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="146688" size="1152"/>
<biases offset="147840" size="128"/>
</blobs>
</layer>
<layer id="117" name="L0116_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="118" name="L0117_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="147968" size="8192"/>
<biases offset="156160" size="256"/>
</blobs>
</layer>
<layer id="119" name="L0118_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="120" name="L0119_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="121" name="L0120_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="156416" size="8192"/>
<biases offset="164608" size="128"/>
</blobs>
</layer>
<layer id="122" name="L0121_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="123" name="L0122_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="32" kernel="3,3" output="32" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="164736" size="1152"/>
<biases offset="165888" size="128"/>
</blobs>
</layer>
<layer id="124" name="L0123_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="125" name="L0124_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="166016" size="8192"/>
<biases offset="174208" size="256"/>
</blobs>
</layer>
<layer id="126" name="L0125_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="127" name="L0126_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="128" name="L0127_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="174464" size="8192"/>
<biases offset="182656" size="128"/>
</blobs>
</layer>
<layer id="129" name="L0128_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="130" name="L0129_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="32" kernel="3,3" output="32" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="182784" size="1152"/>
<biases offset="183936" size="128"/>
</blobs>
</layer>
<layer id="131" name="L0130_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="132" name="L0131_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="184064" size="8192"/>
<biases offset="192256" size="256"/>
</blobs>
</layer>
<layer id="133" name="L0132_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="134" name="L0133_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
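<!-- Deeper stage: the bottlenecks widen to 40 internal / 80 output channels,
     and the depthwise convolutions step through dilation rates 2, 3, 4
     (layers 137, 145, 152) at the 64x112 resolution. -->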
<layer id="135" name="L0134_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="192512" size="10240"/>
<biases offset="202752" size="160"/>
</blobs>
</layer>
<layer id="136" name="L0135_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="137" name="L0136_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="2,2" group="40" kernel="3,3" output="40" pads_begin="2,2" pads_end="2,2" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="202912" size="1440"/>
<biases offset="204352" size="160"/>
</blobs>
</layer>
<layer id="138" name="L0137_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="139" name="L0138_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="204512" size="12800"/>
<biases offset="217312" size="320"/>
</blobs>
</layer>
<layer id="140" name="L0139_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="217632" size="20480"/>
<biases offset="238112" size="320"/>
</blobs>
</layer>
<layer id="141" name="L0140_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="142" name="L0141_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="143" name="L0142_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="238432" size="12800"/>
<biases offset="251232" size="160"/>
</blobs>
</layer>
<layer id="144" name="L0143_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="145" name="L0144_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="3,3" group="40" kernel="3,3" output="40" pads_begin="3,3" pads_end="3,3" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="251392" size="1440"/>
<biases offset="252832" size="160"/>
</blobs>
</layer>
<layer id="146" name="L0145_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="147" name="L0146_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="252992" size="12800"/>
<biases offset="265792" size="320"/>
</blobs>
</layer>
<layer id="148" name="L0147_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="149" name="L0148_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="150" name="L0149_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="266112" size="12800"/>
<biases offset="278912" size="160"/>
</blobs>
</layer>
<layer id="151" name="L0150_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="152" name="L0151_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="4,4" group="40" kernel="3,3" output="40" pads_begin="4,4" pads_end="4,4" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="279072" size="1440"/>
<biases offset="280512" size="160"/>
</blobs>
</layer>
<layer id="153" name="L0152_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="154" name="L0153_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="280672" size="12800"/>
<biases offset="293472" size="320"/>
</blobs>
</layer>
<layer id="155" name="L0154_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="156" name="L0155_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="157" name="L0156_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="293792" size="12800"/>
<biases offset="306592" size="160"/>
</blobs>
</layer>
<layer id="158" name="L0157_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="159" name="L0158_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="40" kernel="3,3" output="40" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="306752" size="1440"/>
<biases offset="308192" size="160"/>
</blobs>
</layer>
<layer id="160" name="L0159_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="161" name="L0160_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="308352" size="12800"/>
<biases offset="321152" size="320"/>
</blobs>
</layer>
<layer id="162" name="L0161_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="163" name="L0162_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="164" name="L0163_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="321472" size="12800"/>
<biases offset="334272" size="160"/>
</blobs>
</layer>
<layer id="165" name="L0164_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="166" name="L0165_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="40" kernel="3,3" output="40" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="334432" size="1440"/>
<biases offset="335872" size="160"/>
</blobs>
</layer>
<layer id="167" name="L0166_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="168" name="L0167_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="336032" size="12800"/>
<biases offset="348832" size="320"/>
</blobs>
</layer>
<layer id="169" name="L0168_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="170" name="L0169_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="171" name="L0170_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="349152" size="12800"/>
<biases offset="361952" size="160"/>
</blobs>
</layer>
<layer id="172" name="L0171_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="173" name="L0172_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="2,2" group="40" kernel="3,3" output="40" pads_begin="2,2" pads_end="2,2" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="362112" size="1440"/>
<biases offset="363552" size="160"/>
</blobs>
</layer>
<layer id="174" name="L0173_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="175" name="L0174_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="363712" size="12800"/>
<biases offset="376512" size="320"/>
</blobs>
</layer>
<layer id="176" name="L0175_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="177" name="L0176_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="178" name="L0177_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="376832" size="12800"/>
<biases offset="389632" size="160"/>
</blobs>
</layer>
<layer id="179" name="L0178_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="180" name="L0179_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="3,3" group="40" kernel="3,3" output="40" pads_begin="3,3" pads_end="3,3" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="389792" size="1440"/>
<biases offset="391232" size="160"/>
</blobs>
</layer>
<layer id="181" name="L0180_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="182" name="L0181_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="391392" size="12800"/>
<biases offset="404192" size="320"/>
</blobs>
</layer>
<layer id="183" name="L0182_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="184" name="L0183_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="185" name="L0184_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="404512" size="12800"/>
<biases offset="417312" size="160"/>
</blobs>
</layer>
<layer id="186" name="L0185_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="187" name="L0186_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="4,4" group="40" kernel="3,3" output="40" pads_begin="4,4" pads_end="4,4" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="417472" size="1440"/>
<biases offset="418912" size="160"/>
</blobs>
</layer>
<layer id="188" name="L0187_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="189" name="L0188_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="419072" size="12800"/>
<biases offset="431872" size="320"/>
</blobs>
</layer>
<layer id="190" name="L0189_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="191" name="L0190_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="192" name="L0191_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="432192" size="12800"/>
<biases offset="444992" size="160"/>
</blobs>
</layer>
<layer id="193" name="L0192_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="194" name="L0193_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="40" kernel="3,3" output="40" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="445152" size="1440"/>
<biases offset="446592" size="160"/>
</blobs>
</layer>
<layer id="195" name="L0194_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="196" name="L0195_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="446752" size="12800"/>
<biases offset="459552" size="320"/>
</blobs>
</layer>
<layer id="197" name="L0196_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="198" name="L0197_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="199" name="L0198_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="459872" size="12800"/>
<biases offset="472672" size="160"/>
</blobs>
</layer>
<layer id="200" name="L0199_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="201" name="L0200_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="40" kernel="3,3" output="40" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="472832" size="1440"/>
<biases offset="474272" size="160"/>
</blobs>
</layer>
<layer id="202" name="L0201_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="203" name="L0202_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="474432" size="12800"/>
<biases offset="487232" size="320"/>
</blobs>
</layer>
<layer id="204" name="L0203_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="205" name="L0204_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="206" name="L0205_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="40" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="487552" size="12800"/>
<biases offset="500352" size="160"/>
</blobs>
</layer>
<layer id="207" name="L0206_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="208" name="L0207_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="2,2" group="40" kernel="3,3" output="40" pads_begin="2,2" pads_end="2,2" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="500512" size="1440"/>
<biases offset="501952" size="160"/>
</blobs>
</layer>
<layer id="209" name="L0208_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="210" name="L0209_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="80" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>40</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="502112" size="12800"/>
<biases offset="514912" size="320"/>
</blobs>
</layer>
<layer id="211" name="L0210_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="212" name="L0211_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
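<!-- Layers 213-219: transition to 64 channels: 1x1 reduce (80->32), 3x3 depthwise with dilation 4, 1x1 expand (32->64), summed with a 1x1 projection (80->64, layer 218) of the block input. -->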
<layer id="213" name="L0212_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="515232" size="10240"/>
<biases offset="525472" size="128"/>
</blobs>
</layer>
<layer id="214" name="L0213_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="215" name="L0214_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="4,4" group="32" kernel="3,3" output="32" pads_begin="4,4" pads_end="4,4" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="525600" size="1152"/>
<biases offset="526752" size="128"/>
</blobs>
</layer>
<layer id="216" name="L0215_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="217" name="L0216_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="526880" size="8192"/>
<biases offset="535072" size="256"/>
</blobs>
</layer>
<layer id="218" name="L0217_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>80</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="535328" size="20480"/>
<biases offset="555808" size="256"/>
</blobs>
</layer>
<layer id="219" name="L0218_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="220" name="L0219_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
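<!-- Layers 221-233: two 64-channel residual blocks of the same bottleneck pattern, with depthwise dilations 6 and 8. -->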
<layer id="221" name="L0220_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="556064" size="8192"/>
<biases offset="564256" size="128"/>
</blobs>
</layer>
<layer id="222" name="L0221_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="223" name="L0222_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="6,6" group="32" kernel="3,3" output="32" pads_begin="6,6" pads_end="6,6" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="564384" size="1152"/>
<biases offset="565536" size="128"/>
</blobs>
</layer>
<layer id="224" name="L0223_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="225" name="L0224_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="565664" size="8192"/>
<biases offset="573856" size="256"/>
</blobs>
</layer>
<layer id="226" name="L0225_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="227" name="L0226_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="228" name="L0227_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="574112" size="8192"/>
<biases offset="582304" size="128"/>
</blobs>
</layer>
<layer id="229" name="L0228_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="230" name="L0229_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="8,8" group="32" kernel="3,3" output="32" pads_begin="8,8" pads_end="8,8" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="582432" size="1152"/>
<biases offset="583584" size="128"/>
</blobs>
</layer>
<layer id="231" name="L0230_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="232" name="L0231_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="64" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="583712" size="8192"/>
<biases offset="591904" size="256"/>
</blobs>
</layer>
<layer id="233" name="L0232_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="234" name="L0233_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
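<!-- Layers 235-241: transition to 56 channels (1x1 64->32, depthwise dilation 1, 1x1 32->56); the shortcut is a 1x1 projection (64->56, layer 240). -->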
<layer id="235" name="L0234_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="592160" size="8192"/>
<biases offset="600352" size="128"/>
</blobs>
</layer>
<layer id="236" name="L0235_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="237" name="L0236_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="32" kernel="3,3" output="32" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="600480" size="1152"/>
<biases offset="601632" size="128"/>
</blobs>
</layer>
<layer id="238" name="L0237_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="239" name="L0238_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="56" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="601760" size="7168"/>
<biases offset="608928" size="224"/>
</blobs>
</layer>
<layer id="240" name="L0239_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="56" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="609152" size="14336"/>
<biases offset="623488" size="224"/>
</blobs>
</layer>
<layer id="241" name="L0240_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="242" name="L0241_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
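<!-- Layers 243-255: two 56-channel residual blocks, depthwise dilations 1 and 2. -->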
<layer id="243" name="L0242_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="623712" size="7168"/>
<biases offset="630880" size="128"/>
</blobs>
</layer>
<layer id="244" name="L0243_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="245" name="L0244_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="32" kernel="3,3" output="32" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="631008" size="1152"/>
<biases offset="632160" size="128"/>
</blobs>
</layer>
<layer id="246" name="L0245_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="247" name="L0246_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="56" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="632288" size="7168"/>
<biases offset="639456" size="224"/>
</blobs>
</layer>
<layer id="248" name="L0247_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="249" name="L0248_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="250" name="L0249_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="32" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="639680" size="7168"/>
<biases offset="646848" size="128"/>
</blobs>
</layer>
<layer id="251" name="L0250_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="252" name="L0251_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="2,2" group="32" kernel="3,3" output="32" pads_begin="2,2" pads_end="2,2" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="646976" size="1152"/>
<biases offset="648128" size="128"/>
</blobs>
</layer>
<layer id="253" name="L0252_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="254" name="L0253_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="56" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="648256" size="7168"/>
<biases offset="655424" size="224"/>
</blobs>
</layer>
<layer id="255" name="L0254_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="256" name="L0255_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
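<!-- Layers 257-263: transition to 48 channels (1x1 56->24, depthwise dilation 4, 1x1 24->48) with a projected shortcut (56->48, layer 262). -->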
<layer id="257" name="L0256_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="655648" size="5376"/>
<biases offset="661024" size="96"/>
</blobs>
</layer>
<layer id="258" name="L0257_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="259" name="L0258_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="4,4" group="24" kernel="3,3" output="24" pads_begin="4,4" pads_end="4,4" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="661120" size="864"/>
<biases offset="661984" size="96"/>
</blobs>
</layer>
<layer id="260" name="L0259_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="261" name="L0260_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="662080" size="4608"/>
<biases offset="666688" size="192"/>
</blobs>
</layer>
<layer id="262" name="L0261_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>56</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="666880" size="10752"/>
<biases offset="677632" size="192"/>
</blobs>
</layer>
<layer id="263" name="L0262_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="264" name="L0263_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
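<!-- Layers 265-284: three 48-channel residual blocks, depthwise dilations 6, 8, and 1. -->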
<layer id="265" name="L0264_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="677824" size="4608"/>
<biases offset="682432" size="96"/>
</blobs>
</layer>
<layer id="266" name="L0265_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="267" name="L0266_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="6,6" group="24" kernel="3,3" output="24" pads_begin="6,6" pads_end="6,6" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="682528" size="864"/>
<biases offset="683392" size="96"/>
</blobs>
</layer>
<layer id="268" name="L0267_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="269" name="L0268_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="683488" size="4608"/>
<biases offset="688096" size="192"/>
</blobs>
</layer>
<layer id="270" name="L0269_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="271" name="L0270_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="272" name="L0271_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="688288" size="4608"/>
<biases offset="692896" size="96"/>
</blobs>
</layer>
<layer id="273" name="L0272_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="274" name="L0273_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="8,8" group="24" kernel="3,3" output="24" pads_begin="8,8" pads_end="8,8" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="692992" size="864"/>
<biases offset="693856" size="96"/>
</blobs>
</layer>
<layer id="275" name="L0274_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="276" name="L0275_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="693952" size="4608"/>
<biases offset="698560" size="192"/>
</blobs>
</layer>
<layer id="277" name="L0276_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="278" name="L0277_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="279" name="L0278_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="698752" size="4608"/>
<biases offset="703360" size="96"/>
</blobs>
</layer>
<layer id="280" name="L0279_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="281" name="L0280_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="24" kernel="3,3" output="24" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="703456" size="864"/>
<biases offset="704320" size="96"/>
</blobs>
</layer>
<layer id="282" name="L0281_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="283" name="L0282_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="704416" size="4608"/>
<biases offset="709024" size="192"/>
</blobs>
</layer>
<layer id="284" name="L0283_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="285" name="L0284_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
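<!-- Layers 286-292: first multi-scale context branch (a pyramid-pooling-style fusion): 2x2 average pool to 32x56, 1x1 48->16, 2x upsample (Interp, align_corners=1) back to 64x112, 1x1 16->48, then element-wise sum with the unpooled 48-channel feature. -->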
<layer id="286" name="L0285_AvgPool2d" precision="FP32" type="Pooling">
<data exclude-pad="false" kernel="2,2" pads_begin="0,0" pads_end="0,0" pool-method="avg" rounding_type="ceil" strides="2,2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>32</dim>
<dim>56</dim>
</port>
</output>
</layer>
<layer id="287" name="L0286_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>32</dim>
<dim>56</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>32</dim>
<dim>56</dim>
</port>
</output>
<blobs>
<weights offset="709216" size="3072"/>
<biases offset="712288" size="64"/>
</blobs>
</layer>
<layer id="288" name="L0287_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>32</dim>
<dim>56</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>32</dim>
<dim>56</dim>
</port>
</output>
</layer>
<layer id="289" name="L0288_Upsample" precision="FP32" type="Interp">
<data align_corners="1" height="0" pad_beg="0" pad_end="0" shrink_factor="1" width="0" zoom_factor="2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>32</dim>
<dim>56</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="290" name="L0289_Conv2d" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="712352" size="3072"/>
<biases offset="715424" size="192"/>
</blobs>
</layer>
<layer id="291" name="L0290_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="292" name="L0291_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
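<!-- Layers 293-299: second context branch, same pattern at a coarser scale: 4x4 average pool to 16x28, 1x1 48->16, 4x upsample, 1x1 16->48, sum. -->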
<layer id="293" name="L0292_AvgPool2d" precision="FP32" type="Pooling">
<data exclude-pad="false" kernel="4,4" pads_begin="0,0" pads_end="0,0" pool-method="avg" rounding_type="ceil" strides="4,4"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>16</dim>
<dim>28</dim>
</port>
</output>
</layer>
<layer id="294" name="L0293_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>16</dim>
<dim>28</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>16</dim>
<dim>28</dim>
</port>
</output>
<blobs>
<weights offset="715616" size="3072"/>
<biases offset="718688" size="64"/>
</blobs>
</layer>
<layer id="295" name="L0294_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>16</dim>
<dim>28</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>16</dim>
<dim>28</dim>
</port>
</output>
</layer>
<layer id="296" name="L0295_Upsample" precision="FP32" type="Interp">
<data align_corners="1" height="0" pad_beg="0" pad_end="0" shrink_factor="1" width="0" zoom_factor="4"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>16</dim>
<dim>28</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="297" name="L0296_Conv2d" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="48" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="718752" size="3072"/>
<biases offset="721824" size="192"/>
</blobs>
</layer>
<layer id="298" name="L0297_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="299" name="L0298_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
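<!-- Decoder stage 1: a 1x1 convolution reduces 48 to 24 channels and Interp (zoom_factor=2) upsamples to 128 x 224; the result is summed with a skip connection taken from an earlier 1/4-resolution feature map (edge from layer 104) after its own 1x1 projection to 24 channels (layers 300-305). -->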
<layer id="300" name="L0299_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
<blobs>
<weights offset="722016" size="4608"/>
<biases offset="726624" size="96"/>
</blobs>
</layer>
<layer id="301" name="L0300_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</output>
</layer>
<layer id="302" name="L0301_Upsample" precision="FP32" type="Interp">
<data align_corners="1" height="0" pad_beg="0" pad_end="0" shrink_factor="1" width="0" zoom_factor="2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>64</dim>
<dim>112</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="303" name="L0302_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="24" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>48</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="726720" size="4608"/>
<biases offset="731328" size="96"/>
</blobs>
</layer>
<layer id="304" name="L0303_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="305" name="L0304_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
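<!-- Decoder stage 2: 24 channels are reduced to 16 and upsampled to 256 x 448, then summed with a skip from the 1/2-resolution encoder feature map (edge from layer 39, projected from 32 to 16 channels by layer 309). -->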
<layer id="306" name="L0305_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>24</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
<blobs>
<weights offset="731424" size="1536"/>
<biases offset="732960" size="64"/>
</blobs>
</layer>
<layer id="307" name="L0306_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</output>
</layer>
<layer id="308" name="L0307_Upsample" precision="FP32" type="Interp">
<data align_corners="1" height="0" pad_beg="0" pad_end="0" shrink_factor="1" width="0" zoom_factor="2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>128</dim>
<dim>224</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="309" name="L0308_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>32</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="733024" size="2048"/>
<biases offset="735072" size="64"/>
</blobs>
</layer>
<layer id="310" name="L0309_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="311" name="L0310_AddBackward1" precision="FP32" type="Eltwise">
<data coeff="" operation="sum"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="2">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
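<!-- Segmentation head: a 1x1 convolution and a depthwise 3x3 convolution (group=16) refine the fused 16-channel map, a final 1x1 convolution produces 4 class maps, Interp (zoom_factor=2) restores the full 512 x 896 input resolution, and SoftMax over axis=1 yields per-pixel class probabilities. Per the Open Model Zoo description of road-segmentation-adas-0001, the four channels correspond to background, road, curb, and road markings. -->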
<layer id="312" name="L0311_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="16" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="735136" size="1024"/>
<biases offset="736160" size="64"/>
</blobs>
</layer>
<layer id="313" name="L0312_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="314" name="L0313_Conv2d_BN" precision="FP32" type="Convolution">
<data dilations="1,1" group="16" kernel="3,3" output="16" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="736224" size="576"/>
<biases offset="736800" size="64"/>
</blobs>
</layer>
<layer id="315" name="L0314_ReLU" precision="FP32" type="ReLU">
<data negative_slope="0.0"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
</layer>
<layer id="316" name="L0315_Conv2d" precision="FP32" type="Convolution">
<data dilations="1,1" group="1" kernel="1,1" output="4" pads_begin="0,0" pads_end="0,0" strides="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>16</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="3">
<dim>1</dim>
<dim>4</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</output>
<blobs>
<weights offset="736864" size="256"/>
<biases offset="737120" size="16"/>
</blobs>
</layer>
<layer id="317" name="L0316_Upsample" precision="FP32" type="Interp">
<data align_corners="1" height="0" pad_beg="0" pad_end="0" shrink_factor="1" width="0" zoom_factor="2"/>
<input>
<port id="0">
<dim>1</dim>
<dim>4</dim>
<dim>256</dim>
<dim>448</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>4</dim>
<dim>512</dim>
<dim>896</dim>
</port>
</output>
</layer>
<layer id="318" name="L0317_ReWeight_SoftMax" precision="FP32" type="SoftMax">
<data axis="1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>4</dim>
<dim>512</dim>
<dim>896</dim>
</port>
</input>
<output>
<port id="1">
<dim>1</dim>
<dim>4</dim>
<dim>512</dim>
<dim>896</dim>
</port>
</output>
</layer>
</layers>
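<!-- Graph topology: each edge routes from-layer/from-port to to-layer/to-port. A layer listed as from-layer more than once (e.g. layers 285 and 104) fans out to several consumers, which is how the residual sums and decoder skip connections above are wired. -->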
<edges>
<edge from-layer="0" from-port="0" to-layer="1" to-port="0"/>
<edge from-layer="1" from-port="3" to-layer="2" to-port="0"/>
<edge from-layer="2" from-port="1" to-layer="3" to-port="0"/>
<edge from-layer="3" from-port="3" to-layer="4" to-port="0"/>
<edge from-layer="4" from-port="1" to-layer="5" to-port="0"/>
<edge from-layer="5" from-port="3" to-layer="6" to-port="0"/>
<edge from-layer="6" from-port="1" to-layer="7" to-port="0"/>
<edge from-layer="7" from-port="3" to-layer="8" to-port="0"/>
<edge from-layer="2" from-port="1" to-layer="8" to-port="1"/>
<edge from-layer="8" from-port="2" to-layer="9" to-port="0"/>
<edge from-layer="9" from-port="1" to-layer="10" to-port="0"/>
<edge from-layer="10" from-port="3" to-layer="11" to-port="0"/>
<edge from-layer="11" from-port="1" to-layer="12" to-port="0"/>
<edge from-layer="12" from-port="3" to-layer="13" to-port="0"/>
<edge from-layer="13" from-port="1" to-layer="14" to-port="0"/>
<edge from-layer="9" from-port="1" to-layer="15" to-port="0"/>
<edge from-layer="14" from-port="3" to-layer="16" to-port="0"/>
<edge from-layer="15" from-port="3" to-layer="16" to-port="1"/>
<edge from-layer="16" from-port="2" to-layer="17" to-port="0"/>
<edge from-layer="17" from-port="1" to-layer="18" to-port="0"/>
<edge from-layer="18" from-port="3" to-layer="19" to-port="0"/>
<edge from-layer="19" from-port="1" to-layer="20" to-port="0"/>
<edge from-layer="20" from-port="3" to-layer="21" to-port="0"/>
<edge from-layer="21" from-port="1" to-layer="22" to-port="0"/>
<edge from-layer="22" from-port="3" to-layer="23" to-port="0"/>
<edge from-layer="17" from-port="1" to-layer="23" to-port="1"/>
<edge from-layer="23" from-port="2" to-layer="24" to-port="0"/>
<edge from-layer="24" from-port="1" to-layer="25" to-port="0"/>
<edge from-layer="25" from-port="3" to-layer="26" to-port="0"/>
<edge from-layer="26" from-port="1" to-layer="27" to-port="0"/>
<edge from-layer="27" from-port="3" to-layer="28" to-port="0"/>
<edge from-layer="28" from-port="1" to-layer="29" to-port="0"/>
<edge from-layer="24" from-port="1" to-layer="30" to-port="0"/>
<edge from-layer="29" from-port="3" to-layer="31" to-port="0"/>
<edge from-layer="30" from-port="3" to-layer="31" to-port="1"/>
<edge from-layer="31" from-port="2" to-layer="32" to-port="0"/>
<edge from-layer="32" from-port="1" to-layer="33" to-port="0"/>
<edge from-layer="33" from-port="3" to-layer="34" to-port="0"/>
<edge from-layer="34" from-port="1" to-layer="35" to-port="0"/>
<edge from-layer="35" from-port="3" to-layer="36" to-port="0"/>
<edge from-layer="36" from-port="1" to-layer="37" to-port="0"/>
<edge from-layer="37" from-port="3" to-layer="38" to-port="0"/>
<edge from-layer="32" from-port="1" to-layer="38" to-port="1"/>
<edge from-layer="38" from-port="2" to-layer="39" to-port="0"/>
<edge from-layer="39" from-port="1" to-layer="40" to-port="0"/>
<edge from-layer="40" from-port="3" to-layer="41" to-port="0"/>
<edge from-layer="41" from-port="1" to-layer="42" to-port="0"/>
<edge from-layer="42" from-port="3" to-layer="43" to-port="0"/>
<edge from-layer="43" from-port="1" to-layer="44" to-port="0"/>
<edge from-layer="39" from-port="1" to-layer="45" to-port="0"/>
<edge from-layer="45" from-port="1" to-layer="46" to-port="0"/>
<edge from-layer="44" from-port="3" to-layer="47" to-port="0"/>
<edge from-layer="46" from-port="3" to-layer="47" to-port="1"/>
<edge from-layer="47" from-port="2" to-layer="48" to-port="0"/>
<edge from-layer="48" from-port="1" to-layer="49" to-port="0"/>
<edge from-layer="49" from-port="3" to-layer="50" to-port="0"/>
<edge from-layer="50" from-port="1" to-layer="51" to-port="0"/>
<edge from-layer="51" from-port="3" to-layer="52" to-port="0"/>
<edge from-layer="52" from-port="1" to-layer="53" to-port="0"/>
<edge from-layer="53" from-port="3" to-layer="54" to-port="0"/>
<edge from-layer="48" from-port="1" to-layer="54" to-port="1"/>
<edge from-layer="54" from-port="2" to-layer="55" to-port="0"/>
<edge from-layer="55" from-port="1" to-layer="56" to-port="0"/>
<edge from-layer="56" from-port="3" to-layer="57" to-port="0"/>
<edge from-layer="57" from-port="1" to-layer="58" to-port="0"/>
<edge from-layer="58" from-port="3" to-layer="59" to-port="0"/>
<edge from-layer="59" from-port="1" to-layer="60" to-port="0"/>
<edge from-layer="60" from-port="3" to-layer="61" to-port="0"/>
<edge from-layer="55" from-port="1" to-layer="61" to-port="1"/>
<edge from-layer="61" from-port="2" to-layer="62" to-port="0"/>
<edge from-layer="62" from-port="1" to-layer="63" to-port="0"/>
<edge from-layer="63" from-port="3" to-layer="64" to-port="0"/>
<edge from-layer="64" from-port="1" to-layer="65" to-port="0"/>
<edge from-layer="65" from-port="3" to-layer="66" to-port="0"/>
<edge from-layer="66" from-port="1" to-layer="67" to-port="0"/>
<edge from-layer="67" from-port="3" to-layer="68" to-port="0"/>
<edge from-layer="62" from-port="1" to-layer="68" to-port="1"/>
<edge from-layer="68" from-port="2" to-layer="69" to-port="0"/>
<edge from-layer="69" from-port="1" to-layer="70" to-port="0"/>
<edge from-layer="70" from-port="3" to-layer="71" to-port="0"/>
<edge from-layer="71" from-port="1" to-layer="72" to-port="0"/>
<edge from-layer="72" from-port="3" to-layer="73" to-port="0"/>
<edge from-layer="73" from-port="1" to-layer="74" to-port="0"/>
<edge from-layer="74" from-port="3" to-layer="75" to-port="0"/>
<edge from-layer="69" from-port="1" to-layer="75" to-port="1"/>
<edge from-layer="75" from-port="2" to-layer="76" to-port="0"/>
<edge from-layer="76" from-port="1" to-layer="77" to-port="0"/>
<edge from-layer="77" from-port="3" to-layer="78" to-port="0"/>
<edge from-layer="78" from-port="1" to-layer="79" to-port="0"/>
<edge from-layer="79" from-port="3" to-layer="80" to-port="0"/>
<edge from-layer="80" from-port="1" to-layer="81" to-port="0"/>
<edge from-layer="81" from-port="3" to-layer="82" to-port="0"/>
<edge from-layer="76" from-port="1" to-layer="82" to-port="1"/>
<edge from-layer="82" from-port="2" to-layer="83" to-port="0"/>
<edge from-layer="83" from-port="1" to-layer="84" to-port="0"/>
<edge from-layer="84" from-port="3" to-layer="85" to-port="0"/>
<edge from-layer="85" from-port="1" to-layer="86" to-port="0"/>
<edge from-layer="86" from-port="3" to-layer="87" to-port="0"/>
<edge from-layer="87" from-port="1" to-layer="88" to-port="0"/>
<edge from-layer="88" from-port="3" to-layer="89" to-port="0"/>
<edge from-layer="83" from-port="1" to-layer="89" to-port="1"/>
<edge from-layer="89" from-port="2" to-layer="90" to-port="0"/>
<edge from-layer="90" from-port="1" to-layer="91" to-port="0"/>
<edge from-layer="91" from-port="3" to-layer="92" to-port="0"/>
<edge from-layer="92" from-port="1" to-layer="93" to-port="0"/>
<edge from-layer="93" from-port="3" to-layer="94" to-port="0"/>
<edge from-layer="94" from-port="1" to-layer="95" to-port="0"/>
<edge from-layer="95" from-port="3" to-layer="96" to-port="0"/>
<edge from-layer="90" from-port="1" to-layer="96" to-port="1"/>
<edge from-layer="96" from-port="2" to-layer="97" to-port="0"/>
<edge from-layer="97" from-port="1" to-layer="98" to-port="0"/>
<edge from-layer="98" from-port="3" to-layer="99" to-port="0"/>
<edge from-layer="99" from-port="1" to-layer="100" to-port="0"/>
<edge from-layer="100" from-port="3" to-layer="101" to-port="0"/>
<edge from-layer="101" from-port="1" to-layer="102" to-port="0"/>
<edge from-layer="102" from-port="3" to-layer="103" to-port="0"/>
<edge from-layer="97" from-port="1" to-layer="103" to-port="1"/>
<edge from-layer="103" from-port="2" to-layer="104" to-port="0"/>
<edge from-layer="104" from-port="1" to-layer="105" to-port="0"/>
<edge from-layer="105" from-port="3" to-layer="106" to-port="0"/>
<edge from-layer="106" from-port="1" to-layer="107" to-port="0"/>
<edge from-layer="107" from-port="3" to-layer="108" to-port="0"/>
<edge from-layer="108" from-port="1" to-layer="109" to-port="0"/>
<edge from-layer="104" from-port="1" to-layer="110" to-port="0"/>
<edge from-layer="110" from-port="1" to-layer="111" to-port="0"/>
<edge from-layer="109" from-port="3" to-layer="112" to-port="0"/>
<edge from-layer="111" from-port="3" to-layer="112" to-port="1"/>
<edge from-layer="112" from-port="2" to-layer="113" to-port="0"/>
<edge from-layer="113" from-port="1" to-layer="114" to-port="0"/>
<edge from-layer="114" from-port="3" to-layer="115" to-port="0"/>
<edge from-layer="115" from-port="1" to-layer="116" to-port="0"/>
<edge from-layer="116" from-port="3" to-layer="117" to-port="0"/>
<edge from-layer="117" from-port="1" to-layer="118" to-port="0"/>
<edge from-layer="118" from-port="3" to-layer="119" to-port="0"/>
<edge from-layer="113" from-port="1" to-layer="119" to-port="1"/>
<edge from-layer="119" from-port="2" to-layer="120" to-port="0"/>
<edge from-layer="120" from-port="1" to-layer="121" to-port="0"/>
<edge from-layer="121" from-port="3" to-layer="122" to-port="0"/>
<edge from-layer="122" from-port="1" to-layer="123" to-port="0"/>
<edge from-layer="123" from-port="3" to-layer="124" to-port="0"/>
<edge from-layer="124" from-port="1" to-layer="125" to-port="0"/>
<edge from-layer="125" from-port="3" to-layer="126" to-port="0"/>
<edge from-layer="120" from-port="1" to-layer="126" to-port="1"/>
<edge from-layer="126" from-port="2" to-layer="127" to-port="0"/>
<edge from-layer="127" from-port="1" to-layer="128" to-port="0"/>
<edge from-layer="128" from-port="3" to-layer="129" to-port="0"/>
<edge from-layer="129" from-port="1" to-layer="130" to-port="0"/>
<edge from-layer="130" from-port="3" to-layer="131" to-port="0"/>
<edge from-layer="131" from-port="1" to-layer="132" to-port="0"/>
<edge from-layer="132" from-port="3" to-layer="133" to-port="0"/>
<edge from-layer="127" from-port="1" to-layer="133" to-port="1"/>
<edge from-layer="133" from-port="2" to-layer="134" to-port="0"/>
<edge from-layer="134" from-port="1" to-layer="135" to-port="0"/>
<edge from-layer="135" from-port="3" to-layer="136" to-port="0"/>
<edge from-layer="136" from-port="1" to-layer="137" to-port="0"/>
<edge from-layer="137" from-port="3" to-layer="138" to-port="0"/>
<edge from-layer="138" from-port="1" to-layer="139" to-port="0"/>
<edge from-layer="134" from-port="1" to-layer="140" to-port="0"/>
<edge from-layer="139" from-port="3" to-layer="141" to-port="0"/>
<edge from-layer="140" from-port="3" to-layer="141" to-port="1"/>
<edge from-layer="141" from-port="2" to-layer="142" to-port="0"/>
<edge from-layer="142" from-port="1" to-layer="143" to-port="0"/>
<edge from-layer="143" from-port="3" to-layer="144" to-port="0"/>
<edge from-layer="144" from-port="1" to-layer="145" to-port="0"/>
<edge from-layer="145" from-port="3" to-layer="146" to-port="0"/>
<edge from-layer="146" from-port="1" to-layer="147" to-port="0"/>
<edge from-layer="147" from-port="3" to-layer="148" to-port="0"/>
<edge from-layer="142" from-port="1" to-layer="148" to-port="1"/>
<edge from-layer="148" from-port="2" to-layer="149" to-port="0"/>
<edge from-layer="149" from-port="1" to-layer="150" to-port="0"/>
<edge from-layer="150" from-port="3" to-layer="151" to-port="0"/>
<edge from-layer="151" from-port="1" to-layer="152" to-port="0"/>
<edge from-layer="152" from-port="3" to-layer="153" to-port="0"/>
<edge from-layer="153" from-port="1" to-layer="154" to-port="0"/>
<edge from-layer="154" from-port="3" to-layer="155" to-port="0"/>
<edge from-layer="149" from-port="1" to-layer="155" to-port="1"/>
<edge from-layer="155" from-port="2" to-layer="156" to-port="0"/>
<edge from-layer="156" from-port="1" to-layer="157" to-port="0"/>
<edge from-layer="157" from-port="3" to-layer="158" to-port="0"/>
<edge from-layer="158" from-port="1" to-layer="159" to-port="0"/>
<edge from-layer="159" from-port="3" to-layer="160" to-port="0"/>
<edge from-layer="160" from-port="1" to-layer="161" to-port="0"/>
<edge from-layer="161" from-port="3" to-layer="162" to-port="0"/>
<edge from-layer="156" from-port="1" to-layer="162" to-port="1"/>
<edge from-layer="162" from-port="2" to-layer="163" to-port="0"/>
<edge from-layer="163" from-port="1" to-layer="164" to-port="0"/>
<edge from-layer="164" from-port="3" to-layer="165" to-port="0"/>
<edge from-layer="165" from-port="1" to-layer="166" to-port="0"/>
<edge from-layer="166" from-port="3" to-layer="167" to-port="0"/>
<edge from-layer="167" from-port="1" to-layer="168" to-port="0"/>
<edge from-layer="168" from-port="3" to-layer="169" to-port="0"/>
<edge from-layer="163" from-port="1" to-layer="169" to-port="1"/>
<edge from-layer="169" from-port="2" to-layer="170" to-port="0"/>
<edge from-layer="170" from-port="1" to-layer="171" to-port="0"/>
<edge from-layer="171" from-port="3" to-layer="172" to-port="0"/>
<edge from-layer="172" from-port="1" to-layer="173" to-port="0"/>
<edge from-layer="173" from-port="3" to-layer="174" to-port="0"/>
<edge from-layer="174" from-port="1" to-layer="175" to-port="0"/>
<edge from-layer="175" from-port="3" to-layer="176" to-port="0"/>
<edge from-layer="170" from-port="1" to-layer="176" to-port="1"/>
<edge from-layer="176" from-port="2" to-layer="177" to-port="0"/>
<edge from-layer="177" from-port="1" to-layer="178" to-port="0"/>
<edge from-layer="178" from-port="3" to-layer="179" to-port="0"/>
<edge from-layer="179" from-port="1" to-layer="180" to-port="0"/>
<edge from-layer="180" from-port="3" to-layer="181" to-port="0"/>
<edge from-layer="181" from-port="1" to-layer="182" to-port="0"/>
<edge from-layer="182" from-port="3" to-layer="183" to-port="0"/>
<edge from-layer="177" from-port="1" to-layer="183" to-port="1"/>
<edge from-layer="183" from-port="2" to-layer="184" to-port="0"/>
<edge from-layer="184" from-port="1" to-layer="185" to-port="0"/>
<edge from-layer="185" from-port="3" to-layer="186" to-port="0"/>
<edge from-layer="186" from-port="1" to-layer="187" to-port="0"/>
<edge from-layer="187" from-port="3" to-layer="188" to-port="0"/>
<edge from-layer="188" from-port="1" to-layer="189" to-port="0"/>
<edge from-layer="189" from-port="3" to-layer="190" to-port="0"/>
<edge from-layer="184" from-port="1" to-layer="190" to-port="1"/>
<edge from-layer="190" from-port="2" to-layer="191" to-port="0"/>
<edge from-layer="191" from-port="1" to-layer="192" to-port="0"/>
<edge from-layer="192" from-port="3" to-layer="193" to-port="0"/>
<edge from-layer="193" from-port="1" to-layer="194" to-port="0"/>
<edge from-layer="194" from-port="3" to-layer="195" to-port="0"/>
<edge from-layer="195" from-port="1" to-layer="196" to-port="0"/>
<edge from-layer="196" from-port="3" to-layer="197" to-port="0"/>
<edge from-layer="191" from-port="1" to-layer="197" to-port="1"/>
<edge from-layer="197" from-port="2" to-layer="198" to-port="0"/>
<edge from-layer="198" from-port="1" to-layer="199" to-port="0"/>
<edge from-layer="199" from-port="3" to-layer="200" to-port="0"/>
<edge from-layer="200" from-port="1" to-layer="201" to-port="0"/>
<edge from-layer="201" from-port="3" to-layer="202" to-port="0"/>
<edge from-layer="202" from-port="1" to-layer="203" to-port="0"/>
<edge from-layer="203" from-port="3" to-layer="204" to-port="0"/>
<edge from-layer="198" from-port="1" to-layer="204" to-port="1"/>
<edge from-layer="204" from-port="2" to-layer="205" to-port="0"/>
<edge from-layer="205" from-port="1" to-layer="206" to-port="0"/>
<edge from-layer="206" from-port="3" to-layer="207" to-port="0"/>
<edge from-layer="207" from-port="1" to-layer="208" to-port="0"/>
<edge from-layer="208" from-port="3" to-layer="209" to-port="0"/>
<edge from-layer="209" from-port="1" to-layer="210" to-port="0"/>
<edge from-layer="210" from-port="3" to-layer="211" to-port="0"/>
<edge from-layer="205" from-port="1" to-layer="211" to-port="1"/>
<edge from-layer="211" from-port="2" to-layer="212" to-port="0"/>
<edge from-layer="212" from-port="1" to-layer="213" to-port="0"/>
<edge from-layer="213" from-port="3" to-layer="214" to-port="0"/>
<edge from-layer="214" from-port="1" to-layer="215" to-port="0"/>
<edge from-layer="215" from-port="3" to-layer="216" to-port="0"/>
<edge from-layer="216" from-port="1" to-layer="217" to-port="0"/>
<edge from-layer="212" from-port="1" to-layer="218" to-port="0"/>
<edge from-layer="217" from-port="3" to-layer="219" to-port="0"/>
<edge from-layer="218" from-port="3" to-layer="219" to-port="1"/>
<edge from-layer="219" from-port="2" to-layer="220" to-port="0"/>
<edge from-layer="220" from-port="1" to-layer="221" to-port="0"/>
<edge from-layer="221" from-port="3" to-layer="222" to-port="0"/>
<edge from-layer="222" from-port="1" to-layer="223" to-port="0"/>
<edge from-layer="223" from-port="3" to-layer="224" to-port="0"/>
<edge from-layer="224" from-port="1" to-layer="225" to-port="0"/>
<edge from-layer="225" from-port="3" to-layer="226" to-port="0"/>
<edge from-layer="220" from-port="1" to-layer="226" to-port="1"/>
<edge from-layer="226" from-port="2" to-layer="227" to-port="0"/>
<edge from-layer="227" from-port="1" to-layer="228" to-port="0"/>
<edge from-layer="228" from-port="3" to-layer="229" to-port="0"/>
<edge from-layer="229" from-port="1" to-layer="230" to-port="0"/>
<edge from-layer="230" from-port="3" to-layer="231" to-port="0"/>
<edge from-layer="231" from-port="1" to-layer="232" to-port="0"/>
<edge from-layer="232" from-port="3" to-layer="233" to-port="0"/>
<edge from-layer="227" from-port="1" to-layer="233" to-port="1"/>
<edge from-layer="233" from-port="2" to-layer="234" to-port="0"/>
<edge from-layer="234" from-port="1" to-layer="235" to-port="0"/>
<edge from-layer="235" from-port="3" to-layer="236" to-port="0"/>
<edge from-layer="236" from-port="1" to-layer="237" to-port="0"/>
<edge from-layer="237" from-port="3" to-layer="238" to-port="0"/>
<edge from-layer="238" from-port="1" to-layer="239" to-port="0"/>
<edge from-layer="234" from-port="1" to-layer="240" to-port="0"/>
<edge from-layer="239" from-port="3" to-layer="241" to-port="0"/>
<edge from-layer="240" from-port="3" to-layer="241" to-port="1"/>
<edge from-layer="241" from-port="2" to-layer="242" to-port="0"/>
<edge from-layer="242" from-port="1" to-layer="243" to-port="0"/>
<edge from-layer="243" from-port="3" to-layer="244" to-port="0"/>
<edge from-layer="244" from-port="1" to-layer="245" to-port="0"/>
<edge from-layer="245" from-port="3" to-layer="246" to-port="0"/>
<edge from-layer="246" from-port="1" to-layer="247" to-port="0"/>
<edge from-layer="247" from-port="3" to-layer="248" to-port="0"/>
<edge from-layer="242" from-port="1" to-layer="248" to-port="1"/>
<edge from-layer="248" from-port="2" to-layer="249" to-port="0"/>
<edge from-layer="249" from-port="1" to-layer="250" to-port="0"/>
<edge from-layer="250" from-port="3" to-layer="251" to-port="0"/>
<edge from-layer="251" from-port="1" to-layer="252" to-port="0"/>
<edge from-layer="252" from-port="3" to-layer="253" to-port="0"/>
<edge from-layer="253" from-port="1" to-layer="254" to-port="0"/>
<edge from-layer="254" from-port="3" to-layer="255" to-port="0"/>
<edge from-layer="249" from-port="1" to-layer="255" to-port="1"/>
<edge from-layer="255" from-port="2" to-layer="256" to-port="0"/>
<edge from-layer="256" from-port="1" to-layer="257" to-port="0"/>
<edge from-layer="257" from-port="3" to-layer="258" to-port="0"/>
<edge from-layer="258" from-port="1" to-layer="259" to-port="0"/>
<edge from-layer="259" from-port="3" to-layer="260" to-port="0"/>
<edge from-layer="260" from-port="1" to-layer="261" to-port="0"/>
<edge from-layer="256" from-port="1" to-layer="262" to-port="0"/>
<edge from-layer="261" from-port="3" to-layer="263" to-port="0"/>
<edge from-layer="262" from-port="3" to-layer="263" to-port="1"/>
<edge from-layer="263" from-port="2" to-layer="264" to-port="0"/>
<edge from-layer="264" from-port="1" to-layer="265" to-port="0"/>
<edge from-layer="265" from-port="3" to-layer="266" to-port="0"/>
<edge from-layer="266" from-port="1" to-layer="267" to-port="0"/>
<edge from-layer="267" from-port="3" to-layer="268" to-port="0"/>
<edge from-layer="268" from-port="1" to-layer="269" to-port="0"/>
<edge from-layer="269" from-port="3" to-layer="270" to-port="0"/>
<edge from-layer="264" from-port="1" to-layer="270" to-port="1"/>
<edge from-layer="270" from-port="2" to-layer="271" to-port="0"/>
<edge from-layer="271" from-port="1" to-layer="272" to-port="0"/>
<edge from-layer="272" from-port="3" to-layer="273" to-port="0"/>
<edge from-layer="273" from-port="1" to-layer="274" to-port="0"/>
<edge from-layer="274" from-port="3" to-layer="275" to-port="0"/>
<edge from-layer="275" from-port="1" to-layer="276" to-port="0"/>
<edge from-layer="276" from-port="3" to-layer="277" to-port="0"/>
<edge from-layer="271" from-port="1" to-layer="277" to-port="1"/>
<edge from-layer="277" from-port="2" to-layer="278" to-port="0"/>
<edge from-layer="278" from-port="1" to-layer="279" to-port="0"/>
<edge from-layer="279" from-port="3" to-layer="280" to-port="0"/>
<edge from-layer="280" from-port="1" to-layer="281" to-port="0"/>
<edge from-layer="281" from-port="3" to-layer="282" to-port="0"/>
<edge from-layer="282" from-port="1" to-layer="283" to-port="0"/>
<edge from-layer="283" from-port="3" to-layer="284" to-port="0"/>
<edge from-layer="278" from-port="1" to-layer="284" to-port="1"/>
<edge from-layer="284" from-port="2" to-layer="285" to-port="0"/>
<edge from-layer="285" from-port="1" to-layer="286" to-port="0"/>
<edge from-layer="286" from-port="1" to-layer="287" to-port="0"/>
<edge from-layer="287" from-port="3" to-layer="288" to-port="0"/>
<edge from-layer="288" from-port="1" to-layer="289" to-port="0"/>
<edge from-layer="289" from-port="1" to-layer="290" to-port="0"/>
<edge from-layer="290" from-port="3" to-layer="291" to-port="0"/>
<edge from-layer="285" from-port="1" to-layer="292" to-port="0"/>
<edge from-layer="291" from-port="1" to-layer="292" to-port="1"/>
<edge from-layer="285" from-port="1" to-layer="293" to-port="0"/>
<edge from-layer="293" from-port="1" to-layer="294" to-port="0"/>
<edge from-layer="294" from-port="3" to-layer="295" to-port="0"/>
<edge from-layer="295" from-port="1" to-layer="296" to-port="0"/>
<edge from-layer="296" from-port="1" to-layer="297" to-port="0"/>
<edge from-layer="297" from-port="3" to-layer="298" to-port="0"/>
<edge from-layer="292" from-port="2" to-layer="299" to-port="0"/>
<edge from-layer="298" from-port="1" to-layer="299" to-port="1"/>
<edge from-layer="299" from-port="2" to-layer="300" to-port="0"/>
<edge from-layer="300" from-port="3" to-layer="301" to-port="0"/>
<edge from-layer="301" from-port="1" to-layer="302" to-port="0"/>
<edge from-layer="104" from-port="1" to-layer="303" to-port="0"/>
<edge from-layer="303" from-port="3" to-layer="304" to-port="0"/>
<edge from-layer="302" from-port="1" to-layer="305" to-port="0"/>
<edge from-layer="304" from-port="1" to-layer="305" to-port="1"/>
<edge from-layer="305" from-port="2" to-layer="306" to-port="0"/>
<edge from-layer="306" from-port="3" to-layer="307" to-port="0"/>
<edge from-layer="307" from-port="1" to-layer="308" to-port="0"/>
<edge from-layer="39" from-port="1" to-layer="309" to-port="0"/>
<edge from-layer="309" from-port="3" to-layer="310" to-port="0"/>
<edge from-layer="308" from-port="1" to-layer="311" to-port="0"/>
<edge from-layer="310" from-port="1" to-layer="311" to-port="1"/>
<edge from-layer="311" from-port="2" to-layer="312" to-port="0"/>
<edge from-layer="312" from-port="3" to-layer="313" to-port="0"/>
<edge from-layer="313" from-port="1" to-layer="314" to-port="0"/>
<edge from-layer="314" from-port="3" to-layer="315" to-port="0"/>
<edge from-layer="315" from-port="1" to-layer="316" to-port="0"/>
<edge from-layer="316" from-port="3" to-layer="317" to-port="0"/>
<edge from-layer="317" from-port="1" to-layer="318" to-port="0"/>
</edges>
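<!-- Conversion provenance recorded by the Model Optimizer: tool version and the command-line parameters used to convert the original Caffe model (model.prototxt / model.caffemodel) to this FP32 IR. -->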
<meta_data>
<MO_version value="2019.1.0-341-gc9b66a2"/>
<cli_parameters>
<data_type value="FP32"/>
<disable_fusing value="False"/>
<disable_gfusing value="False"/>
<disable_nhwc_to_nchw value="False"/>
<disable_omitting_optional value="False"/>
<disable_resnet_optimization value="False"/>
<enable_concat_optimization value="False"/>
<enable_flattening_nested_params value="False"/>
<extensions value="DIR"/>
<framework value="caffe"/>
<generate_deprecated_IR_V2 value="False"/>
<input value="data"/>
<input_model value="DIR/model.caffemodel"/>
<input_model_is_text value="False"/>
<input_proto value="DIR/model.prototxt"/>
<input_shape value="[1,3,512,896]"/>
<k value="DIR/CustomLayersMapping.xml"/>
<keep_shape_ops value="False"/>
<legacy_mxnet_model value="False"/>
<log_level value="ERROR"/>
<mean_scale_values value="{}"/>
<mean_values value="()"/>
<model_name value="road-segmentation-adas-0001"/>
<move_to_preprocess value="False"/>
<output value="['L0317_ReWeight_SoftMax']"/>
<output_dir value="DIR"/>
<placeholder_shapes value="{'data': array([ 1, 3, 512, 896])}"/>
<remove_output_softmax value="False"/>
<reverse_input_channels value="False"/>
<save_params_from_nd value="False"/>
<scale_values value="()"/>
<silent value="False"/>
<version value="False"/>
<unset unset_cli_parameters="batch, counts, finegrain_fusing, freeze_placeholder_with_value, input_checkpoint, input_meta_graph, input_symbol, mean_file, mean_file_offsets, nd_prefix_name, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_operation_patterns, tensorflow_subgraph_patterns, tensorflow_use_custom_operations_config"/>
</cli_parameters>
</meta_data>
</net>