stable-diffusion-xl-base-1.0/vae_encoder/openvino_model.xml

<?xml version="1.0"?>
<net name="torch_jit" version="11">
<layers>
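<!-- Editorial annotation (assumption-hedged): this is an OpenVINO IR v11 graph of the SDXL VAE
     encoder, exported from PyTorch through ONNX (hence the torch_jit net name, the /encoder/...
     layer names, and the onnx::* constant names). Const layers carry no inline data; their
     offset/size attributes index into the companion openvino_model.bin weights file. dim values
     of -1 mark dynamic dimensions (batch and spatial sizes). -->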
<layer id="0" name="sample" type="Parameter" version="opset1">
<data shape="?,3,?,?" element_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="sample" />
</rt_info>
<output>
<port id="0" precision="FP32" names="sample">
<dim>-1</dim>
<dim>3</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
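<!-- Network input: "sample", an NCHW image tensor with 3 channels; batch, height, and width
     are left dynamic. -->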
<layer id="1" name="encoder.mid_block.attentions.0.to_out.0.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="0" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_out.0.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_out.0.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="2" name="encoder.mid_block.attentions.0.to_q.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="2048" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_q.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_q.bias">
<dim>512</dim>
</port>
</output>
</layer>
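<!-- Const layers appear to be laid out by their .bin offsets rather than by execution order,
     so the mid-block attention biases above are declared here and only consumed much later
     in the graph. -->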
<layer id="3" name="encoder.conv_in.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 3, 3, 3" offset="4096" size="13824" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_in.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_in.weight">
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="4" name="/encoder/conv_in/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_in/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>3</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
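<!-- Convolution bias handling: the converter splits each Conv into a bias-free Convolution
     ("/WithoutBiases") plus an explicit Add. Layers 5-14 below reshape the 128-element
     conv_in bias to [1, 128, 1, 1] (a small ShapeOf/Subtract/Broadcast/Concat subgraph builds
     the target shape) so it broadcasts over batch and spatial dims in the Add at layer 15.
     The same pattern repeats after every convolution in this file. -->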
<layer id="5" name="encoder.conv_in.bias" type="Const" version="opset1">
<data element_type="f32" shape="128" offset="17920" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_in.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_in.bias">
<dim>128</dim>
</port>
</output>
</layer>
<layer id="6" name="Constant_257035" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257035" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="7" name="ShapeOf_257041" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18440" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257041" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="8" name="ShapeOf_257033" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257033" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="9" name="ShapeOf_257034" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257034" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="10" name="Constant_257036" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257036" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="11" name="Subtract_257037" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257036, Subtract_257037" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="12" name="Broadcast_257038" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257038, Constant_257035" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="13" name="Concat_257042" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257042, Constant_257035, ShapeOf_257041" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="14" name="Reshape_257043" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257043" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="15" name="/encoder/conv_in/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_in/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_in/Conv_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
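<!-- GroupNorm (32 groups over 128 channels) as exported through ONNX InstanceNormalization:
     reshape to [N, 32, -1], MVN (mean/variance normalization, eps ~1e-6) over the flattened
     per-group elements, per-group scale and shift from the InstanceNormalization constants,
     reshape back to NCHW, then per-channel Multiply/Add with the learned GroupNorm weight
     (onnx::Mul_964) and bias (onnx::Add_965). Every norm layer in the encoder follows this
     same decomposition. -->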
<layer id="16" name="/encoder/down_blocks.0/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="17" name="/encoder/down_blocks.0/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="18" name="Constant_257053" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257053" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="19" name="MVN_257054" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257054" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="20" name="/encoder/down_blocks.0/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="21" name="Constant_257057" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257057" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="22" name="ShapeOf_257063" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257063" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="23" name="ShapeOf_257055" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257055" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="24" name="ShapeOf_257056" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257056" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="25" name="Constant_257058" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257058" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="26" name="Subtract_257059" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257058, Subtract_257059" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="27" name="Broadcast_257060" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257060, Constant_257057" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="28" name="Concat_257064" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257064, Constant_257057, ShapeOf_257063" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="29" name="Reshape_257065" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257065" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="30" name="Multiply_257068" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257068" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="31" name="/encoder/down_blocks.0/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="32" name="Constant_257069" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257069" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="33" name="ShapeOf_257075" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257075" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="34" name="Constant_257070" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257070" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="35" name="Subtract_257071" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257070, Subtract_257071" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="36" name="Broadcast_257072" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257072, Constant_257069" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="37" name="Concat_257076" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257076, Constant_257069, ShapeOf_257075" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="38" name="Reshape_257077" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257077" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="39" name="/encoder/down_blocks.0/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="40" name="/encoder/down_blocks.0/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="41" name="/encoder/down_blocks.0/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="42" name="onnx::Mul_964" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="18744" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_964" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_964">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="43" name="/encoder/down_blocks.0/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="44" name="onnx::Add_965" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="19256" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_965" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_965">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="45" name="/encoder/down_blocks.0/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
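<!-- Sigmoid followed by an elementwise Multiply with the same tensor implements SiLU (Swish),
     the resnet nonlinearity. -->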
<layer id="46" name="/encoder/down_blocks.0/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="47" name="/encoder/down_blocks.0/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="48" name="encoder.down_blocks.0.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128, 3, 3" offset="19768" size="589824" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv1.weight">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="49" name="/encoder/down_blocks.0/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="50" name="encoder.down_blocks.0.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="128" offset="609592" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv1.bias">
<dim>128</dim>
</port>
</output>
</layer>
<layer id="51" name="Constant_257096" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257096" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="52" name="ShapeOf_257102" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18440" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257102" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="53" name="ShapeOf_257094" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257094" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="54" name="ShapeOf_257095" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257095" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="55" name="Constant_257097" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257097" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="56" name="Subtract_257098" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257097, Subtract_257098" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="57" name="Broadcast_257099" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257099, Constant_257096" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="58" name="Concat_257103" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257103, Constant_257096, ShapeOf_257102" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="59" name="Reshape_257104" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257104" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="60" name="/encoder/down_blocks.0/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
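<!-- conv1 output of down_blocks.0/resnets.0; the norm2 / nonlinearity_1 / conv2 layers below
     repeat the same decomposed GroupNorm, SiLU, and Conv-plus-bias-Add patterns annotated
     above. -->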
<layer id="61" name="/encoder/down_blocks.0/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="62" name="/encoder/down_blocks.0/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="63" name="Constant_257114" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257114" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="64" name="MVN_257115" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257115" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="65" name="/encoder/down_blocks.0/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="66" name="Constant_257118" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257118" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="67" name="ShapeOf_257124" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257124" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="68" name="ShapeOf_257116" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257116" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="69" name="ShapeOf_257117" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257117" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="70" name="Constant_257119" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257119" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="71" name="Subtract_257120" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257119, Subtract_257120" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="72" name="Broadcast_257121" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257121, Constant_257118" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="73" name="Concat_257125" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257125, Constant_257118, ShapeOf_257124" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="74" name="Reshape_257126" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257126" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="75" name="Multiply_257129" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257129" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="76" name="/encoder/down_blocks.0/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="77" name="Constant_257130" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257130" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="78" name="ShapeOf_257136" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257136" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="79" name="Constant_257131" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257131" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="80" name="Subtract_257132" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257131, Subtract_257132" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="81" name="Broadcast_257133" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257133, Constant_257130" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="82" name="Concat_257137" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257137, Constant_257130, ShapeOf_257136" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="83" name="Reshape_257138" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257138" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="84" name="/encoder/down_blocks.0/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="85" name="/encoder/down_blocks.0/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="86" name="/encoder/down_blocks.0/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="87" name="onnx::Mul_966" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="610104" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_966" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_966">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="88" name="/encoder/down_blocks.0/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="89" name="onnx::Add_967" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="610616" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_967" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_967">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="90" name="/encoder/down_blocks.0/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="91" name="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="92" name="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="93" name="encoder.down_blocks.0.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128, 3, 3" offset="611128" size="589824" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv2.weight">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="94" name="/encoder/down_blocks.0/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="95" name="encoder.down_blocks.0.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="128" offset="1200952" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv2.bias">
<dim>128</dim>
</port>
</output>
</layer>
<layer id="96" name="Constant_257157" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257157" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="97" name="ShapeOf_257163" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18440" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257163" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="98" name="ShapeOf_257155" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257155" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="99" name="ShapeOf_257156" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257156" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="100" name="Constant_257158" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257158" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="101" name="Subtract_257159" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257158, Subtract_257159" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="102" name="Broadcast_257160" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257160, Constant_257157" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="103" name="Concat_257164" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257164, Constant_257157, ShapeOf_257163" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="104" name="Reshape_257165" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257165" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="105" name="/encoder/down_blocks.0/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="106" name="/encoder/down_blocks.0/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="107" name="/encoder/down_blocks.0/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="108" name="/encoder/down_blocks.0/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
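<!-- End of down_blocks.0/resnets.0: the residual Add combines the block input with the conv2
     output, and the Divide applies the resnet output_scale_factor (a scalar constant stored at
     offset 1201464; the stock diffusers VAE uses 1.0, making the division numerically a
     no-op). -->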
<layer id="109" name="/encoder/down_blocks.0/resnets.1/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/resnets.1/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="110" name="/encoder/down_blocks.0/resnets.1/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="111" name="Constant_257178" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257178" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="112" name="MVN_257179" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257179" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="113" name="/encoder/down_blocks.0/resnets.1/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="114" name="Constant_257182" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257182" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="115" name="ShapeOf_257188" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257188" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="116" name="ShapeOf_257180" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257180" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="117" name="ShapeOf_257181" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257181" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="118" name="Constant_257183" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257183" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="119" name="Subtract_257184" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257183, Subtract_257184" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="120" name="Broadcast_257185" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257185, Constant_257182" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="121" name="Concat_257189" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257189, Constant_257182, ShapeOf_257188" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="122" name="Reshape_257190" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257190" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="123" name="Multiply_257193" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257193" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="124" name="/encoder/down_blocks.0/resnets.1/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="125" name="Constant_257194" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257194" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="126" name="ShapeOf_257200" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257200" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="127" name="Constant_257195" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257195" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="128" name="Subtract_257196" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257195, Subtract_257196" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="129" name="Broadcast_257197" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257197, Constant_257194" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="130" name="Concat_257201" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257201, Constant_257194, ShapeOf_257200" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="131" name="Reshape_257202" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257202" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="132" name="/encoder/down_blocks.0/resnets.1/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="133" name="/encoder/down_blocks.0/resnets.1/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/resnets.1/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="134" name="/encoder/down_blocks.0/resnets.1/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="135" name="onnx::Mul_968" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="1201468" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_968" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_968">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="136" name="/encoder/down_blocks.0/resnets.1/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="137" name="onnx::Add_969" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="1201980" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_969" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_969">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="138" name="/encoder/down_blocks.0/resnets.1/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm1/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="139" name="/encoder/down_blocks.0/resnets.1/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.0/resnets.1/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="140" name="/encoder/down_blocks.0/resnets.1/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="141" name="encoder.down_blocks.0.resnets.1.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128, 3, 3" offset="1202492" size="589824" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.1.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.1.conv1.weight">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="142" name="/encoder/down_blocks.0/resnets.1/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="143" name="encoder.down_blocks.0.resnets.1.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="128" offset="1792316" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.1.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.1.conv1.bias">
<dim>128</dim>
</port>
</output>
</layer>
<layer id="144" name="Constant_257221" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257221" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="145" name="ShapeOf_257227" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18440" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257227" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="146" name="ShapeOf_257219" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257219" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="147" name="ShapeOf_257220" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257220" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="148" name="Constant_257222" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257222" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="149" name="Subtract_257223" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257222, Subtract_257223" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="150" name="Broadcast_257224" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257224, Constant_257221" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="151" name="Concat_257228" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257228, Constant_257221, ShapeOf_257227" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="152" name="Reshape_257229" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257229" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="153" name="/encoder/down_blocks.0/resnets.1/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/conv1/Conv_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="154" name="/encoder/down_blocks.0/resnets.1/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/resnets.1/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="155" name="/encoder/down_blocks.0/resnets.1/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="156" name="Constant_257239" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257239" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="157" name="MVN_257240" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257240" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="158" name="/encoder/down_blocks.0/resnets.1/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="159" name="Constant_257243" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257243" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="160" name="ShapeOf_257249" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257249" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="161" name="ShapeOf_257241" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257241" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="162" name="ShapeOf_257242" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257242" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="163" name="Constant_257244" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257244" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="164" name="Subtract_257245" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257244, Subtract_257245" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="165" name="Broadcast_257246" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257246, Constant_257243" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="166" name="Concat_257250" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257250, Constant_257243, ShapeOf_257249" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="167" name="Reshape_257251" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257251" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="168" name="Multiply_257254" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257254" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="169" name="/encoder/down_blocks.0/resnets.1/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="170" name="Constant_257255" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257255" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="171" name="ShapeOf_257261" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257261" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="172" name="Constant_257256" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257256" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="173" name="Subtract_257257" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257256, Subtract_257257" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="174" name="Broadcast_257258" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257258, Constant_257255" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="175" name="Concat_257262" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257262, Constant_257255, ShapeOf_257261" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="176" name="Reshape_257263" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257263" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="177" name="/encoder/down_blocks.0/resnets.1/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="178" name="/encoder/down_blocks.0/resnets.1/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/resnets.1/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="179" name="/encoder/down_blocks.0/resnets.1/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="180" name="onnx::Mul_970" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="1792828" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_970" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_970">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="181" name="/encoder/down_blocks.0/resnets.1/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="182" name="onnx::Add_971" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="1793340" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_971" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_971">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="183" name="/encoder/down_blocks.0/resnets.1/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/norm2/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="184" name="/encoder/down_blocks.0/resnets.1/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.0/resnets.1/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="185" name="/encoder/down_blocks.0/resnets.1/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="186" name="encoder.down_blocks.0.resnets.1.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128, 3, 3" offset="1793852" size="589824" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.1.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.1.conv2.weight">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="187" name="/encoder/down_blocks.0/resnets.1/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="188" name="encoder.down_blocks.0.resnets.1.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="128" offset="2383676" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.1.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.1.conv2.bias">
<dim>128</dim>
</port>
</output>
</layer>
<layer id="189" name="Constant_257282" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257282" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="190" name="ShapeOf_257288" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18440" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257288" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="191" name="ShapeOf_257280" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257280" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="192" name="ShapeOf_257281" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257281" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="193" name="Constant_257283" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257283" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="194" name="Subtract_257284" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257283, Subtract_257284" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="195" name="Broadcast_257285" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257285, Constant_257282" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="196" name="Concat_257289" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257289, Constant_257282, ShapeOf_257288" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="197" name="Reshape_257290" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257290" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="198" name="/encoder/down_blocks.0/resnets.1/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/conv2/Conv_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="199" name="/encoder/down_blocks.0/resnets.1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="200" name="/encoder/down_blocks.0/resnets.1/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.1/Constant_output_0" />
</output>
</layer>
<layer id="201" name="/encoder/down_blocks.0/resnets.1/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.1/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.1/Div_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="202" name="/encoder/down_blocks.0/downsamplers.0/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="2384188" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="203" name="Constant_257300" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257300" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="204" name="/encoder/down_blocks.0/downsamplers.0/Constant" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384228" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="205" name="/encoder/down_blocks.0/downsamplers.0/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant, /encoder/down_blocks.0/downsamplers.0/ConstantOfShape, Constant_257300" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/ConstantOfShape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="206" name="/encoder/down_blocks.0/downsamplers.0/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Concat, /encoder/down_blocks.0/downsamplers.0/Constant_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Concat_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="207" name="/encoder/down_blocks.0/downsamplers.0/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="2384236" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_2_output_0">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="208" name="/encoder/down_blocks.0/downsamplers.0/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_2, /encoder/down_blocks.0/downsamplers.0/Reshape" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Reshape_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="209" name="/encoder/down_blocks.0/downsamplers.0/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="210" name="/encoder/down_blocks.0/downsamplers.0/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384260" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="211" name="/encoder/down_blocks.0/downsamplers.0/Constant_6" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_6" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_6_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="212" name="/encoder/down_blocks.0/downsamplers.0/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="213" name="/encoder/down_blocks.0/downsamplers.0/Slice" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_3, /encoder/down_blocks.0/downsamplers.0/Constant_4, /encoder/down_blocks.0/downsamplers.0/Constant_5, /encoder/down_blocks.0/downsamplers.0/Constant_6, /encoder/down_blocks.0/downsamplers.0/Slice" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Slice_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="214" name="Constant_257314" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="2384268" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257314" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="215" name="/encoder/down_blocks.0/downsamplers.0/Transpose" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Transpose, Constant_257314" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Transpose_output_0">
<dim>2</dim>
<dim>4</dim>
</port>
</output>
</layer>
<layer id="216" name="Constant_842543" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Reshape_1" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="217" name="/encoder/down_blocks.0/downsamplers.0/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>2</dim>
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Reshape_1_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="218" name="/encoder/down_blocks.0/downsamplers.0/Cast" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Cast" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Cast_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="219" name="Constant_257322" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257322" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="220" name="Split_257323" type="Split" version="opset1">
<data num_splits="2" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257322, Split_257323" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="221" name="/encoder/down_blocks.0/downsamplers.0/Constant_8" type="Const" version="opset1">
<data element_type="f32" shape="" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_8" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/downsamplers.0/Constant_8_output_0" />
</output>
</layer>
<layer id="222" name="/encoder/down_blocks.0/downsamplers.0/Pad" type="Pad" version="opset1">
<data pad_mode="constant" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Pad" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="FP32" />
</input>
<output>
<port id="4" precision="FP32" names="/encoder/down_blocks.0/downsamplers.0/Pad_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="223" name="encoder.down_blocks.0.downsamplers.0.conv.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128, 3, 3" offset="2384288" size="589824" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.downsamplers.0.conv.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.downsamplers.0.conv.weight">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="224" name="/encoder/down_blocks.0/downsamplers.0/conv/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/conv/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="225" name="encoder.down_blocks.0.downsamplers.0.conv.bias" type="Const" version="opset1">
<data element_type="f32" shape="128" offset="2974112" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.downsamplers.0.conv.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.downsamplers.0.conv.bias">
<dim>128</dim>
</port>
</output>
</layer>
<layer id="226" name="Constant_257338" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257338" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="227" name="ShapeOf_257344" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18440" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257344" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="228" name="ShapeOf_257336" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257336" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="229" name="ShapeOf_257337" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257337" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="230" name="Constant_257339" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257339" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="231" name="Subtract_257340" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257339, Subtract_257340" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="232" name="Broadcast_257341" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257341, Constant_257338" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="233" name="Concat_257345" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257345, Constant_257338, ShapeOf_257344" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="234" name="Reshape_257346" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257346" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="235" name="/encoder/down_blocks.0/downsamplers.0/conv/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/conv/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/downsamplers.0/conv/Conv_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="236" name="encoder.down_blocks.1.resnets.0.conv_shortcut.weight" type="Const" version="opset1">
<data element_type="f32" shape="256, 128, 1, 1" offset="2974624" size="131072" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv_shortcut.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv_shortcut.weight">
<dim>256</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="237" name="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="238" name="encoder.down_blocks.1.resnets.0.conv_shortcut.bias" type="Const" version="opset1">
<data element_type="f32" shape="256" offset="3105696" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv_shortcut.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv_shortcut.bias">
<dim>256</dim>
</port>
</output>
</layer>
<layer id="239" name="Constant_257475" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257475" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="240" name="ShapeOf_257481" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="3106720" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257481" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="241" name="ShapeOf_257473" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257473" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="242" name="ShapeOf_257474" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257474" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="243" name="Constant_257476" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257476" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="244" name="Subtract_257477" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257476, Subtract_257477" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="245" name="Broadcast_257478" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257478, Constant_257475" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="246" name="Concat_257482" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257482, Constant_257475, ShapeOf_257481" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="247" name="Reshape_257483" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257483" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="248" name="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="249" name="/encoder/down_blocks.1/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="250" name="/encoder/down_blocks.1/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="251" name="Constant_257356" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257356" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="252" name="MVN_257357" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257357" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="253" name="/encoder/down_blocks.1/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="254" name="Constant_257360" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257360" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="255" name="ShapeOf_257366" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257366" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="256" name="ShapeOf_257358" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257358" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="257" name="ShapeOf_257359" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257359" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="258" name="Constant_257361" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257361" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="259" name="Subtract_257362" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257361, Subtract_257362" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="260" name="Broadcast_257363" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257363, Constant_257360" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="261" name="Concat_257367" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257367, Constant_257360, ShapeOf_257366" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="262" name="Reshape_257368" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257368" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="263" name="Multiply_257371" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257371" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="264" name="/encoder/down_blocks.1/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="265" name="Constant_257372" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257372" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="266" name="ShapeOf_257378" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257378" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="267" name="Constant_257373" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257373" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="268" name="Subtract_257374" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257373, Subtract_257374" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="269" name="Broadcast_257375" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257375, Constant_257372" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="270" name="Concat_257379" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257379, Constant_257372, ShapeOf_257378" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="271" name="Reshape_257380" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257380" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="272" name="/encoder/down_blocks.1/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="273" name="/encoder/down_blocks.1/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="274" name="/encoder/down_blocks.1/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="275" name="onnx::Mul_977" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="3106728" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_977" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_977">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="276" name="/encoder/down_blocks.1/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="277" name="onnx::Add_978" type="Const" version="opset1">
<data element_type="f32" shape="128, 1, 1" offset="3107240" size="512" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_978" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_978">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="278" name="/encoder/down_blocks.1/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="279" name="/encoder/down_blocks.1/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="280" name="/encoder/down_blocks.1/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="281" name="encoder.down_blocks.1.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="256, 128, 3, 3" offset="3107752" size="1179648" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv1.weight">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="282" name="/encoder/down_blocks.1/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="283" name="encoder.down_blocks.1.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="256" offset="4287400" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv1.bias">
<dim>256</dim>
</port>
</output>
</layer>
<layer id="284" name="Constant_257399" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257399" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="285" name="ShapeOf_257405" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="3106720" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257405" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="286" name="ShapeOf_257397" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257397" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="287" name="ShapeOf_257398" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257398" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="288" name="Constant_257400" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257400" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="289" name="Subtract_257401" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257400, Subtract_257401" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="290" name="Broadcast_257402" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257402, Constant_257399" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="291" name="Concat_257406" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257406, Constant_257399, ShapeOf_257405" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="292" name="Reshape_257407" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257407" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="293" name="/encoder/down_blocks.1/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="294" name="/encoder/down_blocks.1/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="295" name="/encoder/down_blocks.1/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="296" name="Constant_257417" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257417" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="297" name="MVN_257418" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257418" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="298" name="/encoder/down_blocks.1/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="299" name="Constant_257421" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257421" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="300" name="ShapeOf_257427" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257427" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="301" name="ShapeOf_257419" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257419" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="302" name="ShapeOf_257420" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257420" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="303" name="Constant_257422" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257422" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="304" name="Subtract_257423" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257422, Subtract_257423" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="305" name="Broadcast_257424" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257424, Constant_257421" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="306" name="Concat_257428" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257428, Constant_257421, ShapeOf_257427" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="307" name="Reshape_257429" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257429" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="308" name="Multiply_257432" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257432" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="309" name="/encoder/down_blocks.1/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="310" name="Constant_257433" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257433" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="311" name="ShapeOf_257439" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257439" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="312" name="Constant_257434" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257434" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="313" name="Subtract_257435" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257434, Subtract_257435" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="314" name="Broadcast_257436" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257436, Constant_257433" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="315" name="Concat_257440" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257440, Constant_257433, ShapeOf_257439" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="316" name="Reshape_257441" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257441" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="317" name="/encoder/down_blocks.1/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="318" name="/encoder/down_blocks.1/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="319" name="/encoder/down_blocks.1/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="320" name="onnx::Mul_979" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="4288424" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_979" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_979">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="321" name="/encoder/down_blocks.1/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="322" name="onnx::Add_980" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="4289448" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_980" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_980">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="323" name="/encoder/down_blocks.1/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="324" name="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="325" name="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="326" name="encoder.down_blocks.1.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="256, 256, 3, 3" offset="4290472" size="2359296" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv2.weight">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="327" name="/encoder/down_blocks.1/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="328" name="encoder.down_blocks.1.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="256" offset="6649768" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv2.bias">
<dim>256</dim>
</port>
</output>
</layer>
<layer id="329" name="Constant_257460" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257460" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="330" name="ShapeOf_257466" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="3106720" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257466" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="331" name="ShapeOf_257458" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257458" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="332" name="ShapeOf_257459" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257459" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="333" name="Constant_257461" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257461" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="334" name="Subtract_257462" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257461, Subtract_257462" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="335" name="Broadcast_257463" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257463, Constant_257460" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="336" name="Concat_257467" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257467, Constant_257460, ShapeOf_257466" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="337" name="Reshape_257468" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257468" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="338" name="/encoder/down_blocks.1/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="339" name="/encoder/down_blocks.1/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="340" name="/encoder/down_blocks.1/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="341" name="/encoder/down_blocks.1/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="342" name="/encoder/down_blocks.1/resnets.1/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/resnets.1/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="343" name="/encoder/down_blocks.1/resnets.1/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="344" name="Constant_257496" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257496" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="345" name="MVN_257497" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257497" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="346" name="/encoder/down_blocks.1/resnets.1/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="347" name="Constant_257500" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257500" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="348" name="ShapeOf_257506" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257506" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="349" name="ShapeOf_257498" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257498" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="350" name="ShapeOf_257499" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257499" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="351" name="Constant_257501" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257501" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="352" name="Subtract_257502" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257501, Subtract_257502" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="353" name="Broadcast_257503" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257503, Constant_257500" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="354" name="Concat_257507" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257507, Constant_257500, ShapeOf_257506" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="355" name="Reshape_257508" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257508" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="356" name="Multiply_257511" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257511" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="357" name="/encoder/down_blocks.1/resnets.1/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="358" name="Constant_257512" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257512" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="359" name="ShapeOf_257518" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257518" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="360" name="Constant_257513" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257513" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="361" name="Subtract_257514" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257513, Subtract_257514" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="362" name="Broadcast_257515" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257515, Constant_257512" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="363" name="Concat_257519" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257519, Constant_257512, ShapeOf_257518" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="364" name="Reshape_257520" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257520" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="365" name="/encoder/down_blocks.1/resnets.1/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="366" name="/encoder/down_blocks.1/resnets.1/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/resnets.1/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="367" name="/encoder/down_blocks.1/resnets.1/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="368" name="onnx::Mul_981" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="6650792" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_981" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_981">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="369" name="/encoder/down_blocks.1/resnets.1/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="370" name="onnx::Add_982" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="6651816" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_982" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_982">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="371" name="/encoder/down_blocks.1/resnets.1/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm1/Add_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="372" name="/encoder/down_blocks.1/resnets.1/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.1/resnets.1/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="373" name="/encoder/down_blocks.1/resnets.1/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="374" name="encoder.down_blocks.1.resnets.1.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="256, 256, 3, 3" offset="6652840" size="2359296" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.1.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.1.conv1.weight">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="375" name="/encoder/down_blocks.1/resnets.1/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="376" name="encoder.down_blocks.1.resnets.1.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="256" offset="9012136" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.1.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.1.conv1.bias">
<dim>256</dim>
</port>
</output>
</layer>
<layer id="377" name="Constant_257539" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257539" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="378" name="ShapeOf_257545" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="3106720" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257545" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="379" name="ShapeOf_257537" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257537" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="380" name="ShapeOf_257538" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257538" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="381" name="Constant_257540" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257540" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="382" name="Subtract_257541" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257540, Subtract_257541" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="383" name="Broadcast_257542" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257542, Constant_257539" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="384" name="Concat_257546" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257546, Constant_257539, ShapeOf_257545" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="385" name="Reshape_257547" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257547" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="386" name="/encoder/down_blocks.1/resnets.1/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/conv1/Conv_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="387" name="/encoder/down_blocks.1/resnets.1/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/resnets.1/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="388" name="/encoder/down_blocks.1/resnets.1/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="389" name="Constant_257557" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257557" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="390" name="MVN_257558" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257558" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="391" name="/encoder/down_blocks.1/resnets.1/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="392" name="Constant_257561" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257561" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="393" name="ShapeOf_257567" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257567" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="394" name="ShapeOf_257559" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257559" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="395" name="ShapeOf_257560" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257560" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="396" name="Constant_257562" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257562" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="397" name="Subtract_257563" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257562, Subtract_257563" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="398" name="Broadcast_257564" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257564, Constant_257561" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="399" name="Concat_257568" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257568, Constant_257561, ShapeOf_257567" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="400" name="Reshape_257569" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257569" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="401" name="Multiply_257572" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257572" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="402" name="/encoder/down_blocks.1/resnets.1/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="403" name="Constant_257573" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257573" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="404" name="ShapeOf_257579" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257579" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="405" name="Constant_257574" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257574" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="406" name="Subtract_257575" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257574, Subtract_257575" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="407" name="Broadcast_257576" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257576, Constant_257573" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="408" name="Concat_257580" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257580, Constant_257573, ShapeOf_257579" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="409" name="Reshape_257581" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257581" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="410" name="/encoder/down_blocks.1/resnets.1/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="411" name="/encoder/down_blocks.1/resnets.1/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/resnets.1/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="412" name="/encoder/down_blocks.1/resnets.1/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="413" name="onnx::Mul_983" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="9013160" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_983" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_983">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="414" name="/encoder/down_blocks.1/resnets.1/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="415" name="onnx::Add_984" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="9014184" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_984" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_984">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="416" name="/encoder/down_blocks.1/resnets.1/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/norm2/Add_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="417" name="/encoder/down_blocks.1/resnets.1/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.1/resnets.1/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="418" name="/encoder/down_blocks.1/resnets.1/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="419" name="encoder.down_blocks.1.resnets.1.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="256, 256, 3, 3" offset="9015208" size="2359296" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.1.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.1.conv2.weight">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="420" name="/encoder/down_blocks.1/resnets.1/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="421" name="encoder.down_blocks.1.resnets.1.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="256" offset="11374504" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.1.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.1.conv2.bias">
<dim>256</dim>
</port>
</output>
</layer>
<layer id="422" name="Constant_257600" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257600" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="423" name="ShapeOf_257606" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="3106720" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257606" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="424" name="ShapeOf_257598" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257598" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="425" name="ShapeOf_257599" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257599" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="426" name="Constant_257601" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257601" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="427" name="Subtract_257602" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257601, Subtract_257602" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="428" name="Broadcast_257603" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257603, Constant_257600" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="429" name="Concat_257607" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257607, Constant_257600, ShapeOf_257606" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="430" name="Reshape_257608" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257608" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="431" name="/encoder/down_blocks.1/resnets.1/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/conv2/Conv_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="432" name="/encoder/down_blocks.1/resnets.1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/Add_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="433" name="/encoder/down_blocks.1/resnets.1/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.1/Constant_output_0" />
</output>
</layer>
<layer id="434" name="/encoder/down_blocks.1/resnets.1/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.1/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.1/Div_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="435" name="/encoder/down_blocks.1/downsamplers.0/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="2384188" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="436" name="Constant_257618" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257618" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="437" name="/encoder/down_blocks.1/downsamplers.0/Constant" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384228" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="438" name="/encoder/down_blocks.1/downsamplers.0/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant, /encoder/down_blocks.1/downsamplers.0/ConstantOfShape, Constant_257618" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/ConstantOfShape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="439" name="/encoder/down_blocks.1/downsamplers.0/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Concat, /encoder/down_blocks.1/downsamplers.0/Constant_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Concat_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="440" name="/encoder/down_blocks.1/downsamplers.0/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="2384236" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_2_output_0">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="441" name="/encoder/down_blocks.1/downsamplers.0/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_2, /encoder/down_blocks.1/downsamplers.0/Reshape" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Reshape_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="442" name="/encoder/down_blocks.1/downsamplers.0/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="443" name="/encoder/down_blocks.1/downsamplers.0/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384260" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="444" name="/encoder/down_blocks.1/downsamplers.0/Constant_6" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_6" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_6_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="445" name="/encoder/down_blocks.1/downsamplers.0/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Constant_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="446" name="/encoder/down_blocks.1/downsamplers.0/Slice" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_3, /encoder/down_blocks.1/downsamplers.0/Constant_4, /encoder/down_blocks.1/downsamplers.0/Constant_5, /encoder/down_blocks.1/downsamplers.0/Constant_6, /encoder/down_blocks.1/downsamplers.0/Slice" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Slice_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="447" name="Constant_257632" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="2384268" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257632" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="448" name="/encoder/down_blocks.1/downsamplers.0/Transpose" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Transpose, Constant_257632" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Transpose_output_0">
<dim>2</dim>
<dim>4</dim>
</port>
</output>
</layer>
<layer id="449" name="Constant_842544" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Reshape_1" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="450" name="/encoder/down_blocks.1/downsamplers.0/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>2</dim>
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Reshape_1_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="451" name="/encoder/down_blocks.1/downsamplers.0/Cast" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Cast" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/downsamplers.0/Cast_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="452" name="Constant_257640" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257640" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="453" name="Split_257641" type="Split" version="opset1">
<data num_splits="2" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257640, Split_257641" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="454" name="/encoder/down_blocks.1/downsamplers.0/Constant_8" type="Const" version="opset1">
<data element_type="f32" shape="" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Constant_8" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/downsamplers.0/Constant_8_output_0" />
</output>
</layer>
<layer id="455" name="/encoder/down_blocks.1/downsamplers.0/Pad" type="Pad" version="opset1">
<data pad_mode="constant" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/Pad" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="FP32" />
</input>
<output>
<port id="4" precision="FP32" names="/encoder/down_blocks.1/downsamplers.0/Pad_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="456" name="encoder.down_blocks.1.downsamplers.0.conv.weight" type="Const" version="opset1">
<data element_type="f32" shape="256, 256, 3, 3" offset="11375528" size="2359296" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.downsamplers.0.conv.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.downsamplers.0.conv.weight">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="457" name="/encoder/down_blocks.1/downsamplers.0/conv/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/conv/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="458" name="encoder.down_blocks.1.downsamplers.0.conv.bias" type="Const" version="opset1">
<data element_type="f32" shape="256" offset="13734824" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.downsamplers.0.conv.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.downsamplers.0.conv.bias">
<dim>256</dim>
</port>
</output>
</layer>
<layer id="459" name="Constant_257656" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257656" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="460" name="ShapeOf_257662" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="3106720" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257662" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="461" name="ShapeOf_257654" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257654" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="462" name="ShapeOf_257655" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257655" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="463" name="Constant_257657" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257657" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="464" name="Subtract_257658" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257657, Subtract_257658" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="465" name="Broadcast_257659" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257659, Constant_257656" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="466" name="Concat_257663" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257663, Constant_257656, ShapeOf_257662" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="467" name="Reshape_257664" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257664" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>256</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="468" name="/encoder/down_blocks.1/downsamplers.0/conv/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/downsamplers.0/conv/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/downsamplers.0/conv/Conv_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="469" name="encoder.down_blocks.2.resnets.0.conv_shortcut.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 256, 1, 1" offset="13735848" size="524288" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.0.conv_shortcut.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.0.conv_shortcut.weight">
<dim>512</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="470" name="/encoder/down_blocks.2/resnets.0/conv_shortcut/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/conv_shortcut/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="471" name="encoder.down_blocks.2.resnets.0.conv_shortcut.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="14260136" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.0.conv_shortcut.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.0.conv_shortcut.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="472" name="Constant_257793" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257793" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="473" name="ShapeOf_257799" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257799" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="474" name="ShapeOf_257791" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257791" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="475" name="ShapeOf_257792" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257792" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="476" name="Constant_257794" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257794" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="477" name="Subtract_257795" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257794, Subtract_257795" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="478" name="Broadcast_257796" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257796, Constant_257793" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="479" name="Concat_257800" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257800, Constant_257793, ShapeOf_257799" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="480" name="Reshape_257801" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257801" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="481" name="/encoder/down_blocks.2/resnets.0/conv_shortcut/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/conv_shortcut/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/conv_shortcut/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="482" name="/encoder/down_blocks.2/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="483" name="/encoder/down_blocks.2/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="484" name="Constant_257674" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257674" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="485" name="MVN_257675" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257675" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="486" name="/encoder/down_blocks.2/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="487" name="Constant_257678" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257678" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="488" name="ShapeOf_257684" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257684" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="489" name="ShapeOf_257676" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257676" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="490" name="ShapeOf_257677" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257677" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="491" name="Constant_257679" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257679" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="492" name="Subtract_257680" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257679, Subtract_257680" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="493" name="Broadcast_257681" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257681, Constant_257678" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="494" name="Concat_257685" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257685, Constant_257678, ShapeOf_257684" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="495" name="Reshape_257686" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257686" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="496" name="Multiply_257689" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257689" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="497" name="/encoder/down_blocks.2/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="498" name="Constant_257690" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257690" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="499" name="ShapeOf_257696" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257696" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="500" name="Constant_257691" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257691" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="501" name="Subtract_257692" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257691, Subtract_257692" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="502" name="Broadcast_257693" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257693, Constant_257690" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="503" name="Concat_257697" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257697, Constant_257690, ShapeOf_257696" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="504" name="Reshape_257698" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257698" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="505" name="/encoder/down_blocks.2/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="506" name="/encoder/down_blocks.2/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.2/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="507" name="/encoder/down_blocks.2/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="508" name="onnx::Mul_990" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="14262192" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_990" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_990">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="509" name="/encoder/down_blocks.2/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="510" name="onnx::Add_991" type="Const" version="opset1">
<data element_type="f32" shape="256, 1, 1" offset="14263216" size="1024" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_991" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_991">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="511" name="/encoder/down_blocks.2/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="512" name="/encoder/down_blocks.2/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.2/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="513" name="/encoder/down_blocks.2/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="514" name="encoder.down_blocks.2.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 256, 3, 3" offset="14264240" size="4718592" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.0.conv1.weight">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="515" name="/encoder/down_blocks.2/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>256</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="516" name="encoder.down_blocks.2.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="18982832" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.0.conv1.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="517" name="Constant_257717" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257717" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="518" name="ShapeOf_257723" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257723" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="519" name="ShapeOf_257715" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257715" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="520" name="ShapeOf_257716" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257716" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="521" name="Constant_257718" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257718" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="522" name="Subtract_257719" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257718, Subtract_257719" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="523" name="Broadcast_257720" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257720, Constant_257717" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="524" name="Concat_257724" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257724, Constant_257717, ShapeOf_257723" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="525" name="Reshape_257725" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257725" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="526" name="/encoder/down_blocks.2/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="527" name="/encoder/down_blocks.2/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="528" name="/encoder/down_blocks.2/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="529" name="Constant_257735" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257735" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="530" name="MVN_257736" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257736" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="531" name="/encoder/down_blocks.2/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="532" name="Constant_257739" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257739" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="533" name="ShapeOf_257745" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257745" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="534" name="ShapeOf_257737" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257737" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="535" name="ShapeOf_257738" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257738" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="536" name="Constant_257740" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257740" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="537" name="Subtract_257741" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257740, Subtract_257741" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="538" name="Broadcast_257742" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257742, Constant_257739" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="539" name="Concat_257746" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257746, Constant_257739, ShapeOf_257745" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="540" name="Reshape_257747" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257747" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="541" name="Multiply_257750" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257750" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="542" name="/encoder/down_blocks.2/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="543" name="Constant_257751" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257751" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="544" name="ShapeOf_257757" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257757" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="545" name="Constant_257752" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257752" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="546" name="Subtract_257753" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257752, Subtract_257753" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="547" name="Broadcast_257754" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257754, Constant_257751" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="548" name="Concat_257758" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257758, Constant_257751, ShapeOf_257757" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="549" name="Reshape_257759" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257759" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="550" name="/encoder/down_blocks.2/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="551" name="/encoder/down_blocks.2/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.2/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="552" name="/encoder/down_blocks.2/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="553" name="onnx::Mul_992" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="18984880" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_992" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_992">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="554" name="/encoder/down_blocks.2/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="555" name="onnx::Add_993" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="18986928" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_993" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_993">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="556" name="/encoder/down_blocks.2/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="557" name="/encoder/down_blocks.2/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.2/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="558" name="/encoder/down_blocks.2/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="559" name="encoder.down_blocks.2.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="18988976" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.0.conv2.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="560" name="/encoder/down_blocks.2/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="561" name="encoder.down_blocks.2.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="28426160" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.0.conv2.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="562" name="Constant_257778" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257778" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="563" name="ShapeOf_257784" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257784" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="564" name="ShapeOf_257776" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257776" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="565" name="ShapeOf_257777" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257777" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="566" name="Constant_257779" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257779" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="567" name="Subtract_257780" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257779, Subtract_257780" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="568" name="Broadcast_257781" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257781, Constant_257778" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="569" name="Concat_257785" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257785, Constant_257778, ShapeOf_257784" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="570" name="Reshape_257786" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257786" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="571" name="/encoder/down_blocks.2/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="572" name="/encoder/down_blocks.2/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="573" name="/encoder/down_blocks.2/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="574" name="/encoder/down_blocks.2/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="575" name="/encoder/down_blocks.2/resnets.1/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/resnets.1/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="576" name="/encoder/down_blocks.2/resnets.1/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="577" name="Constant_257814" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257814" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="578" name="MVN_257815" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257815" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="579" name="/encoder/down_blocks.2/resnets.1/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="580" name="Constant_257818" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257818" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="581" name="ShapeOf_257824" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257824" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="582" name="ShapeOf_257816" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257816" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="583" name="ShapeOf_257817" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257817" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="584" name="Constant_257819" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257819" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="585" name="Subtract_257820" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257819, Subtract_257820" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="586" name="Broadcast_257821" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257821, Constant_257818" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="587" name="Concat_257825" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257825, Constant_257818, ShapeOf_257824" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="588" name="Reshape_257826" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257826" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="589" name="Multiply_257829" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257829" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="590" name="/encoder/down_blocks.2/resnets.1/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="591" name="Constant_257830" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257830" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="592" name="ShapeOf_257836" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257836" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="593" name="Constant_257831" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257831" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="594" name="Subtract_257832" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257831, Subtract_257832" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="595" name="Broadcast_257833" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257833, Constant_257830" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="596" name="Concat_257837" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257837, Constant_257830, ShapeOf_257836" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="597" name="Reshape_257838" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257838" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="598" name="/encoder/down_blocks.2/resnets.1/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="599" name="/encoder/down_blocks.2/resnets.1/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.2/resnets.1/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="600" name="/encoder/down_blocks.2/resnets.1/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="601" name="onnx::Mul_994" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="28428208" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_994" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_994">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="602" name="/encoder/down_blocks.2/resnets.1/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="603" name="onnx::Add_995" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="28430256" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_995" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_995">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="604" name="/encoder/down_blocks.2/resnets.1/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="605" name="/encoder/down_blocks.2/resnets.1/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.2/resnets.1/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="606" name="/encoder/down_blocks.2/resnets.1/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="607" name="encoder.down_blocks.2.resnets.1.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="28432304" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.1.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.1.conv1.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="608" name="/encoder/down_blocks.2/resnets.1/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="609" name="encoder.down_blocks.2.resnets.1.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="37869488" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.1.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.1.conv1.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="610" name="Constant_257857" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257857" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="611" name="ShapeOf_257863" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257863" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="612" name="ShapeOf_257855" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257855" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="613" name="ShapeOf_257856" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257856" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="614" name="Constant_257858" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257858" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="615" name="Subtract_257859" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257858, Subtract_257859" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="616" name="Broadcast_257860" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257860, Constant_257857" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="617" name="Concat_257864" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257864, Constant_257857, ShapeOf_257863" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="618" name="Reshape_257865" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257865" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="619" name="/encoder/down_blocks.2/resnets.1/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/conv1/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="620" name="/encoder/down_blocks.2/resnets.1/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/resnets.1/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="621" name="/encoder/down_blocks.2/resnets.1/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="622" name="Constant_257875" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257875" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="623" name="MVN_257876" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257876" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="624" name="/encoder/down_blocks.2/resnets.1/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="625" name="Constant_257879" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257879" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="626" name="ShapeOf_257885" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257885" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="627" name="ShapeOf_257877" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257877" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="628" name="ShapeOf_257878" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257878" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="629" name="Constant_257880" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257880" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="630" name="Subtract_257881" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257880, Subtract_257881" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="631" name="Broadcast_257882" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257882, Constant_257879" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="632" name="Concat_257886" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257886, Constant_257879, ShapeOf_257885" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="633" name="Reshape_257887" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257887" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="634" name="Multiply_257890" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_257890" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="635" name="/encoder/down_blocks.2/resnets.1/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="636" name="Constant_257891" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257891" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="637" name="ShapeOf_257897" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257897" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="638" name="Constant_257892" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257892" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="639" name="Subtract_257893" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257892, Subtract_257893" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="640" name="Broadcast_257894" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257894, Constant_257891" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="641" name="Concat_257898" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257898, Constant_257891, ShapeOf_257897" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="642" name="Reshape_257899" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257899" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="643" name="/encoder/down_blocks.2/resnets.1/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="644" name="/encoder/down_blocks.2/resnets.1/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.2/resnets.1/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="645" name="/encoder/down_blocks.2/resnets.1/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="646" name="onnx::Mul_996" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="37871536" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_996" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_996">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="647" name="/encoder/down_blocks.2/resnets.1/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="648" name="onnx::Add_997" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="37873584" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_997" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_997">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="649" name="/encoder/down_blocks.2/resnets.1/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/norm2/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="650" name="/encoder/down_blocks.2/resnets.1/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.2/resnets.1/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="651" name="/encoder/down_blocks.2/resnets.1/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="652" name="encoder.down_blocks.2.resnets.1.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="37875632" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.1.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.1.conv2.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="653" name="/encoder/down_blocks.2/resnets.1/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="654" name="encoder.down_blocks.2.resnets.1.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="47312816" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.resnets.1.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.resnets.1.conv2.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="655" name="Constant_257918" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257918" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="656" name="ShapeOf_257924" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257924" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="657" name="ShapeOf_257916" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257916" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="658" name="ShapeOf_257917" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257917" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="659" name="Constant_257919" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257919" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="660" name="Subtract_257920" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257919, Subtract_257920" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="661" name="Broadcast_257921" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257921, Constant_257918" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="662" name="Concat_257925" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257925, Constant_257918, ShapeOf_257924" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="663" name="Reshape_257926" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257926" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="664" name="/encoder/down_blocks.2/resnets.1/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/conv2/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="665" name="/encoder/down_blocks.2/resnets.1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="666" name="/encoder/down_blocks.2/resnets.1/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/resnets.1/Constant_output_0" />
</output>
</layer>
<layer id="667" name="/encoder/down_blocks.2/resnets.1/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/resnets.1/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/resnets.1/Div_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="668" name="/encoder/down_blocks.2/downsamplers.0/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="2384188" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="669" name="Constant_257936" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257936" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="670" name="/encoder/down_blocks.2/downsamplers.0/Constant" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384228" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="671" name="/encoder/down_blocks.2/downsamplers.0/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant, /encoder/down_blocks.2/downsamplers.0/ConstantOfShape, Constant_257936" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/ConstantOfShape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="672" name="/encoder/down_blocks.2/downsamplers.0/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Concat, /encoder/down_blocks.2/downsamplers.0/Constant_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Concat_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="673" name="/encoder/down_blocks.2/downsamplers.0/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="2384236" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_2_output_0">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="674" name="/encoder/down_blocks.2/downsamplers.0/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_2, /encoder/down_blocks.2/downsamplers.0/Reshape" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Reshape_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="675" name="/encoder/down_blocks.2/downsamplers.0/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="676" name="/encoder/down_blocks.2/downsamplers.0/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384260" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="677" name="/encoder/down_blocks.2/downsamplers.0/Constant_6" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_6" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_6_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="678" name="/encoder/down_blocks.2/downsamplers.0/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Constant_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="679" name="/encoder/down_blocks.2/downsamplers.0/Slice" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_3, /encoder/down_blocks.2/downsamplers.0/Constant_4, /encoder/down_blocks.2/downsamplers.0/Constant_5, /encoder/down_blocks.2/downsamplers.0/Constant_6, /encoder/down_blocks.2/downsamplers.0/Slice" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Slice_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="680" name="Constant_257950" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="2384268" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257950" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="681" name="/encoder/down_blocks.2/downsamplers.0/Transpose" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Transpose, Constant_257950" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Transpose_output_0">
<dim>2</dim>
<dim>4</dim>
</port>
</output>
</layer>
<layer id="682" name="Constant_842545" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384252" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Reshape_1" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="683" name="/encoder/down_blocks.2/downsamplers.0/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>2</dim>
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Reshape_1_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="684" name="/encoder/down_blocks.2/downsamplers.0/Cast" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Cast" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.2/downsamplers.0/Cast_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="685" name="Constant_257958" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257958" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="686" name="Split_257959" type="Split" version="opset1">
<data num_splits="2" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257958, Split_257959" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="687" name="/encoder/down_blocks.2/downsamplers.0/Constant_8" type="Const" version="opset1">
<data element_type="f32" shape="" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Constant_8" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.2/downsamplers.0/Constant_8_output_0" />
</output>
</layer>
<layer id="688" name="/encoder/down_blocks.2/downsamplers.0/Pad" type="Pad" version="opset1">
<data pad_mode="constant" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/Pad" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="FP32" />
</input>
<output>
<port id="4" precision="FP32" names="/encoder/down_blocks.2/downsamplers.0/Pad_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="689" name="encoder.down_blocks.2.downsamplers.0.conv.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="47314864" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.downsamplers.0.conv.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.downsamplers.0.conv.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="690" name="/encoder/down_blocks.2/downsamplers.0/conv/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/conv/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="691" name="encoder.down_blocks.2.downsamplers.0.conv.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="56752048" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.2.downsamplers.0.conv.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.2.downsamplers.0.conv.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="692" name="Constant_257974" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257974" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="693" name="ShapeOf_257980" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257980" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="694" name="ShapeOf_257972" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257972" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="695" name="ShapeOf_257973" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257973" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="696" name="Constant_257975" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257975" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="697" name="Subtract_257976" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257975, Subtract_257976" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="698" name="Broadcast_257977" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257977, Constant_257974" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="699" name="Concat_257981" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_257981, Constant_257974, ShapeOf_257980" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="700" name="Reshape_257982" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_257982" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="701" name="/encoder/down_blocks.2/downsamplers.0/conv/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.2/downsamplers.0/conv/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.2/downsamplers.0/conv/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="702" name="/encoder/down_blocks.3/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.3/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="703" name="/encoder/down_blocks.3/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="704" name="Constant_257992" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257992" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="705" name="MVN_257993" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_257993" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="706" name="/encoder/down_blocks.3/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="707" name="Constant_257996" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257996" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="708" name="ShapeOf_258002" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258002" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="709" name="ShapeOf_257994" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257994" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="710" name="ShapeOf_257995" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_257995" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="711" name="Constant_257997" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257997" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="712" name="Subtract_257998" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_257997, Subtract_257998" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="713" name="Broadcast_257999" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_257999, Constant_257996" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="714" name="Concat_258003" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258003, Constant_257996, ShapeOf_258002" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="715" name="Reshape_258004" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258004" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="716" name="Multiply_258007" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258007" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="717" name="/encoder/down_blocks.3/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="718" name="Constant_258008" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258008" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="719" name="ShapeOf_258014" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258014" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="720" name="Constant_258009" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258009" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="721" name="Subtract_258010" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258009, Subtract_258010" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="722" name="Broadcast_258011" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258011, Constant_258008" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="723" name="Concat_258015" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258015, Constant_258008, ShapeOf_258014" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="724" name="Reshape_258016" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258016" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="725" name="/encoder/down_blocks.3/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="726" name="/encoder/down_blocks.3/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.3/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="727" name="/encoder/down_blocks.3/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="728" name="onnx::Mul_1003" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="56754096" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1003" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1003">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="729" name="/encoder/down_blocks.3/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="730" name="onnx::Add_1004" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="56756144" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1004" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1004">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="731" name="/encoder/down_blocks.3/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="732" name="/encoder/down_blocks.3/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.3/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="733" name="/encoder/down_blocks.3/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="734" name="encoder.down_blocks.3.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="56758192" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.0.conv1.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="735" name="/encoder/down_blocks.3/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="736" name="encoder.down_blocks.3.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="66195376" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.0.conv1.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="737" name="Constant_258035" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258035" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="738" name="ShapeOf_258041" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258041" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="739" name="ShapeOf_258033" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258033" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="740" name="ShapeOf_258034" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258034" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="741" name="Constant_258036" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258036" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="742" name="Subtract_258037" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258036, Subtract_258037" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="743" name="Broadcast_258038" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258038, Constant_258035" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="744" name="Concat_258042" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258042, Constant_258035, ShapeOf_258041" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="745" name="Reshape_258043" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258043" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="746" name="/encoder/down_blocks.3/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="747" name="/encoder/down_blocks.3/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.3/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="748" name="/encoder/down_blocks.3/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="749" name="Constant_258053" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258053" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="750" name="MVN_258054" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_258054" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="751" name="/encoder/down_blocks.3/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="752" name="Constant_258057" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258057" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="753" name="ShapeOf_258063" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258063" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="754" name="ShapeOf_258055" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258055" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="755" name="ShapeOf_258056" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258056" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="756" name="Constant_258058" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258058" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="757" name="Subtract_258059" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258058, Subtract_258059" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="758" name="Broadcast_258060" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258060, Constant_258057" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="759" name="Concat_258064" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258064, Constant_258057, ShapeOf_258063" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="760" name="Reshape_258065" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258065" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="761" name="Multiply_258068" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258068" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="762" name="/encoder/down_blocks.3/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="763" name="Constant_258069" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258069" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="764" name="ShapeOf_258075" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258075" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="765" name="Constant_258070" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258070" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="766" name="Subtract_258071" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258070, Subtract_258071" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="767" name="Broadcast_258072" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258072, Constant_258069" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="768" name="Concat_258076" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258076, Constant_258069, ShapeOf_258075" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="769" name="Reshape_258077" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258077" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="770" name="/encoder/down_blocks.3/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="771" name="/encoder/down_blocks.3/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.3/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="772" name="/encoder/down_blocks.3/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="773" name="onnx::Mul_1005" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="66197424" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1005" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1005">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="774" name="/encoder/down_blocks.3/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="775" name="onnx::Add_1006" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="66199472" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1006" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1006">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="776" name="/encoder/down_blocks.3/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="777" name="/encoder/down_blocks.3/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.3/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="778" name="/encoder/down_blocks.3/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="779" name="encoder.down_blocks.3.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="66201520" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.0.conv2.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="780" name="/encoder/down_blocks.3/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="781" name="encoder.down_blocks.3.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="75638704" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.0.conv2.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="782" name="Constant_258096" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258096" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="783" name="ShapeOf_258102" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258102" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="784" name="ShapeOf_258094" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258094" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="785" name="ShapeOf_258095" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258095" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="786" name="Constant_258097" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258097" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="787" name="Subtract_258098" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258097, Subtract_258098" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="788" name="Broadcast_258099" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258099, Constant_258096" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="789" name="Concat_258103" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258103, Constant_258096, ShapeOf_258102" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="790" name="Reshape_258104" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258104" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="791" name="/encoder/down_blocks.3/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="792" name="/encoder/down_blocks.3/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="793" name="/encoder/down_blocks.3/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="794" name="/encoder/down_blocks.3/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="795" name="/encoder/down_blocks.3/resnets.1/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.3/resnets.1/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="796" name="/encoder/down_blocks.3/resnets.1/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="797" name="Constant_258117" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258117" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="798" name="MVN_258118" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_258118" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="799" name="/encoder/down_blocks.3/resnets.1/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="800" name="Constant_258121" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258121" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="801" name="ShapeOf_258127" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258127" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="802" name="ShapeOf_258119" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258119" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="803" name="ShapeOf_258120" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258120" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="804" name="Constant_258122" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258122" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="805" name="Subtract_258123" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258122, Subtract_258123" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="806" name="Broadcast_258124" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258124, Constant_258121" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="807" name="Concat_258128" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258128, Constant_258121, ShapeOf_258127" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="808" name="Reshape_258129" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258129" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="809" name="Multiply_258132" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258132" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="810" name="/encoder/down_blocks.3/resnets.1/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="811" name="Constant_258133" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258133" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="812" name="ShapeOf_258139" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258139" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="813" name="Constant_258134" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258134" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="814" name="Subtract_258135" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258134, Subtract_258135" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="815" name="Broadcast_258136" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258136, Constant_258133" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="816" name="Concat_258140" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258140, Constant_258133, ShapeOf_258139" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="817" name="Reshape_258141" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258141" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="818" name="/encoder/down_blocks.3/resnets.1/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="819" name="/encoder/down_blocks.3/resnets.1/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.3/resnets.1/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="820" name="/encoder/down_blocks.3/resnets.1/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="821" name="onnx::Mul_1007" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="75640752" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1007" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1007">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="822" name="/encoder/down_blocks.3/resnets.1/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="823" name="onnx::Add_1008" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="75642800" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1008" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1008">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="824" name="/encoder/down_blocks.3/resnets.1/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="825" name="/encoder/down_blocks.3/resnets.1/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.3/resnets.1/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="826" name="/encoder/down_blocks.3/resnets.1/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="827" name="encoder.down_blocks.3.resnets.1.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="75644848" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.1.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.1.conv1.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="828" name="/encoder/down_blocks.3/resnets.1/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="829" name="encoder.down_blocks.3.resnets.1.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="85082032" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.1.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.1.conv1.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="830" name="Constant_258160" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258160" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="831" name="ShapeOf_258166" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258166" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="832" name="ShapeOf_258158" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258158" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="833" name="ShapeOf_258159" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258159" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="834" name="Constant_258161" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258161" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="835" name="Subtract_258162" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258161, Subtract_258162" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="836" name="Broadcast_258163" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258163, Constant_258160" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="837" name="Concat_258167" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258167, Constant_258160, ShapeOf_258166" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="838" name="Reshape_258168" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258168" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="839" name="/encoder/down_blocks.3/resnets.1/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/conv1/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="840" name="/encoder/down_blocks.3/resnets.1/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.3/resnets.1/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="841" name="/encoder/down_blocks.3/resnets.1/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="842" name="Constant_258178" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258178" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="843" name="MVN_258179" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_258179" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="844" name="/encoder/down_blocks.3/resnets.1/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="845" name="Constant_258182" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258182" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="846" name="ShapeOf_258188" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258188" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="847" name="ShapeOf_258180" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258180" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="848" name="ShapeOf_258181" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258181" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="849" name="Constant_258183" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258183" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="850" name="Subtract_258184" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258183, Subtract_258184" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="851" name="Broadcast_258185" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258185, Constant_258182" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="852" name="Concat_258189" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258189, Constant_258182, ShapeOf_258188" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="853" name="Reshape_258190" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258190" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="854" name="Multiply_258193" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258193" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="855" name="/encoder/down_blocks.3/resnets.1/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="856" name="Constant_258194" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258194" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="857" name="ShapeOf_258200" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258200" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="858" name="Constant_258195" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258195" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="859" name="Subtract_258196" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258195, Subtract_258196" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="860" name="Broadcast_258197" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258197, Constant_258194" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="861" name="Concat_258201" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258201, Constant_258194, ShapeOf_258200" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="862" name="Reshape_258202" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258202" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="863" name="/encoder/down_blocks.3/resnets.1/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="864" name="/encoder/down_blocks.3/resnets.1/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.3/resnets.1/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="865" name="/encoder/down_blocks.3/resnets.1/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="866" name="onnx::Mul_1009" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="85084080" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1009" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1009">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="867" name="/encoder/down_blocks.3/resnets.1/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="868" name="onnx::Add_1010" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="85086128" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1010" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1010">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="869" name="/encoder/down_blocks.3/resnets.1/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/norm2/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="870" name="/encoder/down_blocks.3/resnets.1/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.3/resnets.1/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="871" name="/encoder/down_blocks.3/resnets.1/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="872" name="encoder.down_blocks.3.resnets.1.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="85088176" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.1.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.1.conv2.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="873" name="/encoder/down_blocks.3/resnets.1/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="874" name="encoder.down_blocks.3.resnets.1.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="94525360" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.3.resnets.1.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.3.resnets.1.conv2.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="875" name="Constant_258221" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258221" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="876" name="ShapeOf_258227" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258227" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="877" name="ShapeOf_258219" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258219" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="878" name="ShapeOf_258220" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258220" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="879" name="Constant_258222" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258222" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="880" name="Subtract_258223" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258222, Subtract_258223" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="881" name="Broadcast_258224" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258224, Constant_258221" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="882" name="Concat_258228" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258228, Constant_258221, ShapeOf_258227" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="883" name="Reshape_258229" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258229" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="884" name="/encoder/down_blocks.3/resnets.1/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/conv2/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="885" name="/encoder/down_blocks.3/resnets.1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="886" name="/encoder/down_blocks.3/resnets.1/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.3/resnets.1/Constant_output_0" />
</output>
</layer>
<layer id="887" name="/encoder/down_blocks.3/resnets.1/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.3/resnets.1/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.3/resnets.1/Div_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="888" name="/encoder/mid_block/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="889" name="/encoder/mid_block/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="890" name="Constant_258242" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258242" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="891" name="MVN_258243" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_258243" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="892" name="/encoder/mid_block/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="893" name="Constant_258246" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258246" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="894" name="ShapeOf_258252" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258252" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="895" name="ShapeOf_258244" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258244" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="896" name="ShapeOf_258245" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258245" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="897" name="Constant_258247" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258247" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="898" name="Subtract_258248" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258247, Subtract_258248" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="899" name="Broadcast_258249" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258249, Constant_258246" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="900" name="Concat_258253" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258253, Constant_258246, ShapeOf_258252" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="901" name="Reshape_258254" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258254" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="902" name="Multiply_258257" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258257" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="903" name="/encoder/mid_block/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="904" name="Constant_258258" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258258" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="905" name="ShapeOf_258264" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258264" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="906" name="Constant_258259" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258259" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="907" name="Subtract_258260" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258259, Subtract_258260" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="908" name="Broadcast_258261" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258261, Constant_258258" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="909" name="Concat_258265" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258265, Constant_258258, ShapeOf_258264" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="910" name="Reshape_258266" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258266" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="911" name="/encoder/mid_block/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="912" name="/encoder/mid_block/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="913" name="/encoder/mid_block/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="914" name="onnx::Mul_1011" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="94527408" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1011" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1011">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="915" name="/encoder/mid_block/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="916" name="onnx::Add_1012" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="94529456" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1012" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1012">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="917" name="/encoder/mid_block/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="918" name="/encoder/mid_block/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="919" name="/encoder/mid_block/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="920" name="encoder.mid_block.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="94531504" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv1.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="921" name="/encoder/mid_block/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="922" name="encoder.mid_block.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="103968688" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv1.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="923" name="Constant_258285" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258285" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="924" name="ShapeOf_258291" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258291" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="925" name="ShapeOf_258283" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258283" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="926" name="ShapeOf_258284" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258284" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="927" name="Constant_258286" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258286" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="928" name="Subtract_258287" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258286, Subtract_258287" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="929" name="Broadcast_258288" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258288, Constant_258285" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="930" name="Concat_258292" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258292, Constant_258285, ShapeOf_258291" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="931" name="Reshape_258293" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258293" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="932" name="/encoder/mid_block/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="933" name="/encoder/mid_block/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="934" name="/encoder/mid_block/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="935" name="Constant_258303" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258303" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="936" name="MVN_258304" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_258304" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="937" name="/encoder/mid_block/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="938" name="Constant_258307" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258307" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="939" name="ShapeOf_258313" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258313" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="940" name="ShapeOf_258305" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258305" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="941" name="ShapeOf_258306" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258306" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="942" name="Constant_258308" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258308" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="943" name="Subtract_258309" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258308, Subtract_258309" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="944" name="Broadcast_258310" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258310, Constant_258307" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="945" name="Concat_258314" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258314, Constant_258307, ShapeOf_258313" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="946" name="Reshape_258315" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258315" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="947" name="Multiply_258318" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258318" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="948" name="/encoder/mid_block/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="949" name="Constant_258319" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258319" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="950" name="ShapeOf_258325" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258325" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="951" name="Constant_258320" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258320" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="952" name="Subtract_258321" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258320, Subtract_258321" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="953" name="Broadcast_258322" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258322, Constant_258319" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="954" name="Concat_258326" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258326, Constant_258319, ShapeOf_258325" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="955" name="Reshape_258327" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258327" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="956" name="/encoder/mid_block/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="957" name="/encoder/mid_block/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="958" name="/encoder/mid_block/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="959" name="onnx::Mul_1013" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="103970736" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1013" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1013">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="960" name="/encoder/mid_block/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="961" name="onnx::Add_1014" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="103972784" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1014" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1014">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="962" name="/encoder/mid_block/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="963" name="/encoder/mid_block/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="964" name="/encoder/mid_block/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="965" name="encoder.mid_block.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="103974832" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv2.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="966" name="/encoder/mid_block/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="967" name="encoder.mid_block.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="113412016" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv2.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="968" name="Constant_258346" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258346" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="969" name="ShapeOf_258352" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258352" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="970" name="ShapeOf_258344" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258344" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="971" name="ShapeOf_258345" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258345" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="972" name="Constant_258347" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258347" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="973" name="Subtract_258348" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258347, Subtract_258348" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="974" name="Broadcast_258349" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258349, Constant_258346" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="975" name="Concat_258353" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258353, Constant_258346, ShapeOf_258352" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="976" name="Reshape_258354" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258354" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="977" name="/encoder/mid_block/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="978" name="/encoder/mid_block/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="979" name="/encoder/mid_block/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="980" name="/encoder/mid_block/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="981" name="/encoder/mid_block/attentions.0/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="982" name="/encoder/mid_block/attentions.0/Constant" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_output_0" />
</output>
</layer>
<layer id="983" name="Constant_258363" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258363" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="984" name="/encoder/mid_block/attentions.0/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant, /encoder/mid_block/attentions.0/Gather, Constant_258363" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_output_0" />
</output>
</layer>
<layer id="985" name="Constant_313" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_313" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_655">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="986" name="/encoder/mid_block/attentions.0/Unsqueeze" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze, Constant_313" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="987" name="/encoder/mid_block/attentions.0/Shape_1" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="988" name="/encoder/mid_block/attentions.0/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_1_output_0" />
</output>
</layer>
<layer id="989" name="Constant_258367" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258367" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="990" name="/encoder/mid_block/attentions.0/Gather_1" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_1, /encoder/mid_block/attentions.0/Gather_1, Constant_258367" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_1_output_0" />
</output>
</layer>
<layer id="991" name="Constant_315" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_315" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_657">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="992" name="/encoder/mid_block/attentions.0/Unsqueeze_1" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_1, Constant_315" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="993" name="/encoder/mid_block/attentions.0/Shape_2" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_2_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="994" name="/encoder/mid_block/attentions.0/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_2_output_0" />
</output>
</layer>
<layer id="995" name="Constant_258371" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258371" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="996" name="/encoder/mid_block/attentions.0/Gather_2" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_2, /encoder/mid_block/attentions.0/Gather_2, Constant_258371" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_2_output_0" />
</output>
</layer>
<layer id="997" name="/encoder/mid_block/attentions.0/Shape_3" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_3_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="998" name="/encoder/mid_block/attentions.0/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="" offset="113414064" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_3_output_0" />
</output>
</layer>
<layer id="999" name="Constant_258375" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258375" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1000" name="/encoder/mid_block/attentions.0/Gather_3" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_3, /encoder/mid_block/attentions.0/Gather_3, Constant_258375" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_3_output_0" />
</output>
</layer>
<layer id="1001" name="/encoder/mid_block/attentions.0/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Mul" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Mul_output_0" />
</output>
</layer>
<layer id="1002" name="Constant_317" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_317" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_659">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1003" name="/encoder/mid_block/attentions.0/Unsqueeze_2" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_2, Constant_317" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_2_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1004" name="/encoder/mid_block/attentions.0/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1005" name="/encoder/mid_block/attentions.0/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1006" name="/encoder/mid_block/attentions.0/group_norm/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/group_norm/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1007" name="/encoder/mid_block/attentions.0/group_norm/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1008" name="Constant_258476" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258476" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1009" name="MVN_258477" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_258477" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1010" name="/encoder/mid_block/attentions.0/group_norm/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1011" name="Constant_258480" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258480" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1012" name="ShapeOf_258486" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258486" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1013" name="ShapeOf_258478" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258478" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1014" name="ShapeOf_258479" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258479" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1015" name="Constant_258481" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258481" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1016" name="Subtract_258482" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258481, Subtract_258482" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1017" name="Broadcast_258483" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258483, Constant_258480" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1018" name="Concat_258487" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258487, Constant_258480, ShapeOf_258486" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1019" name="Reshape_258488" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258488" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1020" name="Multiply_258491" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_258491" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1021" name="/encoder/mid_block/attentions.0/group_norm/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1022" name="Constant_258492" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258492" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1023" name="ShapeOf_258498" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_258498" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1024" name="Constant_258493" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258493" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1025" name="Subtract_258494" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258493, Subtract_258494" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1026" name="Broadcast_258495" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_258495, Constant_258492" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1027" name="Concat_258499" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_258499, Constant_258492, ShapeOf_258498" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1028" name="Reshape_258500" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_258500" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1029" name="/encoder/mid_block/attentions.0/group_norm/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1030" name="/encoder/mid_block/attentions.0/group_norm/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/group_norm/Shape_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1031" name="/encoder/mid_block/attentions.0/group_norm/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1032" name="onnx::Mul_1015" type="Const" version="opset1">
<data element_type="f32" shape="512, 1" offset="113414072" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1015" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1015">
<dim>512</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1033" name="/encoder/mid_block/attentions.0/group_norm/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1034" name="onnx::Add_1016" type="Const" version="opset1">
<data element_type="f32" shape="512, 1" offset="113416120" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1016" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1016">
<dim>512</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1035" name="/encoder/mid_block/attentions.0/group_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1036" name="onnx::MatMul_1017" type="Const" version="opset1">
<data element_type="f32" shape="512, 512" offset="113418168" size="1048576" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_1017" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_1017">
<dim>512</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1037" name="/encoder/mid_block/attentions.0/to_q/MatMul" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_1, /encoder/mid_block/attentions.0/to_q/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_q/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1038" name="/encoder/mid_block/attentions.0/to_q/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_q/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_q/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1039" name="/encoder/mid_block/attentions.0/Shape_5" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_5" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_5_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1040" name="/encoder/mid_block/attentions.0/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_5_output_0" />
</output>
</layer>
<layer id="1041" name="Constant_258524" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258524" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1042" name="/encoder/mid_block/attentions.0/Gather_5" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_5, /encoder/mid_block/attentions.0/Gather_5, Constant_258524" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_5_output_0" />
</output>
</layer>
<layer id="1043" name="Constant_354" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_354" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_703">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1044" name="/encoder/mid_block/attentions.0/Unsqueeze_3" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_3, Constant_354" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1045" name="/encoder/mid_block/attentions.0/Shape_6" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_6_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1046" name="/encoder/mid_block/attentions.0/Constant_6" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_6" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_6_output_0" />
</output>
</layer>
<layer id="1047" name="Constant_258528" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258528" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1048" name="/encoder/mid_block/attentions.0/Gather_6" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_6, /encoder/mid_block/attentions.0/Gather_6, Constant_258528" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_6_output_0" />
</output>
</layer>
<layer id="1049" name="Constant_356" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_356" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_705">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1050" name="/encoder/mid_block/attentions.0/Unsqueeze_4" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_4, Constant_356" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1051" name="/encoder/mid_block/attentions.0/Constant_9" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_9" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_9_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1052" name="/encoder/mid_block/attentions.0/Shape_7" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_7_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1053" name="/encoder/mid_block/attentions.0/Constant_7" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_7" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_7_output_0" />
</output>
</layer>
<layer id="1054" name="Constant_258532" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258532" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1055" name="/encoder/mid_block/attentions.0/Gather_7" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_7, /encoder/mid_block/attentions.0/Gather_7, Constant_258532" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_7_output_0" />
</output>
</layer>
<layer id="1056" name="/encoder/mid_block/attentions.0/Constant_8" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_8" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_8_output_0" />
</output>
</layer>
<layer id="1057" name="/encoder/mid_block/attentions.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_8, /encoder/mid_block/attentions.0/Div" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_output_0" />
</output>
</layer>
<layer id="1058" name="/encoder/mid_block/attentions.0/Cast" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_output_0" />
</output>
</layer>
<layer id="1059" name="/encoder/mid_block/attentions.0/Cast_1" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_1" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_1_output_0" />
</output>
</layer>
<layer id="1060" name="Constant_359" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_359" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_709">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1061" name="/encoder/mid_block/attentions.0/Unsqueeze_5" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_5, Constant_359" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1062" name="/encoder/mid_block/attentions.0/Concat_1" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_1, /encoder/mid_block/attentions.0/Constant_9" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1063" name="/encoder/mid_block/attentions.0/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_1_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1064" name="Constant_258615" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="114466744" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258615" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1065" name="/encoder/mid_block/attentions.0/Transpose_2" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_2_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1066" name="Constant_364" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_364" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_714">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1067" name="/encoder/mid_block/attentions.0/Unsqueeze_6" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_6, Constant_364" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_6_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1068" name="Constant_366" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_366" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_716">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1069" name="/encoder/mid_block/attentions.0/Unsqueeze_7" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_7, Constant_366" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_7_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1070" name="Constant_368" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_368" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_718">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1071" name="/encoder/mid_block/attentions.0/Unsqueeze_8" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_8, Constant_368" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_8_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1072" name="/encoder/mid_block/attentions.0/Concat_2" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_2" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_2_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1073" name="/encoder/mid_block/attentions.0/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_2_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1074" name="encoder.mid_block.attentions.0.to_k.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="114466776" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_k.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_k.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1075" name="onnx::MatMul_1018" type="Const" version="opset1">
<data element_type="f32" shape="512, 512" offset="114468824" size="1048576" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_1018" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_1018">
<dim>512</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1076" name="/encoder/mid_block/attentions.0/to_k/MatMul" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_1, /encoder/mid_block/attentions.0/to_k/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_k/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1077" name="/encoder/mid_block/attentions.0/to_k/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_k/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_k/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1078" name="/encoder/mid_block/attentions.0/Shape_8" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_8" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_8_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1079" name="/encoder/mid_block/attentions.0/Constant_10" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_10" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_10_output_0" />
</output>
</layer>
<layer id="1080" name="Constant_258695" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258695" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1081" name="/encoder/mid_block/attentions.0/Gather_8" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_10, /encoder/mid_block/attentions.0/Gather_8, Constant_258695" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_8_output_0" />
</output>
</layer>
<layer id="1082" name="Constant_385" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_385" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_735">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1083" name="/encoder/mid_block/attentions.0/Unsqueeze_9" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_9, Constant_385" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_9_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1084" name="/encoder/mid_block/attentions.0/Shape_9" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_9" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_9_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1085" name="/encoder/mid_block/attentions.0/Constant_11" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_11" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_11_output_0" />
</output>
</layer>
<layer id="1086" name="Constant_258699" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258699" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1087" name="/encoder/mid_block/attentions.0/Gather_9" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_11, /encoder/mid_block/attentions.0/Gather_9, Constant_258699" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_9_output_0" />
</output>
</layer>
<layer id="1088" name="Constant_387" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_387" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_737">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1089" name="/encoder/mid_block/attentions.0/Unsqueeze_10" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_10, Constant_387" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_10_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1090" name="/encoder/mid_block/attentions.0/Constant_14" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_14" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_14_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1091" name="/encoder/mid_block/attentions.0/Shape_10" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_10" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_10_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1092" name="/encoder/mid_block/attentions.0/Constant_12" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_12" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_12_output_0" />
</output>
</layer>
<layer id="1093" name="Constant_258703" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258703" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1094" name="/encoder/mid_block/attentions.0/Gather_10" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_12, /encoder/mid_block/attentions.0/Gather_10, Constant_258703" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_10_output_0" />
</output>
</layer>
<layer id="1095" name="/encoder/mid_block/attentions.0/Constant_13" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_13" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_13_output_0" />
</output>
</layer>
<layer id="1096" name="/encoder/mid_block/attentions.0/Div_1" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_13, /encoder/mid_block/attentions.0/Div_1" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_1_output_0" />
</output>
</layer>
<layer id="1097" name="/encoder/mid_block/attentions.0/Cast_2" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_2" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_2_output_0" />
</output>
</layer>
<layer id="1098" name="/encoder/mid_block/attentions.0/Cast_3" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_3" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_3_output_0" />
</output>
</layer>
<layer id="1099" name="Constant_390" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_390" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_741">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1100" name="/encoder/mid_block/attentions.0/Unsqueeze_11" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_11, Constant_390" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_11_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1101" name="/encoder/mid_block/attentions.0/Concat_3" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_3, /encoder/mid_block/attentions.0/Constant_14" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_3_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1102" name="/encoder/mid_block/attentions.0/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_3_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1103" name="Constant_258786" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="114466744" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258786" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1104" name="/encoder/mid_block/attentions.0/Transpose_3" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_3_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1105" name="Constant_395" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_395" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_746">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1106" name="/encoder/mid_block/attentions.0/Unsqueeze_12" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_12, Constant_395" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_12_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1107" name="Constant_397" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_397" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_748">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1108" name="/encoder/mid_block/attentions.0/Unsqueeze_13" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_13, Constant_397" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_13_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1109" name="Constant_399" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_399" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_750">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1110" name="/encoder/mid_block/attentions.0/Unsqueeze_14" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_14, Constant_399" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_14_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1111" name="/encoder/mid_block/attentions.0/Concat_4" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_4" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_4_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1112" name="/encoder/mid_block/attentions.0/Reshape_4" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_4_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1113" name="/encoder/mid_block/attentions.0/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/MatMul, /encoder/mid_block/attentions.0/Transpose_5" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1114" name="/encoder/mid_block/attentions.0/Constant_23" type="Const" version="opset1">
<data element_type="f32" shape="" offset="115517400" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_23" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/Constant_23_output_0" />
</output>
</layer>
<layer id="1115" name="/encoder/mid_block/attentions.0/Mul_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Mul_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Mul_1_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1116" name="Constant_259055" type="Const" version="opset1">
<data element_type="f32" shape="" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259055" />
</rt_info>
<output>
<port id="0" precision="FP32" />
</output>
</layer>
<layer id="1117" name="/encoder/mid_block/attentions.0/Shape_14" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_14" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_14_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1118" name="/encoder/mid_block/attentions.0/Constant_20" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_20" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_20_output_0" />
</output>
</layer>
<layer id="1119" name="Constant_259037" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259037" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1120" name="/encoder/mid_block/attentions.0/Gather_14" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_20, /encoder/mid_block/attentions.0/Gather_14, Constant_259037" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_14_output_0" />
</output>
</layer>
<layer id="1121" name="Constant_443" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_443" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_795">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1122" name="/encoder/mid_block/attentions.0/Unsqueeze_21" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_21, Constant_443" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_21_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1123" name="/encoder/mid_block/attentions.0/Shape_15" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_15" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_15_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1124" name="/encoder/mid_block/attentions.0/Constant_21" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_21" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_21_output_0" />
</output>
</layer>
<layer id="1125" name="Constant_259041" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259041" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1126" name="/encoder/mid_block/attentions.0/Gather_15" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_21, /encoder/mid_block/attentions.0/Gather_15, Constant_259041" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_15_output_0" />
</output>
</layer>
<layer id="1127" name="Constant_445" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_445" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_797">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1128" name="/encoder/mid_block/attentions.0/Unsqueeze_22" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_22, Constant_445" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_22_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1129" name="/encoder/mid_block/attentions.0/Shape_16" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_16" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_16_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1130" name="/encoder/mid_block/attentions.0/Constant_22" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_22" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_22_output_0" />
</output>
</layer>
<layer id="1131" name="Constant_259045" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259045" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1132" name="/encoder/mid_block/attentions.0/Gather_16" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_22, /encoder/mid_block/attentions.0/Gather_16, Constant_259045" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_16_output_0" />
</output>
</layer>
<layer id="1133" name="Constant_447" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_447" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_799">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1134" name="/encoder/mid_block/attentions.0/Unsqueeze_23" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_23, Constant_447" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_23_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1135" name="/encoder/mid_block/attentions.0/Concat_7" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_7" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_7_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1136" name="/encoder/mid_block/attentions.0/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/ConstantOfShape" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/ConstantOfShape_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1137" name="/encoder/mid_block/attentions.0/Constant_24" type="Const" version="opset1">
<data element_type="f32" shape="" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_24" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/Constant_24_output_0" />
</output>
</layer>
<layer id="1138" name="/encoder/mid_block/attentions.0/Mul_2" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Mul_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Mul_2_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1139" name="/encoder/mid_block/attentions.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1140" name="/encoder/mid_block/attentions.0/Cast_6" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/attentions.0/Cast_6_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1141" name="/encoder/mid_block/attentions.0/Softmax" type="SoftMax" version="opset8">
<data axis="-1" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Softmax" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/attentions.0/Softmax_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1142" name="/encoder/mid_block/attentions.0/Cast_7" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/attentions.0/Cast_7_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1143" name="encoder.mid_block.attentions.0.to_v.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="115517404" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_v.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_v.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1144" name="onnx::MatMul_1019" type="Const" version="opset1">
<data element_type="f32" shape="512, 512" offset="115519452" size="1048576" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_1019" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_1019">
<dim>512</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1145" name="/encoder/mid_block/attentions.0/to_v/MatMul" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_1, /encoder/mid_block/attentions.0/to_v/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_v/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1146" name="/encoder/mid_block/attentions.0/to_v/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_v/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_v/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1147" name="/encoder/mid_block/attentions.0/Shape_11" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_11" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_11_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1148" name="/encoder/mid_block/attentions.0/Constant_15" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_15" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_15_output_0" />
</output>
</layer>
<layer id="1149" name="Constant_258866" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258866" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1150" name="/encoder/mid_block/attentions.0/Gather_11" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_15, /encoder/mid_block/attentions.0/Gather_11, Constant_258866" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_11_output_0" />
</output>
</layer>
<layer id="1151" name="Constant_416" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_416" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_767">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1152" name="/encoder/mid_block/attentions.0/Unsqueeze_15" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_15, Constant_416" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_15_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1153" name="/encoder/mid_block/attentions.0/Shape_12" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_12" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_12_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1154" name="/encoder/mid_block/attentions.0/Constant_16" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_16" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_16_output_0" />
</output>
</layer>
<layer id="1155" name="Constant_258870" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258870" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1156" name="/encoder/mid_block/attentions.0/Gather_12" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_16, /encoder/mid_block/attentions.0/Gather_12, Constant_258870" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_12_output_0" />
</output>
</layer>
<layer id="1157" name="Constant_418" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_418" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_769">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1158" name="/encoder/mid_block/attentions.0/Unsqueeze_16" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_16, Constant_418" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_16_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1159" name="/encoder/mid_block/attentions.0/Constant_19" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_19" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_19_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1160" name="/encoder/mid_block/attentions.0/Shape_13" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_13" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_13_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1161" name="/encoder/mid_block/attentions.0/Constant_17" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_17" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_17_output_0" />
</output>
</layer>
<layer id="1162" name="Constant_258874" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258874" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1163" name="/encoder/mid_block/attentions.0/Gather_13" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_17, /encoder/mid_block/attentions.0/Gather_13, Constant_258874" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_13_output_0" />
</output>
</layer>
<layer id="1164" name="/encoder/mid_block/attentions.0/Constant_18" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_18" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_18_output_0" />
</output>
</layer>
<layer id="1165" name="/encoder/mid_block/attentions.0/Div_2" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_18, /encoder/mid_block/attentions.0/Div_2" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_2_output_0" />
</output>
</layer>
<layer id="1166" name="/encoder/mid_block/attentions.0/Cast_4" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_4" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_4_output_0" />
</output>
</layer>
<layer id="1167" name="/encoder/mid_block/attentions.0/Cast_5" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_5" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_5_output_0" />
</output>
</layer>
<layer id="1168" name="Constant_421" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_421" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_773">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1169" name="/encoder/mid_block/attentions.0/Unsqueeze_17" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_17, Constant_421" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_17_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1170" name="/encoder/mid_block/attentions.0/Concat_5" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_5, /encoder/mid_block/attentions.0/Constant_19" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_5_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1171" name="/encoder/mid_block/attentions.0/Reshape_5" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_5" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_5_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1172" name="Constant_258957" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="114466744" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258957" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1173" name="/encoder/mid_block/attentions.0/Transpose_4" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_4_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1174" name="Constant_426" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_426" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_778">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1175" name="/encoder/mid_block/attentions.0/Unsqueeze_18" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_18, Constant_426" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_18_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1176" name="Constant_428" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_428" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_780">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1177" name="/encoder/mid_block/attentions.0/Unsqueeze_19" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_19, Constant_428" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_19_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1178" name="Constant_430" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_430" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_782">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1179" name="/encoder/mid_block/attentions.0/Unsqueeze_20" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_20, Constant_430" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_20_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1180" name="/encoder/mid_block/attentions.0/Concat_6" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_6" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_6_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1181" name="/encoder/mid_block/attentions.0/Reshape_6" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_6_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1182" name="/encoder/mid_block/attentions.0/MatMul_1" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/MatMul_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/MatMul_1_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1183" name="/encoder/mid_block/attentions.0/Shape_17" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_17" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_17_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1184" name="/encoder/mid_block/attentions.0/Constant_25" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_25" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_25_output_0" />
</output>
</layer>
<layer id="1185" name="Constant_259085" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259085" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1186" name="/encoder/mid_block/attentions.0/Gather_17" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_25, /encoder/mid_block/attentions.0/Gather_17, Constant_259085" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_17_output_0" />
</output>
</layer>
<layer id="1187" name="/encoder/mid_block/attentions.0/Constant_28" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_28" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_28_output_0" />
</output>
</layer>
<layer id="1188" name="/encoder/mid_block/attentions.0/Div_3" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_28, /encoder/mid_block/attentions.0/Div_3" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_3_output_0" />
</output>
</layer>
<layer id="1189" name="/encoder/mid_block/attentions.0/Cast_8" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_8" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_8_output_0" />
</output>
</layer>
<layer id="1190" name="/encoder/mid_block/attentions.0/Cast_9" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_9" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_9_output_0" />
</output>
</layer>
<layer id="1191" name="Constant_475" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_475" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_828">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1192" name="/encoder/mid_block/attentions.0/Unsqueeze_24" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_24, Constant_475" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_24_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1193" name="/encoder/mid_block/attentions.0/Constant_29" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_29" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_29_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1194" name="/encoder/mid_block/attentions.0/Shape_18" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_18" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_18_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1195" name="/encoder/mid_block/attentions.0/Constant_26" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_26" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_26_output_0" />
</output>
</layer>
<layer id="1196" name="Constant_259089" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259089" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1197" name="/encoder/mid_block/attentions.0/Gather_18" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_26, /encoder/mid_block/attentions.0/Gather_18, Constant_259089" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_18_output_0" />
</output>
</layer>
<layer id="1198" name="Constant_478" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_478" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_832">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1199" name="/encoder/mid_block/attentions.0/Unsqueeze_25" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_25, Constant_478" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_25_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1200" name="/encoder/mid_block/attentions.0/Shape_19" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_19" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_19_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1201" name="/encoder/mid_block/attentions.0/Constant_27" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_27" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_27_output_0" />
</output>
</layer>
<layer id="1202" name="Constant_259093" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259093" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1203" name="/encoder/mid_block/attentions.0/Gather_19" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_27, /encoder/mid_block/attentions.0/Gather_19, Constant_259093" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_19_output_0" />
</output>
</layer>
<layer id="1204" name="Constant_480" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_480" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_834">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1205" name="/encoder/mid_block/attentions.0/Unsqueeze_26" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_26, Constant_480" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_26_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1206" name="/encoder/mid_block/attentions.0/Concat_8" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_8, /encoder/mid_block/attentions.0/Constant_29" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_8_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1207" name="/encoder/mid_block/attentions.0/Reshape_7" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_7_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1208" name="Constant_259176" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="114466744" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259176" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1209" name="/encoder/mid_block/attentions.0/Transpose_6" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_6_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1210" name="Constant_485" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_485" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_839">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1211" name="/encoder/mid_block/attentions.0/Unsqueeze_27" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_27, Constant_485" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_27_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1212" name="Constant_487" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_487" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_841">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1213" name="/encoder/mid_block/attentions.0/Unsqueeze_28" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_28, Constant_487" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_28_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1214" name="Constant_489" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_489" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_843">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1215" name="/encoder/mid_block/attentions.0/Unsqueeze_29" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_29, Constant_489" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_29_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1216" name="/encoder/mid_block/attentions.0/Concat_9" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_9" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_9_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1217" name="/encoder/mid_block/attentions.0/Reshape_8" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_8" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_8_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1218" name="onnx::MatMul_1026" type="Const" version="opset1">
<data element_type="f32" shape="512, 512" offset="116568028" size="1048576" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_1026" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_1026">
<dim>512</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1219" name="/encoder/mid_block/attentions.0/to_out.0/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_out.0/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_out.0/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1220" name="/encoder/mid_block/attentions.0/to_out.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_out.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_out.0/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1221" name="Constant_259256" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="117616604" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259256" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1222" name="/encoder/mid_block/attentions.0/Transpose_7" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_7_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1223" name="Constant_258464" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="117616604" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258464" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1224" name="/encoder/mid_block/attentions.0/Transpose" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1225" name="/encoder/mid_block/attentions.0/Shape_4" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_4_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1226" name="/encoder/mid_block/attentions.0/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_4_output_0" />
</output>
</layer>
<layer id="1227" name="Constant_258468" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258468" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1228" name="/encoder/mid_block/attentions.0/Gather_4" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_4, /encoder/mid_block/attentions.0/Gather_4, Constant_258468" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_4_output_0" />
</output>
</layer>
<layer id="1229" name="Constant_496" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_496" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_851">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1230" name="/encoder/mid_block/attentions.0/Unsqueeze_30" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_30, Constant_496" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_30_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1231" name="Constant_498" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_498" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_853">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1232" name="/encoder/mid_block/attentions.0/Unsqueeze_31" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_31, Constant_498" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_31_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1233" name="Constant_500" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_500" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_855">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1234" name="/encoder/mid_block/attentions.0/Unsqueeze_32" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_32, Constant_500" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_32_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1235" name="Constant_502" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_502" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_857">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1236" name="/encoder/mid_block/attentions.0/Unsqueeze_33" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_33, Constant_502" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_33_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1237" name="/encoder/mid_block/attentions.0/Concat_10" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_10" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_10_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1238" name="/encoder/mid_block/attentions.0/Reshape_9" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_9" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_9_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1239" name="/encoder/mid_block/attentions.0/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Add_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Add_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1240" name="/encoder/mid_block/attentions.0/Constant_30" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_30" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/Constant_30_output_0" />
</output>
</layer>
<layer id="1241" name="/encoder/mid_block/attentions.0/Div_4" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Div_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Div_4_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1242" name="/encoder/mid_block/resnets.1/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.1/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1243" name="/encoder/mid_block/resnets.1/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1244" name="Constant_259295" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259295" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1245" name="MVN_259296" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_259296" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1246" name="/encoder/mid_block/resnets.1/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1247" name="Constant_259299" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259299" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1248" name="ShapeOf_259305" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259305" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1249" name="ShapeOf_259297" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259297" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1250" name="ShapeOf_259298" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259298" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1251" name="Constant_259300" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259300" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1252" name="Subtract_259301" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259300, Subtract_259301" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1253" name="Broadcast_259302" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259302, Constant_259299" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1254" name="Concat_259306" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259306, Constant_259299, ShapeOf_259305" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1255" name="Reshape_259307" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259307" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1256" name="Multiply_259310" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259310" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1257" name="/encoder/mid_block/resnets.1/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1258" name="Constant_259311" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259311" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1259" name="ShapeOf_259317" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259317" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1260" name="Constant_259312" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259312" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1261" name="Subtract_259313" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259312, Subtract_259313" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1262" name="Broadcast_259314" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259314, Constant_259311" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1263" name="Concat_259318" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259318, Constant_259311, ShapeOf_259317" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1264" name="Reshape_259319" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259319" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1265" name="/encoder/mid_block/resnets.1/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1266" name="/encoder/mid_block/resnets.1/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.1/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1267" name="/encoder/mid_block/resnets.1/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1268" name="onnx::Mul_1027" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="117616628" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1027" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1027">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1269" name="/encoder/mid_block/resnets.1/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1270" name="onnx::Add_1028" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="117618676" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1028" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1028">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1271" name="/encoder/mid_block/resnets.1/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1272" name="/encoder/mid_block/resnets.1/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1273" name="/encoder/mid_block/resnets.1/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1274" name="encoder.mid_block.resnets.1.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="117620724" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv1.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1275" name="/encoder/mid_block/resnets.1/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1276" name="encoder.mid_block.resnets.1.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="127057908" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv1.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1277" name="Constant_259338" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259338" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1278" name="ShapeOf_259344" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259344" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1279" name="ShapeOf_259336" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259336" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1280" name="ShapeOf_259337" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259337" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1281" name="Constant_259339" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259339" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1282" name="Subtract_259340" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259339, Subtract_259340" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1283" name="Broadcast_259341" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259341, Constant_259338" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="1284" name="Concat_259345" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259345, Constant_259338, ShapeOf_259344" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1285" name="Reshape_259346" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259346" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1286" name="/encoder/mid_block/resnets.1/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/conv1/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1287" name="/encoder/mid_block/resnets.1/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.1/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1288" name="/encoder/mid_block/resnets.1/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1289" name="Constant_259356" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259356" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1290" name="MVN_259357" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_259357" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1291" name="/encoder/mid_block/resnets.1/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1292" name="Constant_259360" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259360" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1293" name="ShapeOf_259366" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259366" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1294" name="ShapeOf_259358" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259358" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1295" name="ShapeOf_259359" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259359" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1296" name="Constant_259361" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259361" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1297" name="Subtract_259362" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259361, Subtract_259362" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1298" name="Broadcast_259363" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259363, Constant_259360" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1299" name="Concat_259367" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259367, Constant_259360, ShapeOf_259366" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1300" name="Reshape_259368" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259368" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1301" name="Multiply_259371" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259371" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1302" name="/encoder/mid_block/resnets.1/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1303" name="Constant_259372" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259372" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1304" name="ShapeOf_259378" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259378" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1305" name="Constant_259373" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259373" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1306" name="Subtract_259374" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259373, Subtract_259374" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1307" name="Broadcast_259375" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259375, Constant_259372" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1308" name="Concat_259379" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259379, Constant_259372, ShapeOf_259378" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1309" name="Reshape_259380" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259380" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1310" name="/encoder/mid_block/resnets.1/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1311" name="/encoder/mid_block/resnets.1/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.1/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1312" name="/encoder/mid_block/resnets.1/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1313" name="onnx::Mul_1029" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="127059956" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1029" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1029">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1314" name="/encoder/mid_block/resnets.1/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1315" name="onnx::Add_1030" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="127062004" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1030" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1030">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1316" name="/encoder/mid_block/resnets.1/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1317" name="/encoder/mid_block/resnets.1/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1318" name="/encoder/mid_block/resnets.1/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1319" name="encoder.mid_block.resnets.1.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 512, 3, 3" offset="127064052" size="9437184" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv2.weight">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1320" name="/encoder/mid_block/resnets.1/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1321" name="encoder.mid_block.resnets.1.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="512" offset="136501236" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv2.bias">
<dim>512</dim>
</port>
</output>
</layer>
<layer id="1322" name="Constant_259399" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259399" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1323" name="ShapeOf_259405" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="14262184" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259405" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1324" name="ShapeOf_259397" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259397" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1325" name="ShapeOf_259398" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259398" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1326" name="Constant_259400" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259400" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1327" name="Subtract_259401" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259400, Subtract_259401" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1328" name="Broadcast_259402" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259402, Constant_259399" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="1329" name="Concat_259406" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259406, Constant_259399, ShapeOf_259405" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1330" name="Reshape_259407" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259407" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1331" name="/encoder/mid_block/resnets.1/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/conv2/Conv_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1332" name="/encoder/mid_block/resnets.1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1333" name="/encoder/mid_block/resnets.1/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/Constant_output_0" />
</output>
</layer>
<layer id="1334" name="/encoder/mid_block/resnets.1/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/Div_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1335" name="/encoder/conv_norm_out/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="18456" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/conv_norm_out/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1336" name="/encoder/conv_norm_out/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1337" name="Constant_259420" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259420" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1338" name="MVN_259421" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_259421" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1339" name="/encoder/conv_norm_out/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18480" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/conv_norm_out/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1340" name="Constant_259424" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259424" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1341" name="ShapeOf_259430" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259430" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1342" name="ShapeOf_259422" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259422" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1343" name="ShapeOf_259423" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259423" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1344" name="Constant_259425" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259425" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1345" name="Subtract_259426" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259425, Subtract_259426" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1346" name="Broadcast_259427" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259427, Constant_259424" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1347" name="Concat_259431" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259431, Constant_259424, ShapeOf_259430" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1348" name="Reshape_259432" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259432" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1349" name="Multiply_259435" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259435" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1350" name="/encoder/conv_norm_out/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="18616" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/conv_norm_out/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="1351" name="Constant_259436" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259436" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1352" name="ShapeOf_259442" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18608" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259442" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1353" name="Constant_259437" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259437" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1354" name="Subtract_259438" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259437, Subtract_259438" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1355" name="Broadcast_259439" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259439, Constant_259436" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1356" name="Concat_259443" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259443, Constant_259436, ShapeOf_259442" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1357" name="Reshape_259444" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259444" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1358" name="/encoder/conv_norm_out/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1359" name="/encoder/conv_norm_out/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/conv_norm_out/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1360" name="/encoder/conv_norm_out/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Reshape_1_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1361" name="onnx::Mul_1031" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="136503284" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_1031" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_1031">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1362" name="/encoder/conv_norm_out/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1363" name="onnx::Add_1032" type="Const" version="opset1">
<data element_type="f32" shape="512, 1, 1" offset="136505332" size="2048" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_1032" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_1032">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1364" name="/encoder/conv_norm_out/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Add_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1365" name="/encoder/conv_act/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_act/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/conv_act/Sigmoid_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1366" name="/encoder/conv_act/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_act/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_act/Mul_output_0">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1367" name="encoder.conv_out.weight" type="Const" version="opset1">
<data element_type="f32" shape="8, 512, 3, 3" offset="136507380" size="147456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_out.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_out.weight">
<dim>8</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="1368" name="/encoder/conv_out/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_out/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>512</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>8</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1369" name="encoder.conv_out.bias" type="Const" version="opset1">
<data element_type="f32" shape="8" offset="136654836" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_out.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_out.bias">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="1370" name="Constant_259463" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259463" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1371" name="ShapeOf_259469" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="136654868" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259469" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1372" name="ShapeOf_259461" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259461" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1373" name="ShapeOf_259462" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259462" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1374" name="Constant_259464" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259464" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1375" name="Subtract_259465" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259464, Subtract_259465" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1376" name="Broadcast_259466" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259466, Constant_259463" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="1377" name="Concat_259470" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259470, Constant_259463, ShapeOf_259469" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1378" name="Reshape_259471" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259471" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1379" name="/encoder/conv_out/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_out/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_out/Conv_output_0">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1380" name="quant_conv.weight" type="Const" version="opset1">
<data element_type="f32" shape="8, 8, 1, 1" offset="136654876" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="quant_conv.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="quant_conv.weight">
<dim>8</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1381" name="/quant_conv/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/quant_conv/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>8</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1382" name="quant_conv.bias" type="Const" version="opset1">
<data element_type="f32" shape="8" offset="136655132" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="quant_conv.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="quant_conv.bias">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="1383" name="Constant_259478" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259478" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1384" name="ShapeOf_259484" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="136654868" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259484" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1385" name="ShapeOf_259476" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259476" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1386" name="ShapeOf_259477" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259477" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1387" name="Constant_259479" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259479" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1388" name="Subtract_259480" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259479, Subtract_259480" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1389" name="Broadcast_259481" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259481, Constant_259478" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="1390" name="Concat_259485" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_259485, Constant_259478, ShapeOf_259484" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1391" name="Reshape_259486" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_259486" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1392" name="/quant_conv/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/quant_conv/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/quant_conv/Conv_output_0">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1393" name="/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1394" name="/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1395" name="/Constant" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1396" name="Constant_259492" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259492" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1397" name="/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant, /Gather, Constant_259492" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1398" name="/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_2_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1399" name="/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Add, /Constant_2" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Add_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1400" name="/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1401" name="/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_3, /Div" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Div_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1402" name="/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1403" name="/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_4, /Mul" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Mul_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1404" name="Constant_259501" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259501" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1405" name="ShapeOf_259502" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259502" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1406" name="Broadcast_259503" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259503, Constant_259501, ShapeOf_259502" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1407" name="/Slice" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/Constant, /Slice" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="FP32" names="/Slice_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1408" name="/Constant_7" type="Const" version="opset1">
<data element_type="f32" shape="" offset="136655164" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_7" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/Constant_7_output_0" />
</output>
</layer>
<layer id="1409" name="/Constant_6" type="Const" version="opset1">
<data element_type="f32" shape="" offset="136655168" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_6" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/Constant_6_output_0" />
</output>
</layer>
<layer id="1410" name="/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1411" name="/Mul_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_5, /Mul_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Mul_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1412" name="Constant_259554" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259554" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1413" name="ShapeOf_259555" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259555" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1414" name="Broadcast_259556" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_259556, Constant_259554" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1415" name="/Slice_1" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/Slice_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="FP32" names="/Slice_1_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1416" name="Maximum_259609" type="Maximum" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Maximum_259609" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1417" name="/Clip" type="Minimum" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Clip" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/Clip_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1418" name="/Constant_8" type="Const" version="opset1">
<data element_type="f32" shape="" offset="136655172" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_8" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/Constant_8_output_0" />
</output>
</layer>
<layer id="1419" name="/Mul_2" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Mul_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/Mul_2_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1420" name="/Exp" type="Exp" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Exp" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/Exp_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1421" name="Constant_259678" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259678" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1422" name="Constant_259640" type="Const" version="opset1">
<data element_type="f32" shape="" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259640" />
</rt_info>
<output>
<port id="0" precision="FP32" />
</output>
</layer>
<layer id="1423" name="/Shape_1" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1424" name="/Constant_9" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_9" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_9_output_0" />
</output>
</layer>
<layer id="1425" name="Constant_259616" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259616" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1426" name="/Gather_1" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_9, /Gather_1, Constant_259616" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_1_output_0" />
</output>
</layer>
<layer id="1427" name="Constant_581" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_581" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_950">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1428" name="/Unsqueeze" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze, Constant_581" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1429" name="/Shape_2" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_2_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1430" name="/Constant_10" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18432" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_10" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_10_output_0" />
</output>
</layer>
<layer id="1431" name="Constant_259620" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259620" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1432" name="/Gather_2" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_10, /Gather_2, Constant_259620" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_2_output_0" />
</output>
</layer>
<layer id="1433" name="Constant_583" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_583" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_952">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1434" name="/Unsqueeze_1" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze_1, Constant_583" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1435" name="/Shape_3" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_3_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1436" name="/Constant_11" type="Const" version="opset1">
<data element_type="i64" shape="" offset="18448" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_11" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_11_output_0" />
</output>
</layer>
<layer id="1437" name="Constant_259624" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259624" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1438" name="/Gather_3" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_11, /Gather_3, Constant_259624" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_3_output_0" />
</output>
</layer>
<layer id="1439" name="Constant_585" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_585" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_954">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1440" name="/Unsqueeze_2" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze_2, Constant_585" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_2_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1441" name="/Shape_4" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_4_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1442" name="/Constant_12" type="Const" version="opset1">
<data element_type="i64" shape="" offset="113414064" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_12" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_12_output_0" />
</output>
</layer>
<layer id="1443" name="Constant_259628" type="Const" version="opset1">
<data element_type="i64" shape="" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259628" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="1444" name="/Gather_4" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_12, /Gather_4, Constant_259628" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_4_output_0" />
</output>
</layer>
<layer id="1445" name="Constant_587" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="2384220" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_587" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_956">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1446" name="/Unsqueeze_3" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze_3, Constant_587" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1447" name="/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Concat" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/Concat_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1448" name="/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/ConstantOfShape" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/ConstantOfShape_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
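		<!-- Noise-sampling subgraph (layers 1449 to 1465): the ONNX RandomNormalLike op decomposed into two RandomUniform draws combined Box-Muller style, sqrt(-2*ln(U1)) * cos(2*pi*U2); the exact constant values are stored in the external .bin. /Mul_3 then scales the resulting noise by the standard-deviation tensor computed earlier in the graph, and latent_sample adds the mean: the VAE reparameterization z = mean + std * eps. -->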
<layer id="1449" name="ShapeOf_259660" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_259660" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="1450" name="Constant_259661" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259661" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1451" name="Constant_259662" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="1201464" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259662" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1452" name="RandomUniform_259663" type="RandomUniform" version="opset8">
<data output_type="f32" op_seed="1478" global_seed="0" />
<rt_info>
<attribute name="fused_names" version="0" value="RandomUniform_259663" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1453" name="Log_259672" type="Log" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="Log_259672" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1454" name="Constant_259670" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="136655176" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259670" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1455" name="Multiply_259673" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259673" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1456" name="Sqrt_259674" type="Sqrt" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="Sqrt_259674" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1457" name="RandomUniform_259668" type="RandomUniform" version="opset8">
<data output_type="f32" op_seed="4710" global_seed="0" />
<rt_info>
<attribute name="fused_names" version="0" value="RandomUniform_259668" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1458" name="Constant_259669" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="136655180" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259669" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1459" name="Multiply_259675" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259675" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1460" name="Multiply_259676" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259676" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1461" name="Cos_259677" type="Cos" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="Cos_259677" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1462" name="Multiply_259680" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259680" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1463" name="Multiply_259681" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_259681" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1464" name="Constant_259679" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="2384284" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_259679" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="1465" name="/RandomNormalLike" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/RandomNormalLike" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/RandomNormalLike_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1466" name="/Cast" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="/Cast" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/Cast_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1467" name="/Mul_3" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Mul_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/Mul_3_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1468" name="latent_sample" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="latent_sample" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="latent_sample">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1469" name="latent_sample/sink_port_0" type="Result" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="latent_sample/sink_port_0" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
</layer>
</layers>
<edges>
<edge from-layer="0" from-port="0" to-layer="4" to-port="0" />
<edge from-layer="1" from-port="0" to-layer="1220" to-port="0" />
<edge from-layer="2" from-port="0" to-layer="1038" to-port="0" />
<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
<edge from-layer="4" from-port="2" to-layer="15" to-port="0" />
<edge from-layer="4" from-port="2" to-layer="8" to-port="0" />
<edge from-layer="5" from-port="0" to-layer="14" to-port="0" />
<edge from-layer="6" from-port="0" to-layer="12" to-port="0" />
<edge from-layer="6" from-port="0" to-layer="13" to-port="0" />
<edge from-layer="7" from-port="0" to-layer="13" to-port="1" />
<edge from-layer="8" from-port="1" to-layer="9" to-port="0" />
<edge from-layer="9" from-port="1" to-layer="11" to-port="0" />
<edge from-layer="10" from-port="0" to-layer="11" to-port="1" />
<edge from-layer="11" from-port="2" to-layer="12" to-port="1" />
<edge from-layer="12" from-port="2" to-layer="13" to-port="2" />
<edge from-layer="13" from-port="3" to-layer="14" to-port="1" />
<edge from-layer="14" from-port="2" to-layer="15" to-port="1" />
<edge from-layer="15" from-port="2" to-layer="106" to-port="0" />
<edge from-layer="15" from-port="2" to-layer="40" to-port="0" />
<edge from-layer="15" from-port="2" to-layer="17" to-port="0" />
<edge from-layer="16" from-port="0" to-layer="17" to-port="1" />
<edge from-layer="17" from-port="2" to-layer="19" to-port="0" />
<edge from-layer="18" from-port="0" to-layer="19" to-port="1" />
<edge from-layer="19" from-port="2" to-layer="23" to-port="0" />
<edge from-layer="19" from-port="2" to-layer="30" to-port="0" />
<edge from-layer="20" from-port="0" to-layer="29" to-port="0" />
<edge from-layer="21" from-port="0" to-layer="28" to-port="0" />
<edge from-layer="21" from-port="0" to-layer="27" to-port="0" />
<edge from-layer="22" from-port="0" to-layer="28" to-port="1" />
<edge from-layer="23" from-port="1" to-layer="24" to-port="0" />
<edge from-layer="24" from-port="1" to-layer="35" to-port="0" />
<edge from-layer="24" from-port="1" to-layer="26" to-port="0" />
<edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
<edge from-layer="26" from-port="2" to-layer="27" to-port="1" />
<edge from-layer="27" from-port="2" to-layer="28" to-port="2" />
<edge from-layer="28" from-port="3" to-layer="29" to-port="1" />
<edge from-layer="29" from-port="2" to-layer="30" to-port="1" />
<edge from-layer="30" from-port="2" to-layer="39" to-port="0" />
<edge from-layer="31" from-port="0" to-layer="38" to-port="0" />
<edge from-layer="32" from-port="0" to-layer="37" to-port="0" />
<edge from-layer="32" from-port="0" to-layer="36" to-port="0" />
<edge from-layer="33" from-port="0" to-layer="37" to-port="1" />
<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
<edge from-layer="35" from-port="2" to-layer="36" to-port="1" />
<edge from-layer="36" from-port="2" to-layer="37" to-port="2" />
<edge from-layer="37" from-port="3" to-layer="38" to-port="1" />
<edge from-layer="38" from-port="2" to-layer="39" to-port="1" />
<edge from-layer="39" from-port="2" to-layer="41" to-port="0" />
<edge from-layer="40" from-port="1" to-layer="41" to-port="1" />
<edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
<edge from-layer="45" from-port="2" to-layer="46" to-port="0" />
<edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
<edge from-layer="46" from-port="1" to-layer="47" to-port="1" />
<edge from-layer="47" from-port="2" to-layer="49" to-port="0" />
<edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
<edge from-layer="49" from-port="2" to-layer="53" to-port="0" />
<edge from-layer="49" from-port="2" to-layer="60" to-port="0" />
<edge from-layer="50" from-port="0" to-layer="59" to-port="0" />
<edge from-layer="51" from-port="0" to-layer="57" to-port="0" />
<edge from-layer="51" from-port="0" to-layer="58" to-port="0" />
<edge from-layer="52" from-port="0" to-layer="58" to-port="1" />
<edge from-layer="53" from-port="1" to-layer="54" to-port="0" />
<edge from-layer="54" from-port="1" to-layer="56" to-port="0" />
<edge from-layer="55" from-port="0" to-layer="56" to-port="1" />
<edge from-layer="56" from-port="2" to-layer="57" to-port="1" />
<edge from-layer="57" from-port="2" to-layer="58" to-port="2" />
<edge from-layer="58" from-port="3" to-layer="59" to-port="1" />
<edge from-layer="59" from-port="2" to-layer="60" to-port="1" />
<edge from-layer="60" from-port="2" to-layer="62" to-port="0" />
<edge from-layer="60" from-port="2" to-layer="85" to-port="0" />
<edge from-layer="61" from-port="0" to-layer="62" to-port="1" />
<edge from-layer="62" from-port="2" to-layer="64" to-port="0" />
<edge from-layer="63" from-port="0" to-layer="64" to-port="1" />
<edge from-layer="64" from-port="2" to-layer="68" to-port="0" />
<edge from-layer="64" from-port="2" to-layer="75" to-port="0" />
<edge from-layer="65" from-port="0" to-layer="74" to-port="0" />
<edge from-layer="66" from-port="0" to-layer="72" to-port="0" />
<edge from-layer="66" from-port="0" to-layer="73" to-port="0" />
<edge from-layer="67" from-port="0" to-layer="73" to-port="1" />
<edge from-layer="68" from-port="1" to-layer="69" to-port="0" />
<edge from-layer="69" from-port="1" to-layer="71" to-port="0" />
<edge from-layer="69" from-port="1" to-layer="80" to-port="0" />
<edge from-layer="70" from-port="0" to-layer="71" to-port="1" />
<edge from-layer="71" from-port="2" to-layer="72" to-port="1" />
<edge from-layer="72" from-port="2" to-layer="73" to-port="2" />
<edge from-layer="73" from-port="3" to-layer="74" to-port="1" />
<edge from-layer="74" from-port="2" to-layer="75" to-port="1" />
<edge from-layer="75" from-port="2" to-layer="84" to-port="0" />
<edge from-layer="76" from-port="0" to-layer="83" to-port="0" />
<edge from-layer="77" from-port="0" to-layer="81" to-port="0" />
<edge from-layer="77" from-port="0" to-layer="82" to-port="0" />
<edge from-layer="78" from-port="0" to-layer="82" to-port="1" />
<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
<edge from-layer="80" from-port="2" to-layer="81" to-port="1" />
<edge from-layer="81" from-port="2" to-layer="82" to-port="2" />
<edge from-layer="82" from-port="3" to-layer="83" to-port="1" />
<edge from-layer="83" from-port="2" to-layer="84" to-port="1" />
<edge from-layer="84" from-port="2" to-layer="86" to-port="0" />
<edge from-layer="85" from-port="1" to-layer="86" to-port="1" />
<edge from-layer="86" from-port="2" to-layer="88" to-port="0" />
<edge from-layer="87" from-port="0" to-layer="88" to-port="1" />
<edge from-layer="88" from-port="2" to-layer="90" to-port="0" />
<edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
<edge from-layer="90" from-port="2" to-layer="91" to-port="0" />
<edge from-layer="90" from-port="2" to-layer="92" to-port="0" />
<edge from-layer="91" from-port="1" to-layer="92" to-port="1" />
<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
<edge from-layer="94" from-port="2" to-layer="98" to-port="0" />
<edge from-layer="94" from-port="2" to-layer="105" to-port="0" />
<edge from-layer="95" from-port="0" to-layer="104" to-port="0" />
<edge from-layer="96" from-port="0" to-layer="103" to-port="0" />
<edge from-layer="96" from-port="0" to-layer="102" to-port="0" />
<edge from-layer="97" from-port="0" to-layer="103" to-port="1" />
<edge from-layer="98" from-port="1" to-layer="99" to-port="0" />
<edge from-layer="99" from-port="1" to-layer="101" to-port="0" />
<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
<edge from-layer="101" from-port="2" to-layer="102" to-port="1" />
<edge from-layer="102" from-port="2" to-layer="103" to-port="2" />
<edge from-layer="103" from-port="3" to-layer="104" to-port="1" />
<edge from-layer="104" from-port="2" to-layer="105" to-port="1" />
<edge from-layer="105" from-port="2" to-layer="106" to-port="1" />
<edge from-layer="106" from-port="2" to-layer="108" to-port="0" />
<edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
<edge from-layer="108" from-port="2" to-layer="199" to-port="0" />
<edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
<edge from-layer="108" from-port="2" to-layer="133" to-port="0" />
<edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
<edge from-layer="110" from-port="2" to-layer="112" to-port="0" />
<edge from-layer="111" from-port="0" to-layer="112" to-port="1" />
<edge from-layer="112" from-port="2" to-layer="116" to-port="0" />
<edge from-layer="112" from-port="2" to-layer="123" to-port="0" />
<edge from-layer="113" from-port="0" to-layer="122" to-port="0" />
<edge from-layer="114" from-port="0" to-layer="121" to-port="0" />
<edge from-layer="114" from-port="0" to-layer="120" to-port="0" />
<edge from-layer="115" from-port="0" to-layer="121" to-port="1" />
<edge from-layer="116" from-port="1" to-layer="117" to-port="0" />
<edge from-layer="117" from-port="1" to-layer="119" to-port="0" />
<edge from-layer="117" from-port="1" to-layer="128" to-port="0" />
<edge from-layer="118" from-port="0" to-layer="119" to-port="1" />
<edge from-layer="119" from-port="2" to-layer="120" to-port="1" />
<edge from-layer="120" from-port="2" to-layer="121" to-port="2" />
<edge from-layer="121" from-port="3" to-layer="122" to-port="1" />
<edge from-layer="122" from-port="2" to-layer="123" to-port="1" />
<edge from-layer="123" from-port="2" to-layer="132" to-port="0" />
<edge from-layer="124" from-port="0" to-layer="131" to-port="0" />
<edge from-layer="125" from-port="0" to-layer="129" to-port="0" />
<edge from-layer="125" from-port="0" to-layer="130" to-port="0" />
<edge from-layer="126" from-port="0" to-layer="130" to-port="1" />
<edge from-layer="127" from-port="0" to-layer="128" to-port="1" />
<edge from-layer="128" from-port="2" to-layer="129" to-port="1" />
<edge from-layer="129" from-port="2" to-layer="130" to-port="2" />
<edge from-layer="130" from-port="3" to-layer="131" to-port="1" />
<edge from-layer="131" from-port="2" to-layer="132" to-port="1" />
<edge from-layer="132" from-port="2" to-layer="134" to-port="0" />
<edge from-layer="133" from-port="1" to-layer="134" to-port="1" />
<edge from-layer="134" from-port="2" to-layer="136" to-port="0" />
<edge from-layer="135" from-port="0" to-layer="136" to-port="1" />
<edge from-layer="136" from-port="2" to-layer="138" to-port="0" />
<edge from-layer="137" from-port="0" to-layer="138" to-port="1" />
<edge from-layer="138" from-port="2" to-layer="139" to-port="0" />
<edge from-layer="138" from-port="2" to-layer="140" to-port="0" />
<edge from-layer="139" from-port="1" to-layer="140" to-port="1" />
<edge from-layer="140" from-port="2" to-layer="142" to-port="0" />
<edge from-layer="141" from-port="0" to-layer="142" to-port="1" />
<edge from-layer="142" from-port="2" to-layer="146" to-port="0" />
<edge from-layer="142" from-port="2" to-layer="153" to-port="0" />
<edge from-layer="143" from-port="0" to-layer="152" to-port="0" />
<edge from-layer="144" from-port="0" to-layer="151" to-port="0" />
<edge from-layer="144" from-port="0" to-layer="150" to-port="0" />
<edge from-layer="145" from-port="0" to-layer="151" to-port="1" />
<edge from-layer="146" from-port="1" to-layer="147" to-port="0" />
<edge from-layer="147" from-port="1" to-layer="149" to-port="0" />
<edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
<edge from-layer="149" from-port="2" to-layer="150" to-port="1" />
<edge from-layer="150" from-port="2" to-layer="151" to-port="2" />
<edge from-layer="151" from-port="3" to-layer="152" to-port="1" />
<edge from-layer="152" from-port="2" to-layer="153" to-port="1" />
<edge from-layer="153" from-port="2" to-layer="155" to-port="0" />
<edge from-layer="153" from-port="2" to-layer="178" to-port="0" />
<edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
<edge from-layer="155" from-port="2" to-layer="157" to-port="0" />
<edge from-layer="156" from-port="0" to-layer="157" to-port="1" />
<edge from-layer="157" from-port="2" to-layer="161" to-port="0" />
<edge from-layer="157" from-port="2" to-layer="168" to-port="0" />
<edge from-layer="158" from-port="0" to-layer="167" to-port="0" />
<edge from-layer="159" from-port="0" to-layer="166" to-port="0" />
<edge from-layer="159" from-port="0" to-layer="165" to-port="0" />
<edge from-layer="160" from-port="0" to-layer="166" to-port="1" />
<edge from-layer="161" from-port="1" to-layer="162" to-port="0" />
<edge from-layer="162" from-port="1" to-layer="173" to-port="0" />
<edge from-layer="162" from-port="1" to-layer="164" to-port="0" />
<edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
<edge from-layer="164" from-port="2" to-layer="165" to-port="1" />
<edge from-layer="165" from-port="2" to-layer="166" to-port="2" />
<edge from-layer="166" from-port="3" to-layer="167" to-port="1" />
<edge from-layer="167" from-port="2" to-layer="168" to-port="1" />
<edge from-layer="168" from-port="2" to-layer="177" to-port="0" />
<edge from-layer="169" from-port="0" to-layer="176" to-port="0" />
<edge from-layer="170" from-port="0" to-layer="174" to-port="0" />
<edge from-layer="170" from-port="0" to-layer="175" to-port="0" />
<edge from-layer="171" from-port="0" to-layer="175" to-port="1" />
<edge from-layer="172" from-port="0" to-layer="173" to-port="1" />
<edge from-layer="173" from-port="2" to-layer="174" to-port="1" />
<edge from-layer="174" from-port="2" to-layer="175" to-port="2" />
<edge from-layer="175" from-port="3" to-layer="176" to-port="1" />
<edge from-layer="176" from-port="2" to-layer="177" to-port="1" />
<edge from-layer="177" from-port="2" to-layer="179" to-port="0" />
<edge from-layer="178" from-port="1" to-layer="179" to-port="1" />
<edge from-layer="179" from-port="2" to-layer="181" to-port="0" />
<edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
<edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
<edge from-layer="182" from-port="0" to-layer="183" to-port="1" />
<edge from-layer="183" from-port="2" to-layer="184" to-port="0" />
<edge from-layer="183" from-port="2" to-layer="185" to-port="0" />
<edge from-layer="184" from-port="1" to-layer="185" to-port="1" />
<edge from-layer="185" from-port="2" to-layer="187" to-port="0" />
<edge from-layer="186" from-port="0" to-layer="187" to-port="1" />
<edge from-layer="187" from-port="2" to-layer="191" to-port="0" />
<edge from-layer="187" from-port="2" to-layer="198" to-port="0" />
<edge from-layer="188" from-port="0" to-layer="197" to-port="0" />
<edge from-layer="189" from-port="0" to-layer="195" to-port="0" />
<edge from-layer="189" from-port="0" to-layer="196" to-port="0" />
<edge from-layer="190" from-port="0" to-layer="196" to-port="1" />
<edge from-layer="191" from-port="1" to-layer="192" to-port="0" />
<edge from-layer="192" from-port="1" to-layer="194" to-port="0" />
<edge from-layer="193" from-port="0" to-layer="194" to-port="1" />
<edge from-layer="194" from-port="2" to-layer="195" to-port="1" />
<edge from-layer="195" from-port="2" to-layer="196" to-port="2" />
<edge from-layer="196" from-port="3" to-layer="197" to-port="1" />
<edge from-layer="197" from-port="2" to-layer="198" to-port="1" />
<edge from-layer="198" from-port="2" to-layer="199" to-port="1" />
<edge from-layer="199" from-port="2" to-layer="201" to-port="0" />
<edge from-layer="200" from-port="0" to-layer="201" to-port="1" />
<edge from-layer="201" from-port="2" to-layer="222" to-port="0" />
<edge from-layer="202" from-port="0" to-layer="206" to-port="0" />
<edge from-layer="203" from-port="0" to-layer="205" to-port="0" />
<edge from-layer="204" from-port="0" to-layer="205" to-port="1" />
<edge from-layer="205" from-port="2" to-layer="206" to-port="1" />
<edge from-layer="206" from-port="2" to-layer="208" to-port="0" />
<edge from-layer="207" from-port="0" to-layer="208" to-port="1" />
<edge from-layer="208" from-port="2" to-layer="213" to-port="0" />
<edge from-layer="209" from-port="0" to-layer="213" to-port="1" />
<edge from-layer="210" from-port="0" to-layer="213" to-port="2" />
<edge from-layer="211" from-port="0" to-layer="213" to-port="3" />
<edge from-layer="212" from-port="0" to-layer="213" to-port="4" />
<edge from-layer="213" from-port="5" to-layer="215" to-port="0" />
<edge from-layer="214" from-port="0" to-layer="215" to-port="1" />
<edge from-layer="215" from-port="2" to-layer="217" to-port="0" />
<edge from-layer="216" from-port="0" to-layer="217" to-port="1" />
<edge from-layer="217" from-port="2" to-layer="218" to-port="0" />
<edge from-layer="218" from-port="1" to-layer="220" to-port="0" />
<edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
<edge from-layer="220" from-port="2" to-layer="222" to-port="1" />
<edge from-layer="220" from-port="3" to-layer="222" to-port="2" />
<edge from-layer="221" from-port="0" to-layer="222" to-port="3" />
<edge from-layer="222" from-port="4" to-layer="224" to-port="0" />
<edge from-layer="223" from-port="0" to-layer="224" to-port="1" />
<edge from-layer="224" from-port="2" to-layer="228" to-port="0" />
<edge from-layer="224" from-port="2" to-layer="235" to-port="0" />
<edge from-layer="225" from-port="0" to-layer="234" to-port="0" />
<edge from-layer="226" from-port="0" to-layer="233" to-port="0" />
<edge from-layer="226" from-port="0" to-layer="232" to-port="0" />
<edge from-layer="227" from-port="0" to-layer="233" to-port="1" />
<edge from-layer="228" from-port="1" to-layer="229" to-port="0" />
<edge from-layer="229" from-port="1" to-layer="231" to-port="0" />
<edge from-layer="230" from-port="0" to-layer="231" to-port="1" />
<edge from-layer="231" from-port="2" to-layer="232" to-port="1" />
<edge from-layer="232" from-port="2" to-layer="233" to-port="2" />
<edge from-layer="233" from-port="3" to-layer="234" to-port="1" />
<edge from-layer="234" from-port="2" to-layer="235" to-port="1" />
<edge from-layer="235" from-port="2" to-layer="237" to-port="0" />
<edge from-layer="235" from-port="2" to-layer="250" to-port="0" />
<edge from-layer="235" from-port="2" to-layer="273" to-port="0" />
<edge from-layer="236" from-port="0" to-layer="237" to-port="1" />
<edge from-layer="237" from-port="2" to-layer="241" to-port="0" />
<edge from-layer="237" from-port="2" to-layer="248" to-port="0" />
<edge from-layer="238" from-port="0" to-layer="247" to-port="0" />
<edge from-layer="239" from-port="0" to-layer="246" to-port="0" />
<edge from-layer="239" from-port="0" to-layer="245" to-port="0" />
<edge from-layer="240" from-port="0" to-layer="246" to-port="1" />
<edge from-layer="241" from-port="1" to-layer="242" to-port="0" />
<edge from-layer="242" from-port="1" to-layer="244" to-port="0" />
<edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
<edge from-layer="244" from-port="2" to-layer="245" to-port="1" />
<edge from-layer="245" from-port="2" to-layer="246" to-port="2" />
<edge from-layer="246" from-port="3" to-layer="247" to-port="1" />
<edge from-layer="247" from-port="2" to-layer="248" to-port="1" />
<edge from-layer="248" from-port="2" to-layer="339" to-port="0" />
<edge from-layer="249" from-port="0" to-layer="250" to-port="1" />
<edge from-layer="250" from-port="2" to-layer="252" to-port="0" />
<edge from-layer="251" from-port="0" to-layer="252" to-port="1" />
<edge from-layer="252" from-port="2" to-layer="263" to-port="0" />
<edge from-layer="252" from-port="2" to-layer="256" to-port="0" />
<edge from-layer="253" from-port="0" to-layer="262" to-port="0" />
<edge from-layer="254" from-port="0" to-layer="261" to-port="0" />
<edge from-layer="254" from-port="0" to-layer="260" to-port="0" />
<edge from-layer="255" from-port="0" to-layer="261" to-port="1" />
<edge from-layer="256" from-port="1" to-layer="257" to-port="0" />
<edge from-layer="257" from-port="1" to-layer="268" to-port="0" />
<edge from-layer="257" from-port="1" to-layer="259" to-port="0" />
<edge from-layer="258" from-port="0" to-layer="259" to-port="1" />
<edge from-layer="259" from-port="2" to-layer="260" to-port="1" />
<edge from-layer="260" from-port="2" to-layer="261" to-port="2" />
<edge from-layer="261" from-port="3" to-layer="262" to-port="1" />
<edge from-layer="262" from-port="2" to-layer="263" to-port="1" />
<edge from-layer="263" from-port="2" to-layer="272" to-port="0" />
<edge from-layer="264" from-port="0" to-layer="271" to-port="0" />
<edge from-layer="265" from-port="0" to-layer="269" to-port="0" />
<edge from-layer="265" from-port="0" to-layer="270" to-port="0" />
<edge from-layer="266" from-port="0" to-layer="270" to-port="1" />
<edge from-layer="267" from-port="0" to-layer="268" to-port="1" />
<edge from-layer="268" from-port="2" to-layer="269" to-port="1" />
<edge from-layer="269" from-port="2" to-layer="270" to-port="2" />
<edge from-layer="270" from-port="3" to-layer="271" to-port="1" />
<edge from-layer="271" from-port="2" to-layer="272" to-port="1" />
<edge from-layer="272" from-port="2" to-layer="274" to-port="0" />
<edge from-layer="273" from-port="1" to-layer="274" to-port="1" />
<edge from-layer="274" from-port="2" to-layer="276" to-port="0" />
<edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
<edge from-layer="276" from-port="2" to-layer="278" to-port="0" />
<edge from-layer="277" from-port="0" to-layer="278" to-port="1" />
<edge from-layer="278" from-port="2" to-layer="279" to-port="0" />
<edge from-layer="278" from-port="2" to-layer="280" to-port="0" />
<edge from-layer="279" from-port="1" to-layer="280" to-port="1" />
<edge from-layer="280" from-port="2" to-layer="282" to-port="0" />
<edge from-layer="281" from-port="0" to-layer="282" to-port="1" />
<edge from-layer="282" from-port="2" to-layer="286" to-port="0" />
<edge from-layer="282" from-port="2" to-layer="293" to-port="0" />
<edge from-layer="283" from-port="0" to-layer="292" to-port="0" />
<edge from-layer="284" from-port="0" to-layer="291" to-port="0" />
<edge from-layer="284" from-port="0" to-layer="290" to-port="0" />
<edge from-layer="285" from-port="0" to-layer="291" to-port="1" />
<edge from-layer="286" from-port="1" to-layer="287" to-port="0" />
<edge from-layer="287" from-port="1" to-layer="289" to-port="0" />
<edge from-layer="288" from-port="0" to-layer="289" to-port="1" />
<edge from-layer="289" from-port="2" to-layer="290" to-port="1" />
<edge from-layer="290" from-port="2" to-layer="291" to-port="2" />
<edge from-layer="291" from-port="3" to-layer="292" to-port="1" />
<edge from-layer="292" from-port="2" to-layer="293" to-port="1" />
<edge from-layer="293" from-port="2" to-layer="318" to-port="0" />
<edge from-layer="293" from-port="2" to-layer="295" to-port="0" />
<edge from-layer="294" from-port="0" to-layer="295" to-port="1" />
<edge from-layer="295" from-port="2" to-layer="297" to-port="0" />
<edge from-layer="296" from-port="0" to-layer="297" to-port="1" />
<edge from-layer="297" from-port="2" to-layer="301" to-port="0" />
<edge from-layer="297" from-port="2" to-layer="308" to-port="0" />
<edge from-layer="298" from-port="0" to-layer="307" to-port="0" />
<edge from-layer="299" from-port="0" to-layer="305" to-port="0" />
<edge from-layer="299" from-port="0" to-layer="306" to-port="0" />
<edge from-layer="300" from-port="0" to-layer="306" to-port="1" />
<edge from-layer="301" from-port="1" to-layer="302" to-port="0" />
<edge from-layer="302" from-port="1" to-layer="313" to-port="0" />
<edge from-layer="302" from-port="1" to-layer="304" to-port="0" />
<edge from-layer="303" from-port="0" to-layer="304" to-port="1" />
<edge from-layer="304" from-port="2" to-layer="305" to-port="1" />
<edge from-layer="305" from-port="2" to-layer="306" to-port="2" />
<edge from-layer="306" from-port="3" to-layer="307" to-port="1" />
<edge from-layer="307" from-port="2" to-layer="308" to-port="1" />
<edge from-layer="308" from-port="2" to-layer="317" to-port="0" />
<edge from-layer="309" from-port="0" to-layer="316" to-port="0" />
<edge from-layer="310" from-port="0" to-layer="315" to-port="0" />
<edge from-layer="310" from-port="0" to-layer="314" to-port="0" />
<edge from-layer="311" from-port="0" to-layer="315" to-port="1" />
<edge from-layer="312" from-port="0" to-layer="313" to-port="1" />
<edge from-layer="313" from-port="2" to-layer="314" to-port="1" />
<edge from-layer="314" from-port="2" to-layer="315" to-port="2" />
<edge from-layer="315" from-port="3" to-layer="316" to-port="1" />
<edge from-layer="316" from-port="2" to-layer="317" to-port="1" />
<edge from-layer="317" from-port="2" to-layer="319" to-port="0" />
<edge from-layer="318" from-port="1" to-layer="319" to-port="1" />
<edge from-layer="319" from-port="2" to-layer="321" to-port="0" />
<edge from-layer="320" from-port="0" to-layer="321" to-port="1" />
<edge from-layer="321" from-port="2" to-layer="323" to-port="0" />
<edge from-layer="322" from-port="0" to-layer="323" to-port="1" />
<edge from-layer="323" from-port="2" to-layer="324" to-port="0" />
<edge from-layer="323" from-port="2" to-layer="325" to-port="0" />
<edge from-layer="324" from-port="1" to-layer="325" to-port="1" />
<edge from-layer="325" from-port="2" to-layer="327" to-port="0" />
<edge from-layer="326" from-port="0" to-layer="327" to-port="1" />
<edge from-layer="327" from-port="2" to-layer="331" to-port="0" />
<edge from-layer="327" from-port="2" to-layer="338" to-port="0" />
<edge from-layer="328" from-port="0" to-layer="337" to-port="0" />
<edge from-layer="329" from-port="0" to-layer="336" to-port="0" />
<edge from-layer="329" from-port="0" to-layer="335" to-port="0" />
<edge from-layer="330" from-port="0" to-layer="336" to-port="1" />
<edge from-layer="331" from-port="1" to-layer="332" to-port="0" />
<edge from-layer="332" from-port="1" to-layer="334" to-port="0" />
<edge from-layer="333" from-port="0" to-layer="334" to-port="1" />
<edge from-layer="334" from-port="2" to-layer="335" to-port="1" />
<edge from-layer="335" from-port="2" to-layer="336" to-port="2" />
<edge from-layer="336" from-port="3" to-layer="337" to-port="1" />
<edge from-layer="337" from-port="2" to-layer="338" to-port="1" />
<edge from-layer="338" from-port="2" to-layer="339" to-port="1" />
<edge from-layer="339" from-port="2" to-layer="341" to-port="0" />
<edge from-layer="340" from-port="0" to-layer="341" to-port="1" />
<edge from-layer="341" from-port="2" to-layer="432" to-port="0" />
<edge from-layer="341" from-port="2" to-layer="366" to-port="0" />
<edge from-layer="341" from-port="2" to-layer="343" to-port="0" />
<edge from-layer="342" from-port="0" to-layer="343" to-port="1" />
<edge from-layer="343" from-port="2" to-layer="345" to-port="0" />
<edge from-layer="344" from-port="0" to-layer="345" to-port="1" />
<edge from-layer="345" from-port="2" to-layer="349" to-port="0" />
<edge from-layer="345" from-port="2" to-layer="356" to-port="0" />
<edge from-layer="346" from-port="0" to-layer="355" to-port="0" />
<edge from-layer="347" from-port="0" to-layer="353" to-port="0" />
<edge from-layer="347" from-port="0" to-layer="354" to-port="0" />
<edge from-layer="348" from-port="0" to-layer="354" to-port="1" />
<edge from-layer="349" from-port="1" to-layer="350" to-port="0" />
<edge from-layer="350" from-port="1" to-layer="352" to-port="0" />
<edge from-layer="350" from-port="1" to-layer="361" to-port="0" />
<edge from-layer="351" from-port="0" to-layer="352" to-port="1" />
<edge from-layer="352" from-port="2" to-layer="353" to-port="1" />
<edge from-layer="353" from-port="2" to-layer="354" to-port="2" />
<edge from-layer="354" from-port="3" to-layer="355" to-port="1" />
<edge from-layer="355" from-port="2" to-layer="356" to-port="1" />
<edge from-layer="356" from-port="2" to-layer="365" to-port="0" />
<edge from-layer="357" from-port="0" to-layer="364" to-port="0" />
<edge from-layer="358" from-port="0" to-layer="363" to-port="0" />
<edge from-layer="358" from-port="0" to-layer="362" to-port="0" />
<edge from-layer="359" from-port="0" to-layer="363" to-port="1" />
<edge from-layer="360" from-port="0" to-layer="361" to-port="1" />
<edge from-layer="361" from-port="2" to-layer="362" to-port="1" />
<edge from-layer="362" from-port="2" to-layer="363" to-port="2" />
<edge from-layer="363" from-port="3" to-layer="364" to-port="1" />
<edge from-layer="364" from-port="2" to-layer="365" to-port="1" />
<edge from-layer="365" from-port="2" to-layer="367" to-port="0" />
<edge from-layer="366" from-port="1" to-layer="367" to-port="1" />
<edge from-layer="367" from-port="2" to-layer="369" to-port="0" />
<edge from-layer="368" from-port="0" to-layer="369" to-port="1" />
<edge from-layer="369" from-port="2" to-layer="371" to-port="0" />
<edge from-layer="370" from-port="0" to-layer="371" to-port="1" />
<edge from-layer="371" from-port="2" to-layer="372" to-port="0" />
<edge from-layer="371" from-port="2" to-layer="373" to-port="0" />
<edge from-layer="372" from-port="1" to-layer="373" to-port="1" />
<edge from-layer="373" from-port="2" to-layer="375" to-port="0" />
<edge from-layer="374" from-port="0" to-layer="375" to-port="1" />
<edge from-layer="375" from-port="2" to-layer="379" to-port="0" />
<edge from-layer="375" from-port="2" to-layer="386" to-port="0" />
<edge from-layer="376" from-port="0" to-layer="385" to-port="0" />
<edge from-layer="377" from-port="0" to-layer="383" to-port="0" />
<edge from-layer="377" from-port="0" to-layer="384" to-port="0" />
<edge from-layer="378" from-port="0" to-layer="384" to-port="1" />
<edge from-layer="379" from-port="1" to-layer="380" to-port="0" />
<edge from-layer="380" from-port="1" to-layer="382" to-port="0" />
<edge from-layer="381" from-port="0" to-layer="382" to-port="1" />
<edge from-layer="382" from-port="2" to-layer="383" to-port="1" />
<edge from-layer="383" from-port="2" to-layer="384" to-port="2" />
<edge from-layer="384" from-port="3" to-layer="385" to-port="1" />
<edge from-layer="385" from-port="2" to-layer="386" to-port="1" />
<edge from-layer="386" from-port="2" to-layer="388" to-port="0" />
<edge from-layer="386" from-port="2" to-layer="411" to-port="0" />
<edge from-layer="387" from-port="0" to-layer="388" to-port="1" />
<edge from-layer="388" from-port="2" to-layer="390" to-port="0" />
<edge from-layer="389" from-port="0" to-layer="390" to-port="1" />
<edge from-layer="390" from-port="2" to-layer="394" to-port="0" />
<edge from-layer="390" from-port="2" to-layer="401" to-port="0" />
<edge from-layer="391" from-port="0" to-layer="400" to-port="0" />
<edge from-layer="392" from-port="0" to-layer="398" to-port="0" />
<edge from-layer="392" from-port="0" to-layer="399" to-port="0" />
<edge from-layer="393" from-port="0" to-layer="399" to-port="1" />
<edge from-layer="394" from-port="1" to-layer="395" to-port="0" />
<edge from-layer="395" from-port="1" to-layer="406" to-port="0" />
<edge from-layer="395" from-port="1" to-layer="397" to-port="0" />
<edge from-layer="396" from-port="0" to-layer="397" to-port="1" />
<edge from-layer="397" from-port="2" to-layer="398" to-port="1" />
<edge from-layer="398" from-port="2" to-layer="399" to-port="2" />
<edge from-layer="399" from-port="3" to-layer="400" to-port="1" />
<edge from-layer="400" from-port="2" to-layer="401" to-port="1" />
<edge from-layer="401" from-port="2" to-layer="410" to-port="0" />
<edge from-layer="402" from-port="0" to-layer="409" to-port="0" />
<edge from-layer="403" from-port="0" to-layer="408" to-port="0" />
<edge from-layer="403" from-port="0" to-layer="407" to-port="0" />
<edge from-layer="404" from-port="0" to-layer="408" to-port="1" />
<edge from-layer="405" from-port="0" to-layer="406" to-port="1" />
<edge from-layer="406" from-port="2" to-layer="407" to-port="1" />
<edge from-layer="407" from-port="2" to-layer="408" to-port="2" />
<edge from-layer="408" from-port="3" to-layer="409" to-port="1" />
<edge from-layer="409" from-port="2" to-layer="410" to-port="1" />
<edge from-layer="410" from-port="2" to-layer="412" to-port="0" />
<edge from-layer="411" from-port="1" to-layer="412" to-port="1" />
<edge from-layer="412" from-port="2" to-layer="414" to-port="0" />
<edge from-layer="413" from-port="0" to-layer="414" to-port="1" />
<edge from-layer="414" from-port="2" to-layer="416" to-port="0" />
<edge from-layer="415" from-port="0" to-layer="416" to-port="1" />
<edge from-layer="416" from-port="2" to-layer="417" to-port="0" />
<edge from-layer="416" from-port="2" to-layer="418" to-port="0" />
<edge from-layer="417" from-port="1" to-layer="418" to-port="1" />
<edge from-layer="418" from-port="2" to-layer="420" to-port="0" />
<edge from-layer="419" from-port="0" to-layer="420" to-port="1" />
<edge from-layer="420" from-port="2" to-layer="424" to-port="0" />
<edge from-layer="420" from-port="2" to-layer="431" to-port="0" />
<edge from-layer="421" from-port="0" to-layer="430" to-port="0" />
<edge from-layer="422" from-port="0" to-layer="429" to-port="0" />
<edge from-layer="422" from-port="0" to-layer="428" to-port="0" />
<edge from-layer="423" from-port="0" to-layer="429" to-port="1" />
<edge from-layer="424" from-port="1" to-layer="425" to-port="0" />
<edge from-layer="425" from-port="1" to-layer="427" to-port="0" />
<edge from-layer="426" from-port="0" to-layer="427" to-port="1" />
<edge from-layer="427" from-port="2" to-layer="428" to-port="1" />
<edge from-layer="428" from-port="2" to-layer="429" to-port="2" />
<edge from-layer="429" from-port="3" to-layer="430" to-port="1" />
<edge from-layer="430" from-port="2" to-layer="431" to-port="1" />
<edge from-layer="431" from-port="2" to-layer="432" to-port="1" />
<edge from-layer="432" from-port="2" to-layer="434" to-port="0" />
<edge from-layer="433" from-port="0" to-layer="434" to-port="1" />
<edge from-layer="434" from-port="2" to-layer="455" to-port="0" />
<edge from-layer="435" from-port="0" to-layer="439" to-port="0" />
<edge from-layer="436" from-port="0" to-layer="438" to-port="0" />
<edge from-layer="437" from-port="0" to-layer="438" to-port="1" />
<edge from-layer="438" from-port="2" to-layer="439" to-port="1" />
<edge from-layer="439" from-port="2" to-layer="441" to-port="0" />
<edge from-layer="440" from-port="0" to-layer="441" to-port="1" />
<edge from-layer="441" from-port="2" to-layer="446" to-port="0" />
<edge from-layer="442" from-port="0" to-layer="446" to-port="1" />
<edge from-layer="443" from-port="0" to-layer="446" to-port="2" />
<edge from-layer="444" from-port="0" to-layer="446" to-port="3" />
<edge from-layer="445" from-port="0" to-layer="446" to-port="4" />
<edge from-layer="446" from-port="5" to-layer="448" to-port="0" />
<edge from-layer="447" from-port="0" to-layer="448" to-port="1" />
<edge from-layer="448" from-port="2" to-layer="450" to-port="0" />
<edge from-layer="449" from-port="0" to-layer="450" to-port="1" />
<edge from-layer="450" from-port="2" to-layer="451" to-port="0" />
<edge from-layer="451" from-port="1" to-layer="453" to-port="0" />
<edge from-layer="452" from-port="0" to-layer="453" to-port="1" />
<edge from-layer="453" from-port="3" to-layer="455" to-port="2" />
<edge from-layer="453" from-port="2" to-layer="455" to-port="1" />
<edge from-layer="454" from-port="0" to-layer="455" to-port="3" />
<edge from-layer="455" from-port="4" to-layer="457" to-port="0" />
<edge from-layer="456" from-port="0" to-layer="457" to-port="1" />
<edge from-layer="457" from-port="2" to-layer="461" to-port="0" />
<edge from-layer="457" from-port="2" to-layer="468" to-port="0" />
<edge from-layer="458" from-port="0" to-layer="467" to-port="0" />
<edge from-layer="459" from-port="0" to-layer="465" to-port="0" />
<edge from-layer="459" from-port="0" to-layer="466" to-port="0" />
<edge from-layer="460" from-port="0" to-layer="466" to-port="1" />
<edge from-layer="461" from-port="1" to-layer="462" to-port="0" />
<edge from-layer="462" from-port="1" to-layer="464" to-port="0" />
<edge from-layer="463" from-port="0" to-layer="464" to-port="1" />
<edge from-layer="464" from-port="2" to-layer="465" to-port="1" />
<edge from-layer="465" from-port="2" to-layer="466" to-port="2" />
<edge from-layer="466" from-port="3" to-layer="467" to-port="1" />
<edge from-layer="467" from-port="2" to-layer="468" to-port="1" />
<edge from-layer="468" from-port="2" to-layer="470" to-port="0" />
<edge from-layer="468" from-port="2" to-layer="506" to-port="0" />
<edge from-layer="468" from-port="2" to-layer="483" to-port="0" />
<edge from-layer="469" from-port="0" to-layer="470" to-port="1" />
<edge from-layer="470" from-port="2" to-layer="474" to-port="0" />
<edge from-layer="470" from-port="2" to-layer="481" to-port="0" />
<edge from-layer="471" from-port="0" to-layer="480" to-port="0" />
<edge from-layer="472" from-port="0" to-layer="478" to-port="0" />
<edge from-layer="472" from-port="0" to-layer="479" to-port="0" />
<edge from-layer="473" from-port="0" to-layer="479" to-port="1" />
<edge from-layer="474" from-port="1" to-layer="475" to-port="0" />
<edge from-layer="475" from-port="1" to-layer="477" to-port="0" />
<edge from-layer="476" from-port="0" to-layer="477" to-port="1" />
<edge from-layer="477" from-port="2" to-layer="478" to-port="1" />
<edge from-layer="478" from-port="2" to-layer="479" to-port="2" />
<edge from-layer="479" from-port="3" to-layer="480" to-port="1" />
<edge from-layer="480" from-port="2" to-layer="481" to-port="1" />
<edge from-layer="481" from-port="2" to-layer="572" to-port="0" />
<edge from-layer="482" from-port="0" to-layer="483" to-port="1" />
<edge from-layer="483" from-port="2" to-layer="485" to-port="0" />
<edge from-layer="484" from-port="0" to-layer="485" to-port="1" />
<edge from-layer="485" from-port="2" to-layer="496" to-port="0" />
<edge from-layer="485" from-port="2" to-layer="489" to-port="0" />
<edge from-layer="486" from-port="0" to-layer="495" to-port="0" />
<edge from-layer="487" from-port="0" to-layer="493" to-port="0" />
<edge from-layer="487" from-port="0" to-layer="494" to-port="0" />
<edge from-layer="488" from-port="0" to-layer="494" to-port="1" />
<edge from-layer="489" from-port="1" to-layer="490" to-port="0" />
<edge from-layer="490" from-port="1" to-layer="501" to-port="0" />
<edge from-layer="490" from-port="1" to-layer="492" to-port="0" />
<edge from-layer="491" from-port="0" to-layer="492" to-port="1" />
<edge from-layer="492" from-port="2" to-layer="493" to-port="1" />
<edge from-layer="493" from-port="2" to-layer="494" to-port="2" />
<edge from-layer="494" from-port="3" to-layer="495" to-port="1" />
<edge from-layer="495" from-port="2" to-layer="496" to-port="1" />
<edge from-layer="496" from-port="2" to-layer="505" to-port="0" />
<edge from-layer="497" from-port="0" to-layer="504" to-port="0" />
<edge from-layer="498" from-port="0" to-layer="503" to-port="0" />
<edge from-layer="498" from-port="0" to-layer="502" to-port="0" />
<edge from-layer="499" from-port="0" to-layer="503" to-port="1" />
<edge from-layer="500" from-port="0" to-layer="501" to-port="1" />
<edge from-layer="501" from-port="2" to-layer="502" to-port="1" />
<edge from-layer="502" from-port="2" to-layer="503" to-port="2" />
<edge from-layer="503" from-port="3" to-layer="504" to-port="1" />
<edge from-layer="504" from-port="2" to-layer="505" to-port="1" />
<edge from-layer="505" from-port="2" to-layer="507" to-port="0" />
<edge from-layer="506" from-port="1" to-layer="507" to-port="1" />
<edge from-layer="507" from-port="2" to-layer="509" to-port="0" />
<edge from-layer="508" from-port="0" to-layer="509" to-port="1" />
<edge from-layer="509" from-port="2" to-layer="511" to-port="0" />
<edge from-layer="510" from-port="0" to-layer="511" to-port="1" />
<edge from-layer="511" from-port="2" to-layer="512" to-port="0" />
<edge from-layer="511" from-port="2" to-layer="513" to-port="0" />
<edge from-layer="512" from-port="1" to-layer="513" to-port="1" />
<edge from-layer="513" from-port="2" to-layer="515" to-port="0" />
<edge from-layer="514" from-port="0" to-layer="515" to-port="1" />
<edge from-layer="515" from-port="2" to-layer="519" to-port="0" />
<edge from-layer="515" from-port="2" to-layer="526" to-port="0" />
<edge from-layer="516" from-port="0" to-layer="525" to-port="0" />
<edge from-layer="517" from-port="0" to-layer="524" to-port="0" />
<edge from-layer="517" from-port="0" to-layer="523" to-port="0" />
<edge from-layer="518" from-port="0" to-layer="524" to-port="1" />
<edge from-layer="519" from-port="1" to-layer="520" to-port="0" />
<edge from-layer="520" from-port="1" to-layer="522" to-port="0" />
<edge from-layer="521" from-port="0" to-layer="522" to-port="1" />
<edge from-layer="522" from-port="2" to-layer="523" to-port="1" />
<edge from-layer="523" from-port="2" to-layer="524" to-port="2" />
<edge from-layer="524" from-port="3" to-layer="525" to-port="1" />
<edge from-layer="525" from-port="2" to-layer="526" to-port="1" />
<edge from-layer="526" from-port="2" to-layer="528" to-port="0" />
<edge from-layer="526" from-port="2" to-layer="551" to-port="0" />
<edge from-layer="527" from-port="0" to-layer="528" to-port="1" />
<edge from-layer="528" from-port="2" to-layer="530" to-port="0" />
<edge from-layer="529" from-port="0" to-layer="530" to-port="1" />
<edge from-layer="530" from-port="2" to-layer="534" to-port="0" />
<edge from-layer="530" from-port="2" to-layer="541" to-port="0" />
<edge from-layer="531" from-port="0" to-layer="540" to-port="0" />
<edge from-layer="532" from-port="0" to-layer="538" to-port="0" />
<edge from-layer="532" from-port="0" to-layer="539" to-port="0" />
<edge from-layer="533" from-port="0" to-layer="539" to-port="1" />
<edge from-layer="534" from-port="1" to-layer="535" to-port="0" />
<edge from-layer="535" from-port="1" to-layer="537" to-port="0" />
<edge from-layer="535" from-port="1" to-layer="546" to-port="0" />
<edge from-layer="536" from-port="0" to-layer="537" to-port="1" />
<edge from-layer="537" from-port="2" to-layer="538" to-port="1" />
<edge from-layer="538" from-port="2" to-layer="539" to-port="2" />
<edge from-layer="539" from-port="3" to-layer="540" to-port="1" />
<edge from-layer="540" from-port="2" to-layer="541" to-port="1" />
<edge from-layer="541" from-port="2" to-layer="550" to-port="0" />
<edge from-layer="542" from-port="0" to-layer="549" to-port="0" />
<edge from-layer="543" from-port="0" to-layer="548" to-port="0" />
<edge from-layer="543" from-port="0" to-layer="547" to-port="0" />
<edge from-layer="544" from-port="0" to-layer="548" to-port="1" />
<edge from-layer="545" from-port="0" to-layer="546" to-port="1" />
<edge from-layer="546" from-port="2" to-layer="547" to-port="1" />
<edge from-layer="547" from-port="2" to-layer="548" to-port="2" />
<edge from-layer="548" from-port="3" to-layer="549" to-port="1" />
<edge from-layer="549" from-port="2" to-layer="550" to-port="1" />
<edge from-layer="550" from-port="2" to-layer="552" to-port="0" />
<edge from-layer="551" from-port="1" to-layer="552" to-port="1" />
<edge from-layer="552" from-port="2" to-layer="554" to-port="0" />
<edge from-layer="553" from-port="0" to-layer="554" to-port="1" />
<edge from-layer="554" from-port="2" to-layer="556" to-port="0" />
<edge from-layer="555" from-port="0" to-layer="556" to-port="1" />
<edge from-layer="556" from-port="2" to-layer="557" to-port="0" />
<edge from-layer="556" from-port="2" to-layer="558" to-port="0" />
<edge from-layer="557" from-port="1" to-layer="558" to-port="1" />
<edge from-layer="558" from-port="2" to-layer="560" to-port="0" />
<edge from-layer="559" from-port="0" to-layer="560" to-port="1" />
<edge from-layer="560" from-port="2" to-layer="564" to-port="0" />
<edge from-layer="560" from-port="2" to-layer="571" to-port="0" />
<edge from-layer="561" from-port="0" to-layer="570" to-port="0" />
<edge from-layer="562" from-port="0" to-layer="568" to-port="0" />
<edge from-layer="562" from-port="0" to-layer="569" to-port="0" />
<edge from-layer="563" from-port="0" to-layer="569" to-port="1" />
<edge from-layer="564" from-port="1" to-layer="565" to-port="0" />
<edge from-layer="565" from-port="1" to-layer="567" to-port="0" />
<edge from-layer="566" from-port="0" to-layer="567" to-port="1" />
<edge from-layer="567" from-port="2" to-layer="568" to-port="1" />
<edge from-layer="568" from-port="2" to-layer="569" to-port="2" />
<edge from-layer="569" from-port="3" to-layer="570" to-port="1" />
<edge from-layer="570" from-port="2" to-layer="571" to-port="1" />
<edge from-layer="571" from-port="2" to-layer="572" to-port="1" />
<edge from-layer="572" from-port="2" to-layer="574" to-port="0" />
<edge from-layer="573" from-port="0" to-layer="574" to-port="1" />
<edge from-layer="574" from-port="2" to-layer="576" to-port="0" />
<edge from-layer="574" from-port="2" to-layer="599" to-port="0" />
<edge from-layer="574" from-port="2" to-layer="665" to-port="0" />
<edge from-layer="575" from-port="0" to-layer="576" to-port="1" />
<edge from-layer="576" from-port="2" to-layer="578" to-port="0" />
<edge from-layer="577" from-port="0" to-layer="578" to-port="1" />
<edge from-layer="578" from-port="2" to-layer="582" to-port="0" />
<edge from-layer="578" from-port="2" to-layer="589" to-port="0" />
<edge from-layer="579" from-port="0" to-layer="588" to-port="0" />
<edge from-layer="580" from-port="0" to-layer="586" to-port="0" />
<edge from-layer="580" from-port="0" to-layer="587" to-port="0" />
<edge from-layer="581" from-port="0" to-layer="587" to-port="1" />
<edge from-layer="582" from-port="1" to-layer="583" to-port="0" />
<edge from-layer="583" from-port="1" to-layer="585" to-port="0" />
<edge from-layer="583" from-port="1" to-layer="594" to-port="0" />
<edge from-layer="584" from-port="0" to-layer="585" to-port="1" />
<edge from-layer="585" from-port="2" to-layer="586" to-port="1" />
<edge from-layer="586" from-port="2" to-layer="587" to-port="2" />
<edge from-layer="587" from-port="3" to-layer="588" to-port="1" />
<edge from-layer="588" from-port="2" to-layer="589" to-port="1" />
<edge from-layer="589" from-port="2" to-layer="598" to-port="0" />
<edge from-layer="590" from-port="0" to-layer="597" to-port="0" />
<edge from-layer="591" from-port="0" to-layer="595" to-port="0" />
<edge from-layer="591" from-port="0" to-layer="596" to-port="0" />
<edge from-layer="592" from-port="0" to-layer="596" to-port="1" />
<edge from-layer="593" from-port="0" to-layer="594" to-port="1" />
<edge from-layer="594" from-port="2" to-layer="595" to-port="1" />
<edge from-layer="595" from-port="2" to-layer="596" to-port="2" />
<edge from-layer="596" from-port="3" to-layer="597" to-port="1" />
<edge from-layer="597" from-port="2" to-layer="598" to-port="1" />
<edge from-layer="598" from-port="2" to-layer="600" to-port="0" />
<edge from-layer="599" from-port="1" to-layer="600" to-port="1" />
<edge from-layer="600" from-port="2" to-layer="602" to-port="0" />
<edge from-layer="601" from-port="0" to-layer="602" to-port="1" />
<edge from-layer="602" from-port="2" to-layer="604" to-port="0" />
<edge from-layer="603" from-port="0" to-layer="604" to-port="1" />
<edge from-layer="604" from-port="2" to-layer="605" to-port="0" />
<edge from-layer="604" from-port="2" to-layer="606" to-port="0" />
<edge from-layer="605" from-port="1" to-layer="606" to-port="1" />
<edge from-layer="606" from-port="2" to-layer="608" to-port="0" />
<edge from-layer="607" from-port="0" to-layer="608" to-port="1" />
<edge from-layer="608" from-port="2" to-layer="612" to-port="0" />
<edge from-layer="608" from-port="2" to-layer="619" to-port="0" />
<edge from-layer="609" from-port="0" to-layer="618" to-port="0" />
<edge from-layer="610" from-port="0" to-layer="617" to-port="0" />
<edge from-layer="610" from-port="0" to-layer="616" to-port="0" />
<edge from-layer="611" from-port="0" to-layer="617" to-port="1" />
<edge from-layer="612" from-port="1" to-layer="613" to-port="0" />
<edge from-layer="613" from-port="1" to-layer="615" to-port="0" />
<edge from-layer="614" from-port="0" to-layer="615" to-port="1" />
<edge from-layer="615" from-port="2" to-layer="616" to-port="1" />
<edge from-layer="616" from-port="2" to-layer="617" to-port="2" />
<edge from-layer="617" from-port="3" to-layer="618" to-port="1" />
<edge from-layer="618" from-port="2" to-layer="619" to-port="1" />
<edge from-layer="619" from-port="2" to-layer="621" to-port="0" />
<edge from-layer="619" from-port="2" to-layer="644" to-port="0" />
<edge from-layer="620" from-port="0" to-layer="621" to-port="1" />
<edge from-layer="621" from-port="2" to-layer="623" to-port="0" />
<edge from-layer="622" from-port="0" to-layer="623" to-port="1" />
<edge from-layer="623" from-port="2" to-layer="627" to-port="0" />
<edge from-layer="623" from-port="2" to-layer="634" to-port="0" />
<edge from-layer="624" from-port="0" to-layer="633" to-port="0" />
<edge from-layer="625" from-port="0" to-layer="632" to-port="0" />
<edge from-layer="625" from-port="0" to-layer="631" to-port="0" />
<edge from-layer="626" from-port="0" to-layer="632" to-port="1" />
<edge from-layer="627" from-port="1" to-layer="628" to-port="0" />
<edge from-layer="628" from-port="1" to-layer="639" to-port="0" />
<edge from-layer="628" from-port="1" to-layer="630" to-port="0" />
<edge from-layer="629" from-port="0" to-layer="630" to-port="1" />
<edge from-layer="630" from-port="2" to-layer="631" to-port="1" />
<edge from-layer="631" from-port="2" to-layer="632" to-port="2" />
<edge from-layer="632" from-port="3" to-layer="633" to-port="1" />
<edge from-layer="633" from-port="2" to-layer="634" to-port="1" />
<edge from-layer="634" from-port="2" to-layer="643" to-port="0" />
<edge from-layer="635" from-port="0" to-layer="642" to-port="0" />
<edge from-layer="636" from-port="0" to-layer="640" to-port="0" />
<edge from-layer="636" from-port="0" to-layer="641" to-port="0" />
<edge from-layer="637" from-port="0" to-layer="641" to-port="1" />
<edge from-layer="638" from-port="0" to-layer="639" to-port="1" />
<edge from-layer="639" from-port="2" to-layer="640" to-port="1" />
<edge from-layer="640" from-port="2" to-layer="641" to-port="2" />
<edge from-layer="641" from-port="3" to-layer="642" to-port="1" />
<edge from-layer="642" from-port="2" to-layer="643" to-port="1" />
<edge from-layer="643" from-port="2" to-layer="645" to-port="0" />
<edge from-layer="644" from-port="1" to-layer="645" to-port="1" />
<edge from-layer="645" from-port="2" to-layer="647" to-port="0" />
<edge from-layer="646" from-port="0" to-layer="647" to-port="1" />
<edge from-layer="647" from-port="2" to-layer="649" to-port="0" />
<edge from-layer="648" from-port="0" to-layer="649" to-port="1" />
<edge from-layer="649" from-port="2" to-layer="650" to-port="0" />
<edge from-layer="649" from-port="2" to-layer="651" to-port="0" />
<edge from-layer="650" from-port="1" to-layer="651" to-port="1" />
<edge from-layer="651" from-port="2" to-layer="653" to-port="0" />
<edge from-layer="652" from-port="0" to-layer="653" to-port="1" />
<edge from-layer="653" from-port="2" to-layer="657" to-port="0" />
<edge from-layer="653" from-port="2" to-layer="664" to-port="0" />
<edge from-layer="654" from-port="0" to-layer="663" to-port="0" />
<edge from-layer="655" from-port="0" to-layer="661" to-port="0" />
<edge from-layer="655" from-port="0" to-layer="662" to-port="0" />
<edge from-layer="656" from-port="0" to-layer="662" to-port="1" />
<edge from-layer="657" from-port="1" to-layer="658" to-port="0" />
<edge from-layer="658" from-port="1" to-layer="660" to-port="0" />
<edge from-layer="659" from-port="0" to-layer="660" to-port="1" />
<edge from-layer="660" from-port="2" to-layer="661" to-port="1" />
<edge from-layer="661" from-port="2" to-layer="662" to-port="2" />
<edge from-layer="662" from-port="3" to-layer="663" to-port="1" />
<edge from-layer="663" from-port="2" to-layer="664" to-port="1" />
<edge from-layer="664" from-port="2" to-layer="665" to-port="1" />
<edge from-layer="665" from-port="2" to-layer="667" to-port="0" />
<edge from-layer="666" from-port="0" to-layer="667" to-port="1" />
<edge from-layer="667" from-port="2" to-layer="688" to-port="0" />
<edge from-layer="668" from-port="0" to-layer="672" to-port="0" />
<edge from-layer="669" from-port="0" to-layer="671" to-port="0" />
<edge from-layer="670" from-port="0" to-layer="671" to-port="1" />
<edge from-layer="671" from-port="2" to-layer="672" to-port="1" />
<edge from-layer="672" from-port="2" to-layer="674" to-port="0" />
<edge from-layer="673" from-port="0" to-layer="674" to-port="1" />
<edge from-layer="674" from-port="2" to-layer="679" to-port="0" />
<edge from-layer="675" from-port="0" to-layer="679" to-port="1" />
<edge from-layer="676" from-port="0" to-layer="679" to-port="2" />
<edge from-layer="677" from-port="0" to-layer="679" to-port="3" />
<edge from-layer="678" from-port="0" to-layer="679" to-port="4" />
<edge from-layer="679" from-port="5" to-layer="681" to-port="0" />
<edge from-layer="680" from-port="0" to-layer="681" to-port="1" />
<edge from-layer="681" from-port="2" to-layer="683" to-port="0" />
<edge from-layer="682" from-port="0" to-layer="683" to-port="1" />
<edge from-layer="683" from-port="2" to-layer="684" to-port="0" />
<edge from-layer="684" from-port="1" to-layer="686" to-port="0" />
<edge from-layer="685" from-port="0" to-layer="686" to-port="1" />
<edge from-layer="686" from-port="3" to-layer="688" to-port="2" />
<edge from-layer="686" from-port="2" to-layer="688" to-port="1" />
<edge from-layer="687" from-port="0" to-layer="688" to-port="3" />
<edge from-layer="688" from-port="4" to-layer="690" to-port="0" />
<edge from-layer="689" from-port="0" to-layer="690" to-port="1" />
<edge from-layer="690" from-port="2" to-layer="701" to-port="0" />
<edge from-layer="690" from-port="2" to-layer="694" to-port="0" />
<edge from-layer="691" from-port="0" to-layer="700" to-port="0" />
<edge from-layer="692" from-port="0" to-layer="698" to-port="0" />
<edge from-layer="692" from-port="0" to-layer="699" to-port="0" />
<edge from-layer="693" from-port="0" to-layer="699" to-port="1" />
<edge from-layer="694" from-port="1" to-layer="695" to-port="0" />
<edge from-layer="695" from-port="1" to-layer="697" to-port="0" />
<edge from-layer="696" from-port="0" to-layer="697" to-port="1" />
<edge from-layer="697" from-port="2" to-layer="698" to-port="1" />
<edge from-layer="698" from-port="2" to-layer="699" to-port="2" />
<edge from-layer="699" from-port="3" to-layer="700" to-port="1" />
<edge from-layer="700" from-port="2" to-layer="701" to-port="1" />
<edge from-layer="701" from-port="2" to-layer="792" to-port="0" />
<edge from-layer="701" from-port="2" to-layer="726" to-port="0" />
<edge from-layer="701" from-port="2" to-layer="703" to-port="0" />
<edge from-layer="702" from-port="0" to-layer="703" to-port="1" />
<edge from-layer="703" from-port="2" to-layer="705" to-port="0" />
<edge from-layer="704" from-port="0" to-layer="705" to-port="1" />
<edge from-layer="705" from-port="2" to-layer="716" to-port="0" />
<edge from-layer="705" from-port="2" to-layer="709" to-port="0" />
<edge from-layer="706" from-port="0" to-layer="715" to-port="0" />
<edge from-layer="707" from-port="0" to-layer="713" to-port="0" />
<edge from-layer="707" from-port="0" to-layer="714" to-port="0" />
<edge from-layer="708" from-port="0" to-layer="714" to-port="1" />
<edge from-layer="709" from-port="1" to-layer="710" to-port="0" />
<edge from-layer="710" from-port="1" to-layer="712" to-port="0" />
<edge from-layer="710" from-port="1" to-layer="721" to-port="0" />
<edge from-layer="711" from-port="0" to-layer="712" to-port="1" />
<edge from-layer="712" from-port="2" to-layer="713" to-port="1" />
<edge from-layer="713" from-port="2" to-layer="714" to-port="2" />
<edge from-layer="714" from-port="3" to-layer="715" to-port="1" />
<edge from-layer="715" from-port="2" to-layer="716" to-port="1" />
<edge from-layer="716" from-port="2" to-layer="725" to-port="0" />
<edge from-layer="717" from-port="0" to-layer="724" to-port="0" />
<edge from-layer="718" from-port="0" to-layer="722" to-port="0" />
<edge from-layer="718" from-port="0" to-layer="723" to-port="0" />
<edge from-layer="719" from-port="0" to-layer="723" to-port="1" />
<edge from-layer="720" from-port="0" to-layer="721" to-port="1" />
<edge from-layer="721" from-port="2" to-layer="722" to-port="1" />
<edge from-layer="722" from-port="2" to-layer="723" to-port="2" />
<edge from-layer="723" from-port="3" to-layer="724" to-port="1" />
<edge from-layer="724" from-port="2" to-layer="725" to-port="1" />
<edge from-layer="725" from-port="2" to-layer="727" to-port="0" />
<edge from-layer="726" from-port="1" to-layer="727" to-port="1" />
<edge from-layer="727" from-port="2" to-layer="729" to-port="0" />
<edge from-layer="728" from-port="0" to-layer="729" to-port="1" />
<edge from-layer="729" from-port="2" to-layer="731" to-port="0" />
<edge from-layer="730" from-port="0" to-layer="731" to-port="1" />
<edge from-layer="731" from-port="2" to-layer="732" to-port="0" />
<edge from-layer="731" from-port="2" to-layer="733" to-port="0" />
<edge from-layer="732" from-port="1" to-layer="733" to-port="1" />
<edge from-layer="733" from-port="2" to-layer="735" to-port="0" />
<edge from-layer="734" from-port="0" to-layer="735" to-port="1" />
<edge from-layer="735" from-port="2" to-layer="739" to-port="0" />
<edge from-layer="735" from-port="2" to-layer="746" to-port="0" />
<edge from-layer="736" from-port="0" to-layer="745" to-port="0" />
<edge from-layer="737" from-port="0" to-layer="743" to-port="0" />
<edge from-layer="737" from-port="0" to-layer="744" to-port="0" />
<edge from-layer="738" from-port="0" to-layer="744" to-port="1" />
<edge from-layer="739" from-port="1" to-layer="740" to-port="0" />
<edge from-layer="740" from-port="1" to-layer="742" to-port="0" />
<edge from-layer="741" from-port="0" to-layer="742" to-port="1" />
<edge from-layer="742" from-port="2" to-layer="743" to-port="1" />
<edge from-layer="743" from-port="2" to-layer="744" to-port="2" />
<edge from-layer="744" from-port="3" to-layer="745" to-port="1" />
<edge from-layer="745" from-port="2" to-layer="746" to-port="1" />
<edge from-layer="746" from-port="2" to-layer="748" to-port="0" />
<edge from-layer="746" from-port="2" to-layer="771" to-port="0" />
<edge from-layer="747" from-port="0" to-layer="748" to-port="1" />
<edge from-layer="748" from-port="2" to-layer="750" to-port="0" />
<edge from-layer="749" from-port="0" to-layer="750" to-port="1" />
<edge from-layer="750" from-port="2" to-layer="754" to-port="0" />
<edge from-layer="750" from-port="2" to-layer="761" to-port="0" />
<edge from-layer="751" from-port="0" to-layer="760" to-port="0" />
<edge from-layer="752" from-port="0" to-layer="758" to-port="0" />
<edge from-layer="752" from-port="0" to-layer="759" to-port="0" />
<edge from-layer="753" from-port="0" to-layer="759" to-port="1" />
<edge from-layer="754" from-port="1" to-layer="755" to-port="0" />
<edge from-layer="755" from-port="1" to-layer="757" to-port="0" />
<edge from-layer="755" from-port="1" to-layer="766" to-port="0" />
<edge from-layer="756" from-port="0" to-layer="757" to-port="1" />
<edge from-layer="757" from-port="2" to-layer="758" to-port="1" />
<edge from-layer="758" from-port="2" to-layer="759" to-port="2" />
<edge from-layer="759" from-port="3" to-layer="760" to-port="1" />
<edge from-layer="760" from-port="2" to-layer="761" to-port="1" />
<edge from-layer="761" from-port="2" to-layer="770" to-port="0" />
<edge from-layer="762" from-port="0" to-layer="769" to-port="0" />
<edge from-layer="763" from-port="0" to-layer="767" to-port="0" />
<edge from-layer="763" from-port="0" to-layer="768" to-port="0" />
<edge from-layer="764" from-port="0" to-layer="768" to-port="1" />
<edge from-layer="765" from-port="0" to-layer="766" to-port="1" />
<edge from-layer="766" from-port="2" to-layer="767" to-port="1" />
<edge from-layer="767" from-port="2" to-layer="768" to-port="2" />
<edge from-layer="768" from-port="3" to-layer="769" to-port="1" />
<edge from-layer="769" from-port="2" to-layer="770" to-port="1" />
<edge from-layer="770" from-port="2" to-layer="772" to-port="0" />
<edge from-layer="771" from-port="1" to-layer="772" to-port="1" />
<edge from-layer="772" from-port="2" to-layer="774" to-port="0" />
<edge from-layer="773" from-port="0" to-layer="774" to-port="1" />
<edge from-layer="774" from-port="2" to-layer="776" to-port="0" />
<edge from-layer="775" from-port="0" to-layer="776" to-port="1" />
<edge from-layer="776" from-port="2" to-layer="777" to-port="0" />
<edge from-layer="776" from-port="2" to-layer="778" to-port="0" />
<edge from-layer="777" from-port="1" to-layer="778" to-port="1" />
<edge from-layer="778" from-port="2" to-layer="780" to-port="0" />
<edge from-layer="779" from-port="0" to-layer="780" to-port="1" />
<edge from-layer="780" from-port="2" to-layer="784" to-port="0" />
<edge from-layer="780" from-port="2" to-layer="791" to-port="0" />
<edge from-layer="781" from-port="0" to-layer="790" to-port="0" />
<edge from-layer="782" from-port="0" to-layer="788" to-port="0" />
<edge from-layer="782" from-port="0" to-layer="789" to-port="0" />
<edge from-layer="783" from-port="0" to-layer="789" to-port="1" />
<edge from-layer="784" from-port="1" to-layer="785" to-port="0" />
<edge from-layer="785" from-port="1" to-layer="787" to-port="0" />
<edge from-layer="786" from-port="0" to-layer="787" to-port="1" />
<edge from-layer="787" from-port="2" to-layer="788" to-port="1" />
<edge from-layer="788" from-port="2" to-layer="789" to-port="2" />
<edge from-layer="789" from-port="3" to-layer="790" to-port="1" />
<edge from-layer="790" from-port="2" to-layer="791" to-port="1" />
<edge from-layer="791" from-port="2" to-layer="792" to-port="1" />
<edge from-layer="792" from-port="2" to-layer="794" to-port="0" />
<edge from-layer="793" from-port="0" to-layer="794" to-port="1" />
<edge from-layer="794" from-port="2" to-layer="885" to-port="0" />
<edge from-layer="794" from-port="2" to-layer="796" to-port="0" />
<edge from-layer="794" from-port="2" to-layer="819" to-port="0" />
<edge from-layer="795" from-port="0" to-layer="796" to-port="1" />
<edge from-layer="796" from-port="2" to-layer="798" to-port="0" />
<edge from-layer="797" from-port="0" to-layer="798" to-port="1" />
<edge from-layer="798" from-port="2" to-layer="802" to-port="0" />
<edge from-layer="798" from-port="2" to-layer="809" to-port="0" />
<edge from-layer="799" from-port="0" to-layer="808" to-port="0" />
<edge from-layer="800" from-port="0" to-layer="806" to-port="0" />
<edge from-layer="800" from-port="0" to-layer="807" to-port="0" />
<edge from-layer="801" from-port="0" to-layer="807" to-port="1" />
<edge from-layer="802" from-port="1" to-layer="803" to-port="0" />
<edge from-layer="803" from-port="1" to-layer="805" to-port="0" />
<edge from-layer="803" from-port="1" to-layer="814" to-port="0" />
<edge from-layer="804" from-port="0" to-layer="805" to-port="1" />
<edge from-layer="805" from-port="2" to-layer="806" to-port="1" />
<edge from-layer="806" from-port="2" to-layer="807" to-port="2" />
<edge from-layer="807" from-port="3" to-layer="808" to-port="1" />
<edge from-layer="808" from-port="2" to-layer="809" to-port="1" />
<edge from-layer="809" from-port="2" to-layer="818" to-port="0" />
<edge from-layer="810" from-port="0" to-layer="817" to-port="0" />
<edge from-layer="811" from-port="0" to-layer="815" to-port="0" />
<edge from-layer="811" from-port="0" to-layer="816" to-port="0" />
<edge from-layer="812" from-port="0" to-layer="816" to-port="1" />
<edge from-layer="813" from-port="0" to-layer="814" to-port="1" />
<edge from-layer="814" from-port="2" to-layer="815" to-port="1" />
<edge from-layer="815" from-port="2" to-layer="816" to-port="2" />
<edge from-layer="816" from-port="3" to-layer="817" to-port="1" />
<edge from-layer="817" from-port="2" to-layer="818" to-port="1" />
<edge from-layer="818" from-port="2" to-layer="820" to-port="0" />
<edge from-layer="819" from-port="1" to-layer="820" to-port="1" />
<edge from-layer="820" from-port="2" to-layer="822" to-port="0" />
<edge from-layer="821" from-port="0" to-layer="822" to-port="1" />
<edge from-layer="822" from-port="2" to-layer="824" to-port="0" />
<edge from-layer="823" from-port="0" to-layer="824" to-port="1" />
<edge from-layer="824" from-port="2" to-layer="826" to-port="0" />
<edge from-layer="824" from-port="2" to-layer="825" to-port="0" />
<edge from-layer="825" from-port="1" to-layer="826" to-port="1" />
<edge from-layer="826" from-port="2" to-layer="828" to-port="0" />
<edge from-layer="827" from-port="0" to-layer="828" to-port="1" />
<edge from-layer="828" from-port="2" to-layer="832" to-port="0" />
<edge from-layer="828" from-port="2" to-layer="839" to-port="0" />
<edge from-layer="829" from-port="0" to-layer="838" to-port="0" />
<edge from-layer="830" from-port="0" to-layer="836" to-port="0" />
<edge from-layer="830" from-port="0" to-layer="837" to-port="0" />
<edge from-layer="831" from-port="0" to-layer="837" to-port="1" />
<edge from-layer="832" from-port="1" to-layer="833" to-port="0" />
<edge from-layer="833" from-port="1" to-layer="835" to-port="0" />
<edge from-layer="834" from-port="0" to-layer="835" to-port="1" />
<edge from-layer="835" from-port="2" to-layer="836" to-port="1" />
<edge from-layer="836" from-port="2" to-layer="837" to-port="2" />
<edge from-layer="837" from-port="3" to-layer="838" to-port="1" />
<edge from-layer="838" from-port="2" to-layer="839" to-port="1" />
<edge from-layer="839" from-port="2" to-layer="841" to-port="0" />
<edge from-layer="839" from-port="2" to-layer="864" to-port="0" />
<edge from-layer="840" from-port="0" to-layer="841" to-port="1" />
<edge from-layer="841" from-port="2" to-layer="843" to-port="0" />
<edge from-layer="842" from-port="0" to-layer="843" to-port="1" />
<edge from-layer="843" from-port="2" to-layer="847" to-port="0" />
<edge from-layer="843" from-port="2" to-layer="854" to-port="0" />
<edge from-layer="844" from-port="0" to-layer="853" to-port="0" />
<edge from-layer="845" from-port="0" to-layer="851" to-port="0" />
<edge from-layer="845" from-port="0" to-layer="852" to-port="0" />
<edge from-layer="846" from-port="0" to-layer="852" to-port="1" />
<edge from-layer="847" from-port="1" to-layer="848" to-port="0" />
<edge from-layer="848" from-port="1" to-layer="859" to-port="0" />
<edge from-layer="848" from-port="1" to-layer="850" to-port="0" />
<edge from-layer="849" from-port="0" to-layer="850" to-port="1" />
<edge from-layer="850" from-port="2" to-layer="851" to-port="1" />
<edge from-layer="851" from-port="2" to-layer="852" to-port="2" />
<edge from-layer="852" from-port="3" to-layer="853" to-port="1" />
<edge from-layer="853" from-port="2" to-layer="854" to-port="1" />
<edge from-layer="854" from-port="2" to-layer="863" to-port="0" />
<edge from-layer="855" from-port="0" to-layer="862" to-port="0" />
<edge from-layer="856" from-port="0" to-layer="860" to-port="0" />
<edge from-layer="856" from-port="0" to-layer="861" to-port="0" />
<edge from-layer="857" from-port="0" to-layer="861" to-port="1" />
<edge from-layer="858" from-port="0" to-layer="859" to-port="1" />
<edge from-layer="859" from-port="2" to-layer="860" to-port="1" />
<edge from-layer="860" from-port="2" to-layer="861" to-port="2" />
<edge from-layer="861" from-port="3" to-layer="862" to-port="1" />
<edge from-layer="862" from-port="2" to-layer="863" to-port="1" />
<edge from-layer="863" from-port="2" to-layer="865" to-port="0" />
<edge from-layer="864" from-port="1" to-layer="865" to-port="1" />
<edge from-layer="865" from-port="2" to-layer="867" to-port="0" />
<edge from-layer="866" from-port="0" to-layer="867" to-port="1" />
<edge from-layer="867" from-port="2" to-layer="869" to-port="0" />
<edge from-layer="868" from-port="0" to-layer="869" to-port="1" />
<edge from-layer="869" from-port="2" to-layer="870" to-port="0" />
<edge from-layer="869" from-port="2" to-layer="871" to-port="0" />
<edge from-layer="870" from-port="1" to-layer="871" to-port="1" />
<edge from-layer="871" from-port="2" to-layer="873" to-port="0" />
<edge from-layer="872" from-port="0" to-layer="873" to-port="1" />
<edge from-layer="873" from-port="2" to-layer="884" to-port="0" />
<edge from-layer="873" from-port="2" to-layer="877" to-port="0" />
<edge from-layer="874" from-port="0" to-layer="883" to-port="0" />
<edge from-layer="875" from-port="0" to-layer="882" to-port="0" />
<edge from-layer="875" from-port="0" to-layer="881" to-port="0" />
<edge from-layer="876" from-port="0" to-layer="882" to-port="1" />
<edge from-layer="877" from-port="1" to-layer="878" to-port="0" />
<edge from-layer="878" from-port="1" to-layer="880" to-port="0" />
<edge from-layer="879" from-port="0" to-layer="880" to-port="1" />
<edge from-layer="880" from-port="2" to-layer="881" to-port="1" />
<edge from-layer="881" from-port="2" to-layer="882" to-port="2" />
<edge from-layer="882" from-port="3" to-layer="883" to-port="1" />
<edge from-layer="883" from-port="2" to-layer="884" to-port="1" />
<edge from-layer="884" from-port="2" to-layer="885" to-port="1" />
<edge from-layer="885" from-port="2" to-layer="887" to-port="0" />
<edge from-layer="886" from-port="0" to-layer="887" to-port="1" />
<edge from-layer="887" from-port="2" to-layer="978" to-port="0" />
<edge from-layer="887" from-port="2" to-layer="912" to-port="0" />
<edge from-layer="887" from-port="2" to-layer="889" to-port="0" />
<edge from-layer="888" from-port="0" to-layer="889" to-port="1" />
<edge from-layer="889" from-port="2" to-layer="891" to-port="0" />
<edge from-layer="890" from-port="0" to-layer="891" to-port="1" />
<edge from-layer="891" from-port="2" to-layer="895" to-port="0" />
<edge from-layer="891" from-port="2" to-layer="902" to-port="0" />
<edge from-layer="892" from-port="0" to-layer="901" to-port="0" />
<edge from-layer="893" from-port="0" to-layer="899" to-port="0" />
<edge from-layer="893" from-port="0" to-layer="900" to-port="0" />
<edge from-layer="894" from-port="0" to-layer="900" to-port="1" />
<edge from-layer="895" from-port="1" to-layer="896" to-port="0" />
<edge from-layer="896" from-port="1" to-layer="907" to-port="0" />
<edge from-layer="896" from-port="1" to-layer="898" to-port="0" />
<edge from-layer="897" from-port="0" to-layer="898" to-port="1" />
<edge from-layer="898" from-port="2" to-layer="899" to-port="1" />
<edge from-layer="899" from-port="2" to-layer="900" to-port="2" />
<edge from-layer="900" from-port="3" to-layer="901" to-port="1" />
<edge from-layer="901" from-port="2" to-layer="902" to-port="1" />
<edge from-layer="902" from-port="2" to-layer="911" to-port="0" />
<edge from-layer="903" from-port="0" to-layer="910" to-port="0" />
<edge from-layer="904" from-port="0" to-layer="908" to-port="0" />
<edge from-layer="904" from-port="0" to-layer="909" to-port="0" />
<edge from-layer="905" from-port="0" to-layer="909" to-port="1" />
<edge from-layer="906" from-port="0" to-layer="907" to-port="1" />
<edge from-layer="907" from-port="2" to-layer="908" to-port="1" />
<edge from-layer="908" from-port="2" to-layer="909" to-port="2" />
<edge from-layer="909" from-port="3" to-layer="910" to-port="1" />
<edge from-layer="910" from-port="2" to-layer="911" to-port="1" />
<edge from-layer="911" from-port="2" to-layer="913" to-port="0" />
<edge from-layer="912" from-port="1" to-layer="913" to-port="1" />
<edge from-layer="913" from-port="2" to-layer="915" to-port="0" />
<edge from-layer="914" from-port="0" to-layer="915" to-port="1" />
<edge from-layer="915" from-port="2" to-layer="917" to-port="0" />
<edge from-layer="916" from-port="0" to-layer="917" to-port="1" />
<edge from-layer="917" from-port="2" to-layer="918" to-port="0" />
<edge from-layer="917" from-port="2" to-layer="919" to-port="0" />
<edge from-layer="918" from-port="1" to-layer="919" to-port="1" />
<edge from-layer="919" from-port="2" to-layer="921" to-port="0" />
<edge from-layer="920" from-port="0" to-layer="921" to-port="1" />
<edge from-layer="921" from-port="2" to-layer="932" to-port="0" />
<edge from-layer="921" from-port="2" to-layer="925" to-port="0" />
<edge from-layer="922" from-port="0" to-layer="931" to-port="0" />
<edge from-layer="923" from-port="0" to-layer="930" to-port="0" />
<edge from-layer="923" from-port="0" to-layer="929" to-port="0" />
<edge from-layer="924" from-port="0" to-layer="930" to-port="1" />
<edge from-layer="925" from-port="1" to-layer="926" to-port="0" />
<edge from-layer="926" from-port="1" to-layer="928" to-port="0" />
<edge from-layer="927" from-port="0" to-layer="928" to-port="1" />
<edge from-layer="928" from-port="2" to-layer="929" to-port="1" />
<edge from-layer="929" from-port="2" to-layer="930" to-port="2" />
<edge from-layer="930" from-port="3" to-layer="931" to-port="1" />
<edge from-layer="931" from-port="2" to-layer="932" to-port="1" />
<edge from-layer="932" from-port="2" to-layer="957" to-port="0" />
<edge from-layer="932" from-port="2" to-layer="934" to-port="0" />
<edge from-layer="933" from-port="0" to-layer="934" to-port="1" />
<edge from-layer="934" from-port="2" to-layer="936" to-port="0" />
<edge from-layer="935" from-port="0" to-layer="936" to-port="1" />
<edge from-layer="936" from-port="2" to-layer="940" to-port="0" />
<edge from-layer="936" from-port="2" to-layer="947" to-port="0" />
<edge from-layer="937" from-port="0" to-layer="946" to-port="0" />
<edge from-layer="938" from-port="0" to-layer="944" to-port="0" />
<edge from-layer="938" from-port="0" to-layer="945" to-port="0" />
<edge from-layer="939" from-port="0" to-layer="945" to-port="1" />
<edge from-layer="940" from-port="1" to-layer="941" to-port="0" />
<edge from-layer="941" from-port="1" to-layer="943" to-port="0" />
<edge from-layer="941" from-port="1" to-layer="952" to-port="0" />
<edge from-layer="942" from-port="0" to-layer="943" to-port="1" />
<edge from-layer="943" from-port="2" to-layer="944" to-port="1" />
<edge from-layer="944" from-port="2" to-layer="945" to-port="2" />
<edge from-layer="945" from-port="3" to-layer="946" to-port="1" />
<edge from-layer="946" from-port="2" to-layer="947" to-port="1" />
<edge from-layer="947" from-port="2" to-layer="956" to-port="0" />
<edge from-layer="948" from-port="0" to-layer="955" to-port="0" />
<edge from-layer="949" from-port="0" to-layer="953" to-port="0" />
<edge from-layer="949" from-port="0" to-layer="954" to-port="0" />
<edge from-layer="950" from-port="0" to-layer="954" to-port="1" />
<edge from-layer="951" from-port="0" to-layer="952" to-port="1" />
<edge from-layer="952" from-port="2" to-layer="953" to-port="1" />
<edge from-layer="953" from-port="2" to-layer="954" to-port="2" />
<edge from-layer="954" from-port="3" to-layer="955" to-port="1" />
<edge from-layer="955" from-port="2" to-layer="956" to-port="1" />
<edge from-layer="956" from-port="2" to-layer="958" to-port="0" />
<edge from-layer="957" from-port="1" to-layer="958" to-port="1" />
<edge from-layer="958" from-port="2" to-layer="960" to-port="0" />
<edge from-layer="959" from-port="0" to-layer="960" to-port="1" />
<edge from-layer="960" from-port="2" to-layer="962" to-port="0" />
<edge from-layer="961" from-port="0" to-layer="962" to-port="1" />
<edge from-layer="962" from-port="2" to-layer="963" to-port="0" />
<edge from-layer="962" from-port="2" to-layer="964" to-port="0" />
<edge from-layer="963" from-port="1" to-layer="964" to-port="1" />
<edge from-layer="964" from-port="2" to-layer="966" to-port="0" />
<edge from-layer="965" from-port="0" to-layer="966" to-port="1" />
<edge from-layer="966" from-port="2" to-layer="977" to-port="0" />
<edge from-layer="966" from-port="2" to-layer="970" to-port="0" />
<edge from-layer="967" from-port="0" to-layer="976" to-port="0" />
<edge from-layer="968" from-port="0" to-layer="974" to-port="0" />
<edge from-layer="968" from-port="0" to-layer="975" to-port="0" />
<edge from-layer="969" from-port="0" to-layer="975" to-port="1" />
<edge from-layer="970" from-port="1" to-layer="971" to-port="0" />
<edge from-layer="971" from-port="1" to-layer="973" to-port="0" />
<edge from-layer="972" from-port="0" to-layer="973" to-port="1" />
<edge from-layer="973" from-port="2" to-layer="974" to-port="1" />
<edge from-layer="974" from-port="2" to-layer="975" to-port="2" />
<edge from-layer="975" from-port="3" to-layer="976" to-port="1" />
<edge from-layer="976" from-port="2" to-layer="977" to-port="1" />
<edge from-layer="977" from-port="2" to-layer="978" to-port="1" />
<edge from-layer="978" from-port="2" to-layer="980" to-port="0" />
<edge from-layer="979" from-port="0" to-layer="980" to-port="1" />
<edge from-layer="980" from-port="2" to-layer="987" to-port="0" />
<edge from-layer="980" from-port="2" to-layer="981" to-port="0" />
<edge from-layer="980" from-port="2" to-layer="1239" to-port="1" />
<edge from-layer="980" from-port="2" to-layer="993" to-port="0" />
<edge from-layer="980" from-port="2" to-layer="1005" to-port="0" />
<edge from-layer="980" from-port="2" to-layer="997" to-port="0" />
<edge from-layer="981" from-port="1" to-layer="984" to-port="0" />
<edge from-layer="982" from-port="0" to-layer="984" to-port="1" />
<edge from-layer="983" from-port="0" to-layer="984" to-port="2" />
<edge from-layer="984" from-port="3" to-layer="986" to-port="0" />
<edge from-layer="985" from-port="0" to-layer="986" to-port="1" />
<edge from-layer="986" from-port="2" to-layer="1004" to-port="0" />
<edge from-layer="987" from-port="1" to-layer="990" to-port="0" />
<edge from-layer="988" from-port="0" to-layer="990" to-port="1" />
<edge from-layer="989" from-port="0" to-layer="990" to-port="2" />
<edge from-layer="990" from-port="3" to-layer="1232" to-port="0" />
<edge from-layer="990" from-port="3" to-layer="992" to-port="0" />
<edge from-layer="991" from-port="0" to-layer="992" to-port="1" />
<edge from-layer="992" from-port="2" to-layer="1004" to-port="1" />
<edge from-layer="993" from-port="1" to-layer="996" to-port="0" />
<edge from-layer="994" from-port="0" to-layer="996" to-port="1" />
<edge from-layer="995" from-port="0" to-layer="996" to-port="2" />
<edge from-layer="996" from-port="3" to-layer="1001" to-port="0" />
<edge from-layer="996" from-port="3" to-layer="1234" to-port="0" />
<edge from-layer="997" from-port="1" to-layer="1000" to-port="0" />
<edge from-layer="998" from-port="0" to-layer="1000" to-port="1" />
<edge from-layer="999" from-port="0" to-layer="1000" to-port="2" />
<edge from-layer="1000" from-port="3" to-layer="1001" to-port="1" />
<edge from-layer="1000" from-port="3" to-layer="1236" to-port="0" />
<edge from-layer="1001" from-port="2" to-layer="1003" to-port="0" />
<edge from-layer="1002" from-port="0" to-layer="1003" to-port="1" />
<edge from-layer="1003" from-port="2" to-layer="1004" to-port="2" />
<edge from-layer="1004" from-port="3" to-layer="1005" to-port="1" />
<edge from-layer="1005" from-port="2" to-layer="1224" to-port="0" />
<edge from-layer="1005" from-port="2" to-layer="1007" to-port="0" />
<edge from-layer="1005" from-port="2" to-layer="1030" to-port="0" />
<edge from-layer="1006" from-port="0" to-layer="1007" to-port="1" />
<edge from-layer="1007" from-port="2" to-layer="1009" to-port="0" />
<edge from-layer="1008" from-port="0" to-layer="1009" to-port="1" />
<edge from-layer="1009" from-port="2" to-layer="1013" to-port="0" />
<edge from-layer="1009" from-port="2" to-layer="1020" to-port="0" />
<edge from-layer="1010" from-port="0" to-layer="1019" to-port="0" />
<edge from-layer="1011" from-port="0" to-layer="1017" to-port="0" />
<edge from-layer="1011" from-port="0" to-layer="1018" to-port="0" />
<edge from-layer="1012" from-port="0" to-layer="1018" to-port="1" />
<edge from-layer="1013" from-port="1" to-layer="1014" to-port="0" />
<edge from-layer="1014" from-port="1" to-layer="1025" to-port="0" />
<edge from-layer="1014" from-port="1" to-layer="1016" to-port="0" />
<edge from-layer="1015" from-port="0" to-layer="1016" to-port="1" />
<edge from-layer="1016" from-port="2" to-layer="1017" to-port="1" />
<edge from-layer="1017" from-port="2" to-layer="1018" to-port="2" />
<edge from-layer="1018" from-port="3" to-layer="1019" to-port="1" />
<edge from-layer="1019" from-port="2" to-layer="1020" to-port="1" />
<edge from-layer="1020" from-port="2" to-layer="1029" to-port="0" />
<edge from-layer="1021" from-port="0" to-layer="1028" to-port="0" />
<edge from-layer="1022" from-port="0" to-layer="1027" to-port="0" />
<edge from-layer="1022" from-port="0" to-layer="1026" to-port="0" />
<edge from-layer="1023" from-port="0" to-layer="1027" to-port="1" />
<edge from-layer="1024" from-port="0" to-layer="1025" to-port="1" />
<edge from-layer="1025" from-port="2" to-layer="1026" to-port="1" />
<edge from-layer="1026" from-port="2" to-layer="1027" to-port="2" />
<edge from-layer="1027" from-port="3" to-layer="1028" to-port="1" />
<edge from-layer="1028" from-port="2" to-layer="1029" to-port="1" />
<edge from-layer="1029" from-port="2" to-layer="1031" to-port="0" />
<edge from-layer="1030" from-port="1" to-layer="1031" to-port="1" />
<edge from-layer="1031" from-port="2" to-layer="1033" to-port="0" />
<edge from-layer="1032" from-port="0" to-layer="1033" to-port="1" />
<edge from-layer="1033" from-port="2" to-layer="1035" to-port="0" />
<edge from-layer="1034" from-port="0" to-layer="1035" to-port="1" />
<edge from-layer="1035" from-port="2" to-layer="1037" to-port="0" />
<edge from-layer="1035" from-port="2" to-layer="1076" to-port="0" />
<edge from-layer="1035" from-port="2" to-layer="1145" to-port="0" />
<edge from-layer="1036" from-port="0" to-layer="1037" to-port="1" />
<edge from-layer="1037" from-port="2" to-layer="1038" to-port="1" />
<edge from-layer="1038" from-port="2" to-layer="1063" to-port="0" />
<edge from-layer="1038" from-port="2" to-layer="1045" to-port="0" />
<edge from-layer="1038" from-port="2" to-layer="1052" to-port="0" />
<edge from-layer="1038" from-port="2" to-layer="1039" to-port="0" />
<edge from-layer="1039" from-port="1" to-layer="1042" to-port="0" />
<edge from-layer="1040" from-port="0" to-layer="1042" to-port="1" />
<edge from-layer="1041" from-port="0" to-layer="1042" to-port="2" />
<edge from-layer="1042" from-port="3" to-layer="1067" to-port="0" />
<edge from-layer="1042" from-port="3" to-layer="1044" to-port="0" />
<edge from-layer="1043" from-port="0" to-layer="1044" to-port="1" />
<edge from-layer="1044" from-port="2" to-layer="1062" to-port="0" />
<edge from-layer="1045" from-port="1" to-layer="1048" to-port="0" />
<edge from-layer="1046" from-port="0" to-layer="1048" to-port="1" />
<edge from-layer="1047" from-port="0" to-layer="1048" to-port="2" />
<edge from-layer="1048" from-port="3" to-layer="1069" to-port="0" />
<edge from-layer="1048" from-port="3" to-layer="1050" to-port="0" />
<edge from-layer="1049" from-port="0" to-layer="1050" to-port="1" />
<edge from-layer="1050" from-port="2" to-layer="1062" to-port="1" />
<edge from-layer="1051" from-port="0" to-layer="1062" to-port="2" />
<edge from-layer="1052" from-port="1" to-layer="1055" to-port="0" />
<edge from-layer="1053" from-port="0" to-layer="1055" to-port="1" />
<edge from-layer="1054" from-port="0" to-layer="1055" to-port="2" />
<edge from-layer="1055" from-port="3" to-layer="1057" to-port="0" />
<edge from-layer="1056" from-port="0" to-layer="1057" to-port="1" />
<edge from-layer="1057" from-port="2" to-layer="1058" to-port="0" />
<edge from-layer="1058" from-port="1" to-layer="1059" to-port="0" />
<edge from-layer="1059" from-port="1" to-layer="1071" to-port="0" />
<edge from-layer="1059" from-port="1" to-layer="1061" to-port="0" />
<edge from-layer="1060" from-port="0" to-layer="1061" to-port="1" />
<edge from-layer="1061" from-port="2" to-layer="1062" to-port="3" />
<edge from-layer="1062" from-port="4" to-layer="1063" to-port="1" />
<edge from-layer="1063" from-port="2" to-layer="1065" to-port="0" />
<edge from-layer="1064" from-port="0" to-layer="1065" to-port="1" />
<edge from-layer="1065" from-port="2" to-layer="1073" to-port="0" />
<edge from-layer="1066" from-port="0" to-layer="1067" to-port="1" />
<edge from-layer="1067" from-port="2" to-layer="1072" to-port="0" />
<edge from-layer="1068" from-port="0" to-layer="1069" to-port="1" />
<edge from-layer="1069" from-port="2" to-layer="1072" to-port="1" />
<edge from-layer="1070" from-port="0" to-layer="1071" to-port="1" />
<edge from-layer="1071" from-port="2" to-layer="1072" to-port="2" />
<edge from-layer="1072" from-port="3" to-layer="1073" to-port="1" />
<edge from-layer="1073" from-port="2" to-layer="1117" to-port="0" />
<edge from-layer="1073" from-port="2" to-layer="1123" to-port="0" />
<edge from-layer="1073" from-port="2" to-layer="1113" to-port="0" />
<edge from-layer="1074" from-port="0" to-layer="1077" to-port="0" />
<edge from-layer="1075" from-port="0" to-layer="1076" to-port="1" />
<edge from-layer="1076" from-port="2" to-layer="1077" to-port="1" />
<edge from-layer="1077" from-port="2" to-layer="1078" to-port="0" />
<edge from-layer="1077" from-port="2" to-layer="1102" to-port="0" />
<edge from-layer="1077" from-port="2" to-layer="1091" to-port="0" />
<edge from-layer="1077" from-port="2" to-layer="1084" to-port="0" />
<edge from-layer="1078" from-port="1" to-layer="1081" to-port="0" />
<edge from-layer="1079" from-port="0" to-layer="1081" to-port="1" />
<edge from-layer="1080" from-port="0" to-layer="1081" to-port="2" />
<edge from-layer="1081" from-port="3" to-layer="1106" to-port="0" />
<edge from-layer="1081" from-port="3" to-layer="1083" to-port="0" />
<edge from-layer="1082" from-port="0" to-layer="1083" to-port="1" />
<edge from-layer="1083" from-port="2" to-layer="1101" to-port="0" />
<edge from-layer="1084" from-port="1" to-layer="1087" to-port="0" />
<edge from-layer="1085" from-port="0" to-layer="1087" to-port="1" />
<edge from-layer="1086" from-port="0" to-layer="1087" to-port="2" />
<edge from-layer="1087" from-port="3" to-layer="1089" to-port="0" />
<edge from-layer="1087" from-port="3" to-layer="1108" to-port="0" />
<edge from-layer="1088" from-port="0" to-layer="1089" to-port="1" />
<edge from-layer="1089" from-port="2" to-layer="1101" to-port="1" />
<edge from-layer="1090" from-port="0" to-layer="1101" to-port="2" />
<edge from-layer="1091" from-port="1" to-layer="1094" to-port="0" />
<edge from-layer="1092" from-port="0" to-layer="1094" to-port="1" />
<edge from-layer="1093" from-port="0" to-layer="1094" to-port="2" />
<edge from-layer="1094" from-port="3" to-layer="1096" to-port="0" />
<edge from-layer="1095" from-port="0" to-layer="1096" to-port="1" />
<edge from-layer="1096" from-port="2" to-layer="1097" to-port="0" />
<edge from-layer="1097" from-port="1" to-layer="1098" to-port="0" />
<edge from-layer="1098" from-port="1" to-layer="1110" to-port="0" />
<edge from-layer="1098" from-port="1" to-layer="1100" to-port="0" />
<edge from-layer="1099" from-port="0" to-layer="1100" to-port="1" />
<edge from-layer="1100" from-port="2" to-layer="1101" to-port="3" />
<edge from-layer="1101" from-port="4" to-layer="1102" to-port="1" />
<edge from-layer="1102" from-port="2" to-layer="1104" to-port="0" />
<edge from-layer="1103" from-port="0" to-layer="1104" to-port="1" />
<edge from-layer="1104" from-port="2" to-layer="1112" to-port="0" />
<edge from-layer="1105" from-port="0" to-layer="1106" to-port="1" />
<edge from-layer="1106" from-port="2" to-layer="1111" to-port="0" />
<edge from-layer="1107" from-port="0" to-layer="1108" to-port="1" />
<edge from-layer="1108" from-port="2" to-layer="1111" to-port="1" />
<edge from-layer="1109" from-port="0" to-layer="1110" to-port="1" />
<edge from-layer="1110" from-port="2" to-layer="1111" to-port="2" />
<edge from-layer="1111" from-port="3" to-layer="1112" to-port="1" />
<edge from-layer="1112" from-port="2" to-layer="1113" to-port="1" />
<edge from-layer="1112" from-port="2" to-layer="1129" to-port="0" />
<edge from-layer="1113" from-port="2" to-layer="1115" to-port="0" />
<edge from-layer="1114" from-port="0" to-layer="1115" to-port="1" />
<edge from-layer="1115" from-port="2" to-layer="1139" to-port="0" />
<edge from-layer="1116" from-port="0" to-layer="1136" to-port="0" />
<edge from-layer="1117" from-port="1" to-layer="1120" to-port="0" />
<edge from-layer="1118" from-port="0" to-layer="1120" to-port="1" />
<edge from-layer="1119" from-port="0" to-layer="1120" to-port="2" />
<edge from-layer="1120" from-port="3" to-layer="1122" to-port="0" />
<edge from-layer="1121" from-port="0" to-layer="1122" to-port="1" />
<edge from-layer="1122" from-port="2" to-layer="1135" to-port="0" />
<edge from-layer="1123" from-port="1" to-layer="1126" to-port="0" />
<edge from-layer="1124" from-port="0" to-layer="1126" to-port="1" />
<edge from-layer="1125" from-port="0" to-layer="1126" to-port="2" />
<edge from-layer="1126" from-port="3" to-layer="1128" to-port="0" />
<edge from-layer="1127" from-port="0" to-layer="1128" to-port="1" />
<edge from-layer="1128" from-port="2" to-layer="1135" to-port="1" />
<edge from-layer="1129" from-port="1" to-layer="1132" to-port="0" />
<edge from-layer="1130" from-port="0" to-layer="1132" to-port="1" />
<edge from-layer="1131" from-port="0" to-layer="1132" to-port="2" />
<edge from-layer="1132" from-port="3" to-layer="1134" to-port="0" />
<edge from-layer="1133" from-port="0" to-layer="1134" to-port="1" />
<edge from-layer="1134" from-port="2" to-layer="1135" to-port="2" />
<edge from-layer="1135" from-port="3" to-layer="1136" to-port="1" />
<edge from-layer="1136" from-port="2" to-layer="1138" to-port="0" />
<edge from-layer="1137" from-port="0" to-layer="1138" to-port="1" />
<edge from-layer="1138" from-port="2" to-layer="1139" to-port="1" />
<edge from-layer="1139" from-port="2" to-layer="1140" to-port="0" />
<edge from-layer="1140" from-port="1" to-layer="1141" to-port="0" />
<edge from-layer="1141" from-port="1" to-layer="1142" to-port="0" />
<edge from-layer="1142" from-port="1" to-layer="1182" to-port="0" />
<edge from-layer="1143" from-port="0" to-layer="1146" to-port="0" />
<edge from-layer="1144" from-port="0" to-layer="1145" to-port="1" />
<edge from-layer="1145" from-port="2" to-layer="1146" to-port="1" />
<edge from-layer="1146" from-port="2" to-layer="1171" to-port="0" />
<edge from-layer="1146" from-port="2" to-layer="1147" to-port="0" />
<edge from-layer="1146" from-port="2" to-layer="1160" to-port="0" />
<edge from-layer="1146" from-port="2" to-layer="1153" to-port="0" />
<edge from-layer="1147" from-port="1" to-layer="1150" to-port="0" />
<edge from-layer="1148" from-port="0" to-layer="1150" to-port="1" />
<edge from-layer="1149" from-port="0" to-layer="1150" to-port="2" />
<edge from-layer="1150" from-port="3" to-layer="1175" to-port="0" />
<edge from-layer="1150" from-port="3" to-layer="1152" to-port="0" />
<edge from-layer="1151" from-port="0" to-layer="1152" to-port="1" />
<edge from-layer="1152" from-port="2" to-layer="1170" to-port="0" />
<edge from-layer="1153" from-port="1" to-layer="1156" to-port="0" />
<edge from-layer="1154" from-port="0" to-layer="1156" to-port="1" />
<edge from-layer="1155" from-port="0" to-layer="1156" to-port="2" />
<edge from-layer="1156" from-port="3" to-layer="1158" to-port="0" />
<edge from-layer="1156" from-port="3" to-layer="1177" to-port="0" />
<edge from-layer="1157" from-port="0" to-layer="1158" to-port="1" />
<edge from-layer="1158" from-port="2" to-layer="1170" to-port="1" />
<edge from-layer="1159" from-port="0" to-layer="1170" to-port="2" />
<edge from-layer="1160" from-port="1" to-layer="1163" to-port="0" />
<edge from-layer="1161" from-port="0" to-layer="1163" to-port="1" />
<edge from-layer="1162" from-port="0" to-layer="1163" to-port="2" />
<edge from-layer="1163" from-port="3" to-layer="1165" to-port="0" />
<edge from-layer="1164" from-port="0" to-layer="1165" to-port="1" />
<edge from-layer="1165" from-port="2" to-layer="1166" to-port="0" />
<edge from-layer="1166" from-port="1" to-layer="1167" to-port="0" />
<edge from-layer="1167" from-port="1" to-layer="1179" to-port="0" />
<edge from-layer="1167" from-port="1" to-layer="1169" to-port="0" />
<edge from-layer="1168" from-port="0" to-layer="1169" to-port="1" />
<edge from-layer="1169" from-port="2" to-layer="1170" to-port="3" />
<edge from-layer="1170" from-port="4" to-layer="1171" to-port="1" />
<edge from-layer="1171" from-port="2" to-layer="1173" to-port="0" />
<edge from-layer="1172" from-port="0" to-layer="1173" to-port="1" />
<edge from-layer="1173" from-port="2" to-layer="1181" to-port="0" />
<edge from-layer="1174" from-port="0" to-layer="1175" to-port="1" />
<edge from-layer="1175" from-port="2" to-layer="1180" to-port="0" />
<edge from-layer="1176" from-port="0" to-layer="1177" to-port="1" />
<edge from-layer="1177" from-port="2" to-layer="1180" to-port="1" />
<edge from-layer="1178" from-port="0" to-layer="1179" to-port="1" />
<edge from-layer="1179" from-port="2" to-layer="1180" to-port="2" />
<edge from-layer="1180" from-port="3" to-layer="1181" to-port="1" />
<edge from-layer="1181" from-port="2" to-layer="1182" to-port="1" />
<edge from-layer="1182" from-port="2" to-layer="1183" to-port="0" />
<edge from-layer="1182" from-port="2" to-layer="1207" to-port="0" />
<edge from-layer="1182" from-port="2" to-layer="1200" to-port="0" />
<edge from-layer="1182" from-port="2" to-layer="1194" to-port="0" />
<edge from-layer="1183" from-port="1" to-layer="1186" to-port="0" />
<edge from-layer="1184" from-port="0" to-layer="1186" to-port="1" />
<edge from-layer="1185" from-port="0" to-layer="1186" to-port="2" />
<edge from-layer="1186" from-port="3" to-layer="1188" to-port="0" />
<edge from-layer="1187" from-port="0" to-layer="1188" to-port="1" />
<edge from-layer="1188" from-port="2" to-layer="1189" to-port="0" />
<edge from-layer="1189" from-port="1" to-layer="1190" to-port="0" />
<edge from-layer="1190" from-port="1" to-layer="1211" to-port="0" />
<edge from-layer="1190" from-port="1" to-layer="1192" to-port="0" />
<edge from-layer="1191" from-port="0" to-layer="1192" to-port="1" />
<edge from-layer="1192" from-port="2" to-layer="1206" to-port="0" />
<edge from-layer="1193" from-port="0" to-layer="1206" to-port="1" />
<edge from-layer="1194" from-port="1" to-layer="1197" to-port="0" />
<edge from-layer="1195" from-port="0" to-layer="1197" to-port="1" />
<edge from-layer="1196" from-port="0" to-layer="1197" to-port="2" />
<edge from-layer="1197" from-port="3" to-layer="1199" to-port="0" />
<edge from-layer="1197" from-port="3" to-layer="1213" to-port="0" />
<edge from-layer="1198" from-port="0" to-layer="1199" to-port="1" />
<edge from-layer="1199" from-port="2" to-layer="1206" to-port="2" />
<edge from-layer="1200" from-port="1" to-layer="1203" to-port="0" />
<edge from-layer="1201" from-port="0" to-layer="1203" to-port="1" />
<edge from-layer="1202" from-port="0" to-layer="1203" to-port="2" />
<edge from-layer="1203" from-port="3" to-layer="1215" to-port="0" />
<edge from-layer="1203" from-port="3" to-layer="1205" to-port="0" />
<edge from-layer="1204" from-port="0" to-layer="1205" to-port="1" />
<edge from-layer="1205" from-port="2" to-layer="1206" to-port="3" />
<edge from-layer="1206" from-port="4" to-layer="1207" to-port="1" />
<edge from-layer="1207" from-port="2" to-layer="1209" to-port="0" />
<edge from-layer="1208" from-port="0" to-layer="1209" to-port="1" />
<edge from-layer="1209" from-port="2" to-layer="1217" to-port="0" />
<edge from-layer="1210" from-port="0" to-layer="1211" to-port="1" />
<edge from-layer="1211" from-port="2" to-layer="1216" to-port="0" />
<edge from-layer="1212" from-port="0" to-layer="1213" to-port="1" />
<edge from-layer="1213" from-port="2" to-layer="1216" to-port="1" />
<edge from-layer="1214" from-port="0" to-layer="1215" to-port="1" />
<edge from-layer="1215" from-port="2" to-layer="1216" to-port="2" />
<edge from-layer="1216" from-port="3" to-layer="1217" to-port="1" />
<edge from-layer="1217" from-port="2" to-layer="1219" to-port="0" />
<edge from-layer="1218" from-port="0" to-layer="1219" to-port="1" />
<edge from-layer="1219" from-port="2" to-layer="1220" to-port="1" />
<edge from-layer="1220" from-port="2" to-layer="1222" to-port="0" />
<edge from-layer="1221" from-port="0" to-layer="1222" to-port="1" />
<edge from-layer="1222" from-port="2" to-layer="1238" to-port="0" />
<edge from-layer="1223" from-port="0" to-layer="1224" to-port="1" />
<edge from-layer="1224" from-port="2" to-layer="1225" to-port="0" />
<edge from-layer="1225" from-port="1" to-layer="1228" to-port="0" />
<edge from-layer="1226" from-port="0" to-layer="1228" to-port="1" />
<edge from-layer="1227" from-port="0" to-layer="1228" to-port="2" />
<edge from-layer="1228" from-port="3" to-layer="1230" to-port="0" />
<edge from-layer="1229" from-port="0" to-layer="1230" to-port="1" />
<edge from-layer="1230" from-port="2" to-layer="1237" to-port="0" />
<edge from-layer="1231" from-port="0" to-layer="1232" to-port="1" />
<edge from-layer="1232" from-port="2" to-layer="1237" to-port="1" />
<edge from-layer="1233" from-port="0" to-layer="1234" to-port="1" />
<edge from-layer="1234" from-port="2" to-layer="1237" to-port="2" />
<edge from-layer="1235" from-port="0" to-layer="1236" to-port="1" />
<edge from-layer="1236" from-port="2" to-layer="1237" to-port="3" />
<edge from-layer="1237" from-port="4" to-layer="1238" to-port="1" />
<edge from-layer="1238" from-port="2" to-layer="1239" to-port="0" />
<edge from-layer="1239" from-port="2" to-layer="1241" to-port="0" />
<edge from-layer="1240" from-port="0" to-layer="1241" to-port="1" />
<edge from-layer="1241" from-port="2" to-layer="1243" to-port="0" />
<edge from-layer="1241" from-port="2" to-layer="1332" to-port="0" />
<edge from-layer="1241" from-port="2" to-layer="1266" to-port="0" />
<edge from-layer="1242" from-port="0" to-layer="1243" to-port="1" />
<edge from-layer="1243" from-port="2" to-layer="1245" to-port="0" />
<edge from-layer="1244" from-port="0" to-layer="1245" to-port="1" />
<edge from-layer="1245" from-port="2" to-layer="1256" to-port="0" />
<edge from-layer="1245" from-port="2" to-layer="1249" to-port="0" />
<edge from-layer="1246" from-port="0" to-layer="1255" to-port="0" />
<edge from-layer="1247" from-port="0" to-layer="1253" to-port="0" />
<edge from-layer="1247" from-port="0" to-layer="1254" to-port="0" />
<edge from-layer="1248" from-port="0" to-layer="1254" to-port="1" />
<edge from-layer="1249" from-port="1" to-layer="1250" to-port="0" />
<edge from-layer="1250" from-port="1" to-layer="1252" to-port="0" />
<edge from-layer="1250" from-port="1" to-layer="1261" to-port="0" />
<edge from-layer="1251" from-port="0" to-layer="1252" to-port="1" />
<edge from-layer="1252" from-port="2" to-layer="1253" to-port="1" />
<edge from-layer="1253" from-port="2" to-layer="1254" to-port="2" />
<edge from-layer="1254" from-port="3" to-layer="1255" to-port="1" />
<edge from-layer="1255" from-port="2" to-layer="1256" to-port="1" />
<edge from-layer="1256" from-port="2" to-layer="1265" to-port="0" />
<edge from-layer="1257" from-port="0" to-layer="1264" to-port="0" />
<edge from-layer="1258" from-port="0" to-layer="1262" to-port="0" />
<edge from-layer="1258" from-port="0" to-layer="1263" to-port="0" />
<edge from-layer="1259" from-port="0" to-layer="1263" to-port="1" />
<edge from-layer="1260" from-port="0" to-layer="1261" to-port="1" />
<edge from-layer="1261" from-port="2" to-layer="1262" to-port="1" />
<edge from-layer="1262" from-port="2" to-layer="1263" to-port="2" />
<edge from-layer="1263" from-port="3" to-layer="1264" to-port="1" />
<edge from-layer="1264" from-port="2" to-layer="1265" to-port="1" />
<edge from-layer="1265" from-port="2" to-layer="1267" to-port="0" />
<edge from-layer="1266" from-port="1" to-layer="1267" to-port="1" />
<edge from-layer="1267" from-port="2" to-layer="1269" to-port="0" />
<edge from-layer="1268" from-port="0" to-layer="1269" to-port="1" />
<edge from-layer="1269" from-port="2" to-layer="1271" to-port="0" />
<edge from-layer="1270" from-port="0" to-layer="1271" to-port="1" />
<edge from-layer="1271" from-port="2" to-layer="1272" to-port="0" />
<edge from-layer="1271" from-port="2" to-layer="1273" to-port="0" />
<edge from-layer="1272" from-port="1" to-layer="1273" to-port="1" />
<edge from-layer="1273" from-port="2" to-layer="1275" to-port="0" />
<edge from-layer="1274" from-port="0" to-layer="1275" to-port="1" />
<edge from-layer="1275" from-port="2" to-layer="1279" to-port="0" />
<edge from-layer="1275" from-port="2" to-layer="1286" to-port="0" />
<edge from-layer="1276" from-port="0" to-layer="1285" to-port="0" />
<edge from-layer="1277" from-port="0" to-layer="1283" to-port="0" />
<edge from-layer="1277" from-port="0" to-layer="1284" to-port="0" />
<edge from-layer="1278" from-port="0" to-layer="1284" to-port="1" />
<edge from-layer="1279" from-port="1" to-layer="1280" to-port="0" />
<edge from-layer="1280" from-port="1" to-layer="1282" to-port="0" />
<edge from-layer="1281" from-port="0" to-layer="1282" to-port="1" />
<edge from-layer="1282" from-port="2" to-layer="1283" to-port="1" />
<edge from-layer="1283" from-port="2" to-layer="1284" to-port="2" />
<edge from-layer="1284" from-port="3" to-layer="1285" to-port="1" />
<edge from-layer="1285" from-port="2" to-layer="1286" to-port="1" />
<edge from-layer="1286" from-port="2" to-layer="1288" to-port="0" />
<edge from-layer="1286" from-port="2" to-layer="1311" to-port="0" />
<edge from-layer="1287" from-port="0" to-layer="1288" to-port="1" />
<edge from-layer="1288" from-port="2" to-layer="1290" to-port="0" />
<edge from-layer="1289" from-port="0" to-layer="1290" to-port="1" />
<edge from-layer="1290" from-port="2" to-layer="1294" to-port="0" />
<edge from-layer="1290" from-port="2" to-layer="1301" to-port="0" />
<edge from-layer="1291" from-port="0" to-layer="1300" to-port="0" />
<edge from-layer="1292" from-port="0" to-layer="1298" to-port="0" />
<edge from-layer="1292" from-port="0" to-layer="1299" to-port="0" />
<edge from-layer="1293" from-port="0" to-layer="1299" to-port="1" />
<edge from-layer="1294" from-port="1" to-layer="1295" to-port="0" />
<edge from-layer="1295" from-port="1" to-layer="1306" to-port="0" />
<edge from-layer="1295" from-port="1" to-layer="1297" to-port="0" />
<edge from-layer="1296" from-port="0" to-layer="1297" to-port="1" />
<edge from-layer="1297" from-port="2" to-layer="1298" to-port="1" />
<edge from-layer="1298" from-port="2" to-layer="1299" to-port="2" />
<edge from-layer="1299" from-port="3" to-layer="1300" to-port="1" />
<edge from-layer="1300" from-port="2" to-layer="1301" to-port="1" />
<edge from-layer="1301" from-port="2" to-layer="1310" to-port="0" />
<edge from-layer="1302" from-port="0" to-layer="1309" to-port="0" />
<edge from-layer="1303" from-port="0" to-layer="1308" to-port="0" />
<edge from-layer="1303" from-port="0" to-layer="1307" to-port="0" />
<edge from-layer="1304" from-port="0" to-layer="1308" to-port="1" />
<edge from-layer="1305" from-port="0" to-layer="1306" to-port="1" />
<edge from-layer="1306" from-port="2" to-layer="1307" to-port="1" />
<edge from-layer="1307" from-port="2" to-layer="1308" to-port="2" />
<edge from-layer="1308" from-port="3" to-layer="1309" to-port="1" />
<edge from-layer="1309" from-port="2" to-layer="1310" to-port="1" />
<edge from-layer="1310" from-port="2" to-layer="1312" to-port="0" />
<edge from-layer="1311" from-port="1" to-layer="1312" to-port="1" />
<edge from-layer="1312" from-port="2" to-layer="1314" to-port="0" />
<edge from-layer="1313" from-port="0" to-layer="1314" to-port="1" />
<edge from-layer="1314" from-port="2" to-layer="1316" to-port="0" />
<edge from-layer="1315" from-port="0" to-layer="1316" to-port="1" />
<edge from-layer="1316" from-port="2" to-layer="1317" to-port="0" />
<edge from-layer="1316" from-port="2" to-layer="1318" to-port="0" />
<edge from-layer="1317" from-port="1" to-layer="1318" to-port="1" />
<edge from-layer="1318" from-port="2" to-layer="1320" to-port="0" />
<edge from-layer="1319" from-port="0" to-layer="1320" to-port="1" />
<edge from-layer="1320" from-port="2" to-layer="1324" to-port="0" />
<edge from-layer="1320" from-port="2" to-layer="1331" to-port="0" />
<edge from-layer="1321" from-port="0" to-layer="1330" to-port="0" />
<edge from-layer="1322" from-port="0" to-layer="1328" to-port="0" />
<edge from-layer="1322" from-port="0" to-layer="1329" to-port="0" />
<edge from-layer="1323" from-port="0" to-layer="1329" to-port="1" />
<edge from-layer="1324" from-port="1" to-layer="1325" to-port="0" />
<edge from-layer="1325" from-port="1" to-layer="1327" to-port="0" />
<edge from-layer="1326" from-port="0" to-layer="1327" to-port="1" />
<edge from-layer="1327" from-port="2" to-layer="1328" to-port="1" />
<edge from-layer="1328" from-port="2" to-layer="1329" to-port="2" />
<edge from-layer="1329" from-port="3" to-layer="1330" to-port="1" />
<edge from-layer="1330" from-port="2" to-layer="1331" to-port="1" />
<edge from-layer="1331" from-port="2" to-layer="1332" to-port="1" />
<edge from-layer="1332" from-port="2" to-layer="1334" to-port="0" />
<edge from-layer="1333" from-port="0" to-layer="1334" to-port="1" />
<edge from-layer="1334" from-port="2" to-layer="1336" to-port="0" />
<edge from-layer="1334" from-port="2" to-layer="1359" to-port="0" />
<edge from-layer="1335" from-port="0" to-layer="1336" to-port="1" />
<edge from-layer="1336" from-port="2" to-layer="1338" to-port="0" />
<edge from-layer="1337" from-port="0" to-layer="1338" to-port="1" />
<edge from-layer="1338" from-port="2" to-layer="1342" to-port="0" />
<edge from-layer="1338" from-port="2" to-layer="1349" to-port="0" />
<edge from-layer="1339" from-port="0" to-layer="1348" to-port="0" />
<edge from-layer="1340" from-port="0" to-layer="1346" to-port="0" />
<edge from-layer="1340" from-port="0" to-layer="1347" to-port="0" />
<edge from-layer="1341" from-port="0" to-layer="1347" to-port="1" />
<edge from-layer="1342" from-port="1" to-layer="1343" to-port="0" />
<edge from-layer="1343" from-port="1" to-layer="1345" to-port="0" />
<edge from-layer="1343" from-port="1" to-layer="1354" to-port="0" />
<edge from-layer="1344" from-port="0" to-layer="1345" to-port="1" />
<edge from-layer="1345" from-port="2" to-layer="1346" to-port="1" />
<edge from-layer="1346" from-port="2" to-layer="1347" to-port="2" />
<edge from-layer="1347" from-port="3" to-layer="1348" to-port="1" />
<edge from-layer="1348" from-port="2" to-layer="1349" to-port="1" />
<edge from-layer="1349" from-port="2" to-layer="1358" to-port="0" />
<edge from-layer="1350" from-port="0" to-layer="1357" to-port="0" />
<edge from-layer="1351" from-port="0" to-layer="1356" to-port="0" />
<edge from-layer="1351" from-port="0" to-layer="1355" to-port="0" />
<edge from-layer="1352" from-port="0" to-layer="1356" to-port="1" />
<edge from-layer="1353" from-port="0" to-layer="1354" to-port="1" />
<edge from-layer="1354" from-port="2" to-layer="1355" to-port="1" />
<edge from-layer="1355" from-port="2" to-layer="1356" to-port="2" />
<edge from-layer="1356" from-port="3" to-layer="1357" to-port="1" />
<edge from-layer="1357" from-port="2" to-layer="1358" to-port="1" />
<edge from-layer="1358" from-port="2" to-layer="1360" to-port="0" />
<edge from-layer="1359" from-port="1" to-layer="1360" to-port="1" />
<edge from-layer="1360" from-port="2" to-layer="1362" to-port="0" />
<edge from-layer="1361" from-port="0" to-layer="1362" to-port="1" />
<edge from-layer="1362" from-port="2" to-layer="1364" to-port="0" />
<edge from-layer="1363" from-port="0" to-layer="1364" to-port="1" />
<edge from-layer="1364" from-port="2" to-layer="1365" to-port="0" />
<edge from-layer="1364" from-port="2" to-layer="1366" to-port="0" />
<edge from-layer="1365" from-port="1" to-layer="1366" to-port="1" />
<edge from-layer="1366" from-port="2" to-layer="1368" to-port="0" />
<edge from-layer="1367" from-port="0" to-layer="1368" to-port="1" />
<edge from-layer="1368" from-port="2" to-layer="1372" to-port="0" />
<edge from-layer="1368" from-port="2" to-layer="1379" to-port="0" />
<edge from-layer="1369" from-port="0" to-layer="1378" to-port="0" />
<edge from-layer="1370" from-port="0" to-layer="1376" to-port="0" />
<edge from-layer="1370" from-port="0" to-layer="1377" to-port="0" />
<edge from-layer="1371" from-port="0" to-layer="1377" to-port="1" />
<edge from-layer="1372" from-port="1" to-layer="1373" to-port="0" />
<edge from-layer="1373" from-port="1" to-layer="1375" to-port="0" />
<edge from-layer="1374" from-port="0" to-layer="1375" to-port="1" />
<edge from-layer="1375" from-port="2" to-layer="1376" to-port="1" />
<edge from-layer="1376" from-port="2" to-layer="1377" to-port="2" />
<edge from-layer="1377" from-port="3" to-layer="1378" to-port="1" />
<edge from-layer="1378" from-port="2" to-layer="1379" to-port="1" />
<edge from-layer="1379" from-port="2" to-layer="1381" to-port="0" />
<edge from-layer="1380" from-port="0" to-layer="1381" to-port="1" />
<edge from-layer="1381" from-port="2" to-layer="1385" to-port="0" />
<edge from-layer="1381" from-port="2" to-layer="1392" to-port="0" />
<edge from-layer="1382" from-port="0" to-layer="1391" to-port="0" />
<edge from-layer="1383" from-port="0" to-layer="1389" to-port="0" />
<edge from-layer="1383" from-port="0" to-layer="1390" to-port="0" />
<edge from-layer="1384" from-port="0" to-layer="1390" to-port="1" />
<edge from-layer="1385" from-port="1" to-layer="1386" to-port="0" />
<edge from-layer="1386" from-port="1" to-layer="1388" to-port="0" />
<edge from-layer="1387" from-port="0" to-layer="1388" to-port="1" />
<edge from-layer="1388" from-port="2" to-layer="1389" to-port="1" />
<edge from-layer="1389" from-port="2" to-layer="1390" to-port="2" />
<edge from-layer="1390" from-port="3" to-layer="1391" to-port="1" />
<edge from-layer="1391" from-port="2" to-layer="1392" to-port="1" />
<edge from-layer="1392" from-port="2" to-layer="1415" to-port="0" />
<edge from-layer="1392" from-port="2" to-layer="1407" to-port="0" />
<edge from-layer="1392" from-port="2" to-layer="1394" to-port="0" />
<edge from-layer="1393" from-port="0" to-layer="1407" to-port="1" />
<edge from-layer="1394" from-port="1" to-layer="1397" to-port="0" />
<edge from-layer="1395" from-port="0" to-layer="1397" to-port="1" />
<edge from-layer="1395" from-port="0" to-layer="1415" to-port="4" />
<edge from-layer="1395" from-port="0" to-layer="1407" to-port="4" />
<edge from-layer="1396" from-port="0" to-layer="1397" to-port="2" />
<edge from-layer="1397" from-port="3" to-layer="1399" to-port="0" />
<edge from-layer="1398" from-port="0" to-layer="1399" to-port="1" />
<edge from-layer="1399" from-port="2" to-layer="1401" to-port="0" />
<edge from-layer="1400" from-port="0" to-layer="1401" to-port="1" />
<edge from-layer="1401" from-port="2" to-layer="1403" to-port="0" />
<edge from-layer="1401" from-port="2" to-layer="1411" to-port="0" />
<edge from-layer="1402" from-port="0" to-layer="1403" to-port="1" />
<edge from-layer="1403" from-port="2" to-layer="1407" to-port="2" />
<edge from-layer="1403" from-port="2" to-layer="1413" to-port="0" />
<edge from-layer="1403" from-port="2" to-layer="1415" to-port="1" />
<edge from-layer="1404" from-port="0" to-layer="1406" to-port="0" />
<edge from-layer="1405" from-port="0" to-layer="1406" to-port="1" />
<edge from-layer="1406" from-port="2" to-layer="1407" to-port="3" />
<edge from-layer="1407" from-port="5" to-layer="1441" to-port="0" />
<edge from-layer="1407" from-port="5" to-layer="1435" to-port="0" />
<edge from-layer="1407" from-port="5" to-layer="1468" to-port="0" />
<edge from-layer="1407" from-port="5" to-layer="1429" to-port="0" />
<edge from-layer="1407" from-port="5" to-layer="1423" to-port="0" />
<edge from-layer="1408" from-port="0" to-layer="1417" to-port="0" />
<edge from-layer="1409" from-port="0" to-layer="1416" to-port="0" />
<edge from-layer="1410" from-port="0" to-layer="1411" to-port="1" />
<edge from-layer="1411" from-port="2" to-layer="1415" to-port="2" />
<edge from-layer="1412" from-port="0" to-layer="1414" to-port="0" />
<edge from-layer="1413" from-port="1" to-layer="1414" to-port="1" />
<edge from-layer="1414" from-port="2" to-layer="1415" to-port="3" />
<edge from-layer="1415" from-port="5" to-layer="1416" to-port="1" />
<edge from-layer="1416" from-port="2" to-layer="1417" to-port="1" />
<edge from-layer="1417" from-port="2" to-layer="1419" to-port="0" />
<edge from-layer="1418" from-port="0" to-layer="1419" to-port="1" />
<edge from-layer="1419" from-port="2" to-layer="1420" to-port="0" />
<edge from-layer="1420" from-port="1" to-layer="1467" to-port="0" />
<edge from-layer="1421" from-port="0" to-layer="1463" to-port="0" />
<edge from-layer="1422" from-port="0" to-layer="1448" to-port="0" />
<edge from-layer="1423" from-port="1" to-layer="1426" to-port="0" />
<edge from-layer="1424" from-port="0" to-layer="1426" to-port="1" />
<edge from-layer="1425" from-port="0" to-layer="1426" to-port="2" />
<edge from-layer="1426" from-port="3" to-layer="1428" to-port="0" />
<edge from-layer="1427" from-port="0" to-layer="1428" to-port="1" />
<edge from-layer="1428" from-port="2" to-layer="1447" to-port="0" />
<edge from-layer="1429" from-port="1" to-layer="1432" to-port="0" />
<edge from-layer="1430" from-port="0" to-layer="1432" to-port="1" />
<edge from-layer="1431" from-port="0" to-layer="1432" to-port="2" />
<edge from-layer="1432" from-port="3" to-layer="1434" to-port="0" />
<edge from-layer="1433" from-port="0" to-layer="1434" to-port="1" />
<edge from-layer="1434" from-port="2" to-layer="1447" to-port="1" />
<edge from-layer="1435" from-port="1" to-layer="1438" to-port="0" />
<edge from-layer="1436" from-port="0" to-layer="1438" to-port="1" />
<edge from-layer="1437" from-port="0" to-layer="1438" to-port="2" />
<edge from-layer="1438" from-port="3" to-layer="1440" to-port="0" />
<edge from-layer="1439" from-port="0" to-layer="1440" to-port="1" />
<edge from-layer="1440" from-port="2" to-layer="1447" to-port="2" />
<edge from-layer="1441" from-port="1" to-layer="1444" to-port="0" />
<edge from-layer="1442" from-port="0" to-layer="1444" to-port="1" />
<edge from-layer="1443" from-port="0" to-layer="1444" to-port="2" />
<edge from-layer="1444" from-port="3" to-layer="1446" to-port="0" />
<edge from-layer="1445" from-port="0" to-layer="1446" to-port="1" />
<edge from-layer="1446" from-port="2" to-layer="1447" to-port="3" />
<edge from-layer="1447" from-port="4" to-layer="1448" to-port="1" />
<edge from-layer="1448" from-port="2" to-layer="1449" to-port="0" />
<edge from-layer="1449" from-port="1" to-layer="1452" to-port="0" />
<edge from-layer="1449" from-port="1" to-layer="1457" to-port="0" />
<edge from-layer="1450" from-port="0" to-layer="1452" to-port="1" />
<edge from-layer="1450" from-port="0" to-layer="1457" to-port="1" />
<edge from-layer="1451" from-port="0" to-layer="1452" to-port="2" />
<edge from-layer="1451" from-port="0" to-layer="1457" to-port="2" />
<edge from-layer="1452" from-port="3" to-layer="1453" to-port="0" />
<edge from-layer="1453" from-port="1" to-layer="1455" to-port="0" />
<edge from-layer="1454" from-port="0" to-layer="1455" to-port="1" />
<edge from-layer="1455" from-port="2" to-layer="1456" to-port="0" />
<edge from-layer="1456" from-port="1" to-layer="1462" to-port="0" />
<edge from-layer="1457" from-port="3" to-layer="1460" to-port="1" />
<edge from-layer="1457" from-port="3" to-layer="1459" to-port="0" />
<edge from-layer="1458" from-port="0" to-layer="1459" to-port="1" />
<edge from-layer="1459" from-port="2" to-layer="1460" to-port="0" />
<edge from-layer="1460" from-port="2" to-layer="1461" to-port="0" />
<edge from-layer="1461" from-port="1" to-layer="1462" to-port="1" />
<edge from-layer="1462" from-port="2" to-layer="1463" to-port="1" />
<edge from-layer="1463" from-port="2" to-layer="1465" to-port="0" />
<edge from-layer="1464" from-port="0" to-layer="1465" to-port="1" />
<edge from-layer="1465" from-port="2" to-layer="1466" to-port="0" />
<edge from-layer="1466" from-port="1" to-layer="1467" to-port="1" />
<edge from-layer="1467" from-port="2" to-layer="1468" to-port="1" />
<edge from-layer="1468" from-port="2" to-layer="1469" to-port="0" />
</edges>
<rt_info />
</net>