<?xml version="1.0"?>
<net name="Model8046" version="11">
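	<!-- OpenVINO IR, version 11. Judging from the graph below, this appears to be a BERT-large-style text encoder: vocabulary 30522, hidden size 1024, 16 attention heads of size 64, feed-forward size 4096, up to 512 positions, 2 token types. Const layers reference weight data in the companion .bin file by offset and size. -->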
	<layers>
		<layer id="2" name="input_ids" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="input_ids">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="1" name="attention_mask" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="attention_mask">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="0" name="token_type_ids" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="token_type_ids">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="3" name="self.embeddings.word_embeddings.weight" type="Const" version="opset1">
			<data element_type="f32" shape="30522, 1024" offset="0" size="125018112" />
			<output>
				<port id="0" precision="FP32" names="self.embeddings.word_embeddings.weight">
					<dim>30522</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="4" name="__module.embeddings.word_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="5" name="__module.embeddings.word_embeddings/aten::embedding/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="" offset="125018112" size="4" />
			<output>
				<port id="0" precision="I32" />
			</output>
		</layer>
		<layer id="6" name="__module.embeddings.word_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="FP32">
					<dim>30522</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32" />
			</input>
			<output>
				<port id="3" precision="FP32" names="79,inputs_embeds">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="7" name="self.embeddings.token_type_embeddings.weight" type="Const" version="opset1">
			<data element_type="f32" shape="2, 1024" offset="125018116" size="8192" />
			<output>
				<port id="0" precision="FP32" names="self.embeddings.token_type_embeddings.weight">
					<dim>2</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="8" name="__module.embeddings.token_type_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="9" name="__module.embeddings.token_type_embeddings/aten::embedding/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="" offset="125018112" size="4" />
			<output>
				<port id="0" precision="I32" />
			</output>
		</layer>
		<layer id="10" name="__module.embeddings.token_type_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="FP32">
					<dim>2</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32" />
			</input>
			<output>
				<port id="3" precision="FP32" names="81,token_type_embeddings.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="11" name="__module.embeddings/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="82_1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="12" name="self.embeddings.position_embeddings.weight" type="Const" version="opset1">
			<data element_type="f32" shape="512, 1024" offset="125026308" size="2097152" />
			<output>
				<port id="0" precision="FP32" names="self.embeddings.position_embeddings.weight">
					<dim>512</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="13" name="__module.embeddings/aten::slice/Slice" type="Const" version="opset1">
			<data element_type="i64" shape="1, 512" offset="127123460" size="4096" />
			<output>
				<port id="0" precision="I64" names="76">
					<dim>1</dim>
					<dim>512</dim>
				</port>
			</output>
		</layer>
		<layer id="14" name="__module.embeddings/aten::slice/Reshape" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="127127556" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="15" name="ShapeOf_6549601" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="16" name="Constant_6549741" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="127127564" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="17" name="Constant_6549603" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="127127556" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="18" name="Gather_6549604" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64" names="10,17,19,72,74,75,8">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="19" name="__module.embeddings/aten::slice/Reshape_2" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="127127564" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="20" name="__module.embeddings/aten::slice/Reshape_3" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="127127564" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="21" name="__module.embeddings/aten::slice/Slice_1" type="Slice" version="opset8">
			<input>
				<port id="0" precision="I64">
					<dim>1</dim>
					<dim>512</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64">
					<dim>1</dim>
				</port>
				<port id="3" precision="I64">
					<dim>1</dim>
				</port>
				<port id="4" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="5" precision="I64" names="77">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="22" name="__module.embeddings.position_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="23" name="__module.embeddings.position_embeddings/aten::embedding/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="" offset="125018112" size="4" />
			<output>
				<port id="0" precision="I32" />
			</output>
		</layer>
		<layer id="24" name="__module.embeddings.position_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="FP32">
					<dim>512</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32" />
			</input>
			<output>
				<port id="3" precision="FP32" names="84,position_embeddings.1">
					<dim>1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="25" name="__module.embeddings/aten::add_/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="82,embeddings.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="26" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="27" name="__module.embeddings.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="28" name="Constant_6549170" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="127127576" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="29" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="30" name="Constant_6549171" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="127131672" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="31" name="__module.embeddings.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="89,input.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
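		<!-- Encoder block 0: query/key/value linear projections (1024 to 1024), reshape and permute to 16 heads of 64, ScaledDotProductAttention, output dense with residual add and LayerNorm, then the feed-forward block (1024 to 4096, GELU, 4096 to 1024) with residual add and LayerNorm. -->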
<layer id="32" name="self.encoder.layer.0.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="127135768" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="33" name="__module.encoder.layer.0.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="34" name="Constant_6549172" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="131330072" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="35" name="__module.encoder.layer.0.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="165,x.1">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="36" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="37" name="__module.encoder.layer.0.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="169,x.3">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="38" name="Constant_6533773" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="170">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="39" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="171">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="40" name="self.encoder.layer.0.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="131334232" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="41" name="__module.encoder.layer.0.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="42" name="Constant_6549173" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="135528536" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="43" name="__module.encoder.layer.0.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="174,x.5">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="44" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="45" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="178,x.7">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="46" name="Constant_6533798" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="179">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="47" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="180">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="48" name="self.encoder.layer.0.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="135532632" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="49" name="__module.encoder.layer.0.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="50" name="Constant_6549174" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="139726936" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="51" name="__module.encoder.layer.0.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="183,x.9">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="52" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="53" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="187,x.11">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="54" name="Constant_6533823" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="188">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="55" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="189">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
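		<!-- Attention-mask subgraph (layers 56 to 76): attention_mask is unsqueezed to [batch, 1, 1, seq] and broadcast, inverted via aten::rsub, and masked positions are filled through Select with the scalar constant stored at offset 139731060; the result feeds input port 3 of every ScaledDotProductAttention. -->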
<layer id="56" name="Constant_6549176" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1, 1" offset="139731032" size="4" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="57" name="25" type="Const" version="opset1">
|
|
<data element_type="i64" shape="" offset="127127564" size="8" />
|
|
<output>
|
|
<port id="0" precision="I64" names="25" />
|
|
</output>
|
|
</layer>
|
|
<layer id="58" name="aten::unsqueeze/Unsqueeze" type="Unsqueeze" version="opset1">
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
<port id="1" precision="I64" />
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="I64" names="26">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="59" name="27" type="Const" version="opset1">
|
|
<data element_type="i64" shape="" offset="139731036" size="8" />
|
|
<output>
|
|
<port id="0" precision="I64" names="27" />
|
|
</output>
|
|
</layer>
|
|
<layer id="60" name="aten::unsqueeze/Unsqueeze_1" type="Unsqueeze" version="opset1">
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
<port id="1" precision="I64" />
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="I64" names="28,33">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="61" name="Constant_6549744" type="Const" version="opset1">
|
|
<data element_type="i64" shape="1" offset="127127556" size="8" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="62" name="Constant_6549611" type="Const" version="opset1">
|
|
<data element_type="i64" shape="" offset="127127556" size="8" />
|
|
<output>
|
|
<port id="0" precision="I64" />
|
|
</output>
|
|
</layer>
|
|
<layer id="63" name="Gather_6549612" type="Gather" version="opset8">
|
|
<data batch_dims="0" />
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>2</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>1</dim>
|
|
</port>
|
|
<port id="2" precision="I64" />
|
|
</input>
|
|
<output>
|
|
<port id="3" precision="I64" names="13,15">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="64" name="Constant_6546582" type="Const" version="opset1">
|
|
<data element_type="i64" shape="1" offset="127127564" size="8" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="65" name="Constant_6549746" type="Const" version="opset1">
|
|
<data element_type="i64" shape="2" offset="139731044" size="16" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>2</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="66" name="Constant_6549747" type="Const" version="opset1">
|
|
<data element_type="i64" shape="" offset="127127556" size="8" />
|
|
<output>
|
|
<port id="0" precision="I64" />
|
|
</output>
|
|
</layer>
|
|
<layer id="67" name="Gather_6549748" type="Gather" version="opset8">
|
|
<data batch_dims="0" />
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>2</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>2</dim>
|
|
</port>
|
|
<port id="2" precision="I64" />
|
|
</input>
|
|
<output>
|
|
<port id="3" precision="I64">
|
|
<dim>2</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="68" name="prim::ListConstruct/Concat" type="Concat" version="opset1">
|
|
<data axis="0" />
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>1</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>1</dim>
|
|
</port>
|
|
<port id="2" precision="I64">
|
|
<dim>2</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="3" precision="I64" names="35">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="69" name="aten::expand/Broadcast" type="Broadcast" version="opset3">
|
|
<data mode="bidirectional" />
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="I64" names="37">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="70" name="aten::to/Convert" type="Convert" version="opset1">
|
|
<data destination_type="f32" />
|
|
<input>
|
|
<port id="0" precision="I64">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="42">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="71" name="Constant_6549175" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1, 1" offset="139731032" size="4" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="72" name="aten::rsub/Multiply" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="73" name="aten::rsub/Subtract" type="Subtract" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="45,inverted_mask">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="74" name="aten::to/Convert_1" type="Convert" version="opset1">
|
|
<data destination_type="boolean" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="BOOL" names="50">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="75" name="aten::masked_fill/ConvertLike" type="Const" version="opset1">
|
|
<data element_type="f32" shape="" offset="139731060" size="4" />
|
|
<output>
|
|
<port id="0" precision="FP32" />
|
|
</output>
|
|
</layer>
|
|
<layer id="76" name="aten::masked_fill/Select" type="Select" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="BOOL">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
<port id="1" precision="FP32" />
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="3" precision="FP32" names="52">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="77" name="__module.encoder.layer.0.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="190,attn_output.1">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="78" name="__module.encoder.layer.0.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="79" name="__module.encoder.layer.0.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="191,attn_output.3">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="80" name="Constant_6549620" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="81" name="__module.encoder.layer.0.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="193">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="82" name="self.encoder.layer.0.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="139731104" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="83" name="__module.encoder.layer.0.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="84" name="Constant_6549177" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="143925408" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="85" name="__module.encoder.layer.0.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="199,input.3">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="86" name="__module.encoder.layer.0.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="201">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="87" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="88" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="89" name="Constant_6549178" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="143929504" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="90" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="91" name="Constant_6549179" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="143933600" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="92" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="205,input_tensor.1">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="93" name="self.encoder.layer.0.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="143937696" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.0.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="94" name="__module.encoder.layer.0.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="95" name="Constant_6549180" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="160714912" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="96" name="__module.encoder.layer.0.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="210">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="97" name="__module.encoder.layer.0.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="211">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="98" name="self.encoder.layer.0.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="160731296" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.0.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="99" name="__module.encoder.layer.0.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="100" name="Constant_6549181" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="177508512" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="101" name="__module.encoder.layer.0.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="217,input.5">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="102" name="__module.encoder.layer.0.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="219">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="103" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="104" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="105" name="Constant_6549182" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="177512608" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="106" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="107" name="Constant_6549183" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="177516704" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="108" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="223,hidden_states.7">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
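		<!-- Encoder block 1: same structure as block 0 with its own weight tensors; the pattern evidently repeats for the remaining encoder blocks of the full 23533-line file. -->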
<layer id="109" name="self.encoder.layer.1.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="177520800" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="110" name="__module.encoder.layer.1.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="111" name="Constant_6549184" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="181715104" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="112" name="__module.encoder.layer.1.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="236,x.13">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="113" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="114" name="__module.encoder.layer.1.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="240,x.15">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="115" name="Constant_6534005" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="241">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="116" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="242">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="117" name="self.encoder.layer.1.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="181719200" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="118" name="__module.encoder.layer.1.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="119" name="Constant_6549185" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="185913504" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="120" name="__module.encoder.layer.1.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="245,x.17">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="121" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="122" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="249,x.19">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="123" name="Constant_6534028" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="250">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="124" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="251">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="125" name="self.encoder.layer.1.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="185917600" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="126" name="__module.encoder.layer.1.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="127" name="Constant_6549186" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="190111904" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="128" name="__module.encoder.layer.1.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="254,x.21">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="129" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="130" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="258,x.23">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="131" name="Constant_6534051" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="259">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="132" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="260">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
		<layer id="133" name="__module.encoder.layer.1.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="261,attn_output.5">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="134" name="__module.encoder.layer.1.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="139731064" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="135" name="__module.encoder.layer.1.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="262,attn_output.7">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="136" name="Constant_6549621" type="Const" version="opset1">
			<data element_type="i64" shape="3" offset="139731080" size="24" />
			<output>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="137" name="__module.encoder.layer.1.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="264">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="138" name="self.encoder.layer.1.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="190116000" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.1.attention.output.dense.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="139" name="__module.encoder.layer.1.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="140" name="Constant_6549187" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="194310304" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="141" name="__module.encoder.layer.1.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="270,input.7">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="142" name="__module.encoder.layer.1.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="272">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="143" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="144" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="145" name="Constant_6549188" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="194314400" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="146" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="147" name="Constant_6549189" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="194318496" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="148" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="276,input_tensor.3">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="149" name="self.encoder.layer.1.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="4096, 1024" offset="194322592" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.1.intermediate.dense.weight">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="150" name="__module.encoder.layer.1.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="151" name="Constant_6549190" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 4096" offset="211099808" size="16384" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="152" name="__module.encoder.layer.1.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="281">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="153" name="__module.encoder.layer.1.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="282">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="154" name="self.encoder.layer.1.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 4096" offset="211116192" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.1.output.dense.weight">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="155" name="__module.encoder.layer.1.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="156" name="Constant_6549191" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="227893408" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="157" name="__module.encoder.layer.1.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="288,input.9">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="158" name="__module.encoder.layer.1.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="290">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="159" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="160" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="161" name="Constant_6549192" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="227897504" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="162" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="163" name="Constant_6549193" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="227901600" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="164" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="294,hidden_states.13">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
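		<!-- Encoder layer 2 (layers 165-220): same pattern as layer 1 - Q/K/V linear projections reshaped into 16 heads of 64, scaled dot-product attention, output projection with residual add and LayerNorm (as MVN + scale + shift), then the 1024 -> 4096 GELU -> 1024 feed-forward block. -->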
		<layer id="165" name="self.encoder.layer.2.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="227905696" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.query.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="166" name="__module.encoder.layer.2.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="167" name="Constant_6549194" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="232100000" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="168" name="__module.encoder.layer.2.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="307,x.25">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="169" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="170" name="__module.encoder.layer.2.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="311,x.27">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="171" name="Constant_6534231" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="312">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="172" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="313">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="173" name="self.encoder.layer.2.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="232104096" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.key.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="174" name="__module.encoder.layer.2.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="175" name="Constant_6549195" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="236298400" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="176" name="__module.encoder.layer.2.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="316,x.29">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="177" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="178" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="320,x.31">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="179" name="Constant_6534254" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="321">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="180" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="322">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="181" name="self.encoder.layer.2.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="236302496" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.value.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="182" name="__module.encoder.layer.2.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="183" name="Constant_6549196" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="240496800" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="184" name="__module.encoder.layer.2.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="325,x.33">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="185" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="186" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="329,x.35">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="187" name="Constant_6534277" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="330">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="188" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="331">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="189" name="__module.encoder.layer.2.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="332,attn_output.9">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="190" name="__module.encoder.layer.2.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="139731064" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="191" name="__module.encoder.layer.2.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="333,attn_output.11">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="192" name="Constant_6549622" type="Const" version="opset1">
			<data element_type="i64" shape="3" offset="139731080" size="24" />
			<output>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="193" name="__module.encoder.layer.2.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="335">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="194" name="self.encoder.layer.2.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="240500896" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.2.attention.output.dense.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="195" name="__module.encoder.layer.2.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="196" name="Constant_6549197" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="244695200" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="197" name="__module.encoder.layer.2.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="341,input.11">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="198" name="__module.encoder.layer.2.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="343">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="199" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="200" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="201" name="Constant_6549198" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="244699296" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="202" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="203" name="Constant_6549199" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="244703392" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="204" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="347,input_tensor.5">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="205" name="self.encoder.layer.2.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="4096, 1024" offset="244707488" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.2.intermediate.dense.weight">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="206" name="__module.encoder.layer.2.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="207" name="Constant_6549200" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 4096" offset="261484704" size="16384" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="208" name="__module.encoder.layer.2.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="352">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="209" name="__module.encoder.layer.2.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="353">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="210" name="self.encoder.layer.2.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 4096" offset="261501088" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.2.output.dense.weight">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="211" name="__module.encoder.layer.2.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="212" name="Constant_6549201" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="278278304" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="213" name="__module.encoder.layer.2.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="359,input.13">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="214" name="__module.encoder.layer.2.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="361">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="215" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="216" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="217" name="Constant_6549202" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="278282400" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="218" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="219" name="Constant_6549203" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="278286496" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="220" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="365,hidden_states.19">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
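		<!-- Encoder layer 3 (layers 221-276): repeats the attention + feed-forward block of the previous layers; only the weight/bias constant offsets into the binary blob differ. -->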
		<layer id="221" name="self.encoder.layer.3.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="278290592" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.query.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="222" name="__module.encoder.layer.3.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="223" name="Constant_6549204" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="282484896" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="224" name="__module.encoder.layer.3.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="378,x.37">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="225" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="226" name="__module.encoder.layer.3.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="382,x.39">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="227" name="Constant_6534457" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="383">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="228" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="384">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="229" name="self.encoder.layer.3.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="282488992" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.key.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="230" name="__module.encoder.layer.3.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="231" name="Constant_6549205" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="286683296" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="232" name="__module.encoder.layer.3.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="387,x.41">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="233" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="234" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="391,x.43">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="235" name="Constant_6534480" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="392">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="236" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="393">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="237" name="self.encoder.layer.3.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="286687392" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.value.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="238" name="__module.encoder.layer.3.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="239" name="Constant_6549206" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="290881696" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="240" name="__module.encoder.layer.3.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="396,x.45">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="241" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="242" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="400,x.47">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="243" name="Constant_6534503" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="401">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="244" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="402">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="245" name="__module.encoder.layer.3.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="403,attn_output.13">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="246" name="__module.encoder.layer.3.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="139731064" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="247" name="__module.encoder.layer.3.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="404,attn_output.15">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="248" name="Constant_6549623" type="Const" version="opset1">
			<data element_type="i64" shape="3" offset="139731080" size="24" />
			<output>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="249" name="__module.encoder.layer.3.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="406">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="250" name="self.encoder.layer.3.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="290885792" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.attention.output.dense.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="251" name="__module.encoder.layer.3.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="252" name="Constant_6549207" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="295080096" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="253" name="__module.encoder.layer.3.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="412,input.15">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="254" name="__module.encoder.layer.3.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="414">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="255" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="256" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="257" name="Constant_6549208" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="295084192" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="258" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="259" name="Constant_6549209" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="295088288" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="260" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="418,input_tensor.7">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="261" name="self.encoder.layer.3.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="4096, 1024" offset="295092384" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.intermediate.dense.weight">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="262" name="__module.encoder.layer.3.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="263" name="Constant_6549210" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 4096" offset="311869600" size="16384" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="264" name="__module.encoder.layer.3.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="423">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="265" name="__module.encoder.layer.3.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="424">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="266" name="self.encoder.layer.3.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 4096" offset="311885984" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.output.dense.weight">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="267" name="__module.encoder.layer.3.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="268" name="Constant_6549211" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="328663200" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="269" name="__module.encoder.layer.3.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="430,input.17">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="270" name="__module.encoder.layer.3.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="432">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="271" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="272" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="273" name="Constant_6549212" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="328667296" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="274" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="275" name="Constant_6549213" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="328671392" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="276" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="436,hidden_states.25">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
|
|
<layer id="277" name="self.encoder.layer.4.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="328675488" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="278" name="__module.encoder.layer.4.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="279" name="Constant_6549214" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="332869792" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="280" name="__module.encoder.layer.4.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="449,x.49">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="281" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="282" name="__module.encoder.layer.4.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="453,x.51">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="283" name="Constant_6534683" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="454">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="284" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="455">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="285" name="self.encoder.layer.4.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="332873888" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="286" name="__module.encoder.layer.4.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="287" name="Constant_6549215" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="337068192" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="288" name="__module.encoder.layer.4.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="458,x.53">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="289" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="290" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="462,x.55">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="291" name="Constant_6534706" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="463">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="292" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="464">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="293" name="self.encoder.layer.4.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="337072288" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="294" name="__module.encoder.layer.4.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="295" name="Constant_6549216" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="341266592" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="296" name="__module.encoder.layer.4.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="467,x.57">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="297" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="298" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="471,x.59">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="299" name="Constant_6534729" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="472">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="300" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="473">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="301" name="__module.encoder.layer.4.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="474,attn_output.17">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="302" name="__module.encoder.layer.4.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="303" name="__module.encoder.layer.4.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="475,attn_output.19">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="304" name="Constant_6549624" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="305" name="__module.encoder.layer.4.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="477">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="306" name="self.encoder.layer.4.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="341270688" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.4.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="307" name="__module.encoder.layer.4.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="308" name="Constant_6549217" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="345464992" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="309" name="__module.encoder.layer.4.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="483,input.19">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="310" name="__module.encoder.layer.4.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="485">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="311" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="312" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="313" name="Constant_6549218" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="345469088" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="314" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="315" name="Constant_6549219" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="345473184" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="316" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="489,input_tensor.9">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="317" name="self.encoder.layer.4.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="345477280" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.4.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="318" name="__module.encoder.layer.4.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="319" name="Constant_6549220" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="362254496" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="320" name="__module.encoder.layer.4.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="494">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="321" name="__module.encoder.layer.4.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="495">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="322" name="self.encoder.layer.4.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="362270880" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.4.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="323" name="__module.encoder.layer.4.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="324" name="Constant_6549221" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="379048096" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="325" name="__module.encoder.layer.4.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="501,input.21">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="326" name="__module.encoder.layer.4.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="503">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="327" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="328" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="329" name="Constant_6549222" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="379052192" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="330" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="331" name="Constant_6549223" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="379056288" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="332" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="507,hidden_states.31">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="333" name="self.encoder.layer.5.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="379060384" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="334" name="__module.encoder.layer.5.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="335" name="Constant_6549224" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="383254688" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="336" name="__module.encoder.layer.5.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="520,x.61">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="337" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="338" name="__module.encoder.layer.5.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="524,x.63">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="339" name="Constant_6534909" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="525">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="340" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="526">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="341" name="self.encoder.layer.5.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="383258784" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="342" name="__module.encoder.layer.5.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="343" name="Constant_6549225" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="387453088" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="344" name="__module.encoder.layer.5.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="529,x.65">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="345" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="346" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="533,x.67">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="347" name="Constant_6534932" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="534">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="348" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="535">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="349" name="self.encoder.layer.5.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="387457184" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="350" name="__module.encoder.layer.5.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="351" name="Constant_6549226" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="391651488" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="352" name="__module.encoder.layer.5.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="538,x.69">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="353" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="354" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="542,x.71">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="355" name="Constant_6534955" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="543">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="356" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="544">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="357" name="__module.encoder.layer.5.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="545,attn_output.21">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="358" name="__module.encoder.layer.5.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="359" name="__module.encoder.layer.5.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="546,attn_output.23">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="360" name="Constant_6549625" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="361" name="__module.encoder.layer.5.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="548">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="362" name="self.encoder.layer.5.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="391655584" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.5.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="363" name="__module.encoder.layer.5.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="364" name="Constant_6549227" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="395849888" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="365" name="__module.encoder.layer.5.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="554,input.23">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="366" name="__module.encoder.layer.5.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="556">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="367" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="368" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="369" name="Constant_6549228" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="395853984" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="370" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="371" name="Constant_6549229" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="395858080" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="372" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="560,input_tensor.11">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="373" name="self.encoder.layer.5.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="395862176" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.5.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="374" name="__module.encoder.layer.5.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="375" name="Constant_6549230" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="412639392" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="376" name="__module.encoder.layer.5.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="565">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="377" name="__module.encoder.layer.5.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="566">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="378" name="self.encoder.layer.5.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="412655776" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.5.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="379" name="__module.encoder.layer.5.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="380" name="Constant_6549231" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="429432992" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="381" name="__module.encoder.layer.5.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="572,input.25">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="382" name="__module.encoder.layer.5.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="574">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="383" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="384" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="385" name="Constant_6549232" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="429437088" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="386" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="387" name="Constant_6549233" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="429441184" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="388" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="578,hidden_states.37">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="389" name="self.encoder.layer.6.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="429445280" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="390" name="__module.encoder.layer.6.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="391" name="Constant_6549234" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="433639584" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="392" name="__module.encoder.layer.6.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="591,x.73">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
</output>
</layer>
<layer id="393" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="394" name="__module.encoder.layer.6.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="595,x.75">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="395" name="Constant_6535135" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="596">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="396" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="597">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="397" name="self.encoder.layer.6.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="433643680" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.key.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="398" name="__module.encoder.layer.6.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="399" name="Constant_6549235" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="437837984" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="400" name="__module.encoder.layer.6.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="600,x.77">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="401" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="402" name="__module.encoder.layer.6.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="604,x.79">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="403" name="Constant_6535158" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="605">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="404" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="606">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="405" name="self.encoder.layer.6.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="437842080" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.value.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="406" name="__module.encoder.layer.6.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="407" name="Constant_6549236" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="442036384" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="408" name="__module.encoder.layer.6.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="609,x.81">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="409" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="410" name="__module.encoder.layer.6.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="613,x.83">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="411" name="Constant_6535181" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="614">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="412" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="615">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="413" name="__module.encoder.layer.6.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="616,attn_output.25">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="414" name="__module.encoder.layer.6.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="139731064" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="415" name="__module.encoder.layer.6.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="617,attn_output.27">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="416" name="Constant_6549626" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="139731080" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="417" name="__module.encoder.layer.6.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="619">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="418" name="self.encoder.layer.6.attention.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="442040480" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.6.attention.output.dense.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="419" name="__module.encoder.layer.6.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="420" name="Constant_6549237" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="446234784" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="421" name="__module.encoder.layer.6.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="625,input.27">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="422" name="__module.encoder.layer.6.attention.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="627">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="423" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="424" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="425" name="Constant_6549238" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="446238880" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="426" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="427" name="Constant_6549239" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="446242976" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="428" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="631,input_tensor.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="429" name="self.encoder.layer.6.intermediate.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="4096, 1024" offset="446247072" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.6.intermediate.dense.weight">
<dim>4096</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="430" name="__module.encoder.layer.6.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>4096</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="431" name="Constant_6549240" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 4096" offset="463024288" size="16384" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="432" name="__module.encoder.layer.6.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="636">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="433" name="__module.encoder.layer.6.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="637">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="434" name="self.encoder.layer.6.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 4096" offset="463040672" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.6.output.dense.weight">
<dim>1024</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="435" name="__module.encoder.layer.6.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="436" name="Constant_6549241" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="479817888" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="437" name="__module.encoder.layer.6.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="643,input.29">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="438" name="__module.encoder.layer.6.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="645">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="439" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="440" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="441" name="Constant_6549242" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="479821984" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="442" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="443" name="Constant_6549243" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="479826080" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="444" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="649,hidden_states.43">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="445" name="self.encoder.layer.7.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="479830176" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.query.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="446" name="__module.encoder.layer.7.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="447" name="Constant_6549244" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="484024480" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="448" name="__module.encoder.layer.7.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="662,x.85">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="449" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="450" name="__module.encoder.layer.7.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="666,x.87">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="451" name="Constant_6535361" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="667">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="452" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="668">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="453" name="self.encoder.layer.7.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="484028576" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.key.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="454" name="__module.encoder.layer.7.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="455" name="Constant_6549245" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="488222880" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="456" name="__module.encoder.layer.7.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="671,x.89">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="457" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="458" name="__module.encoder.layer.7.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="675,x.91">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="459" name="Constant_6535384" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="676">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="460" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="677">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="461" name="self.encoder.layer.7.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="488226976" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.value.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="462" name="__module.encoder.layer.7.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="463" name="Constant_6549246" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="492421280" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="464" name="__module.encoder.layer.7.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="680,x.93">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="465" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="466" name="__module.encoder.layer.7.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="684,x.95">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="467" name="Constant_6535407" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="685">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="468" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="686">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="469" name="__module.encoder.layer.7.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="687,attn_output.29">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="470" name="__module.encoder.layer.7.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="139731064" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="471" name="__module.encoder.layer.7.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="688,attn_output.31">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="472" name="Constant_6549627" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="139731080" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="473" name="__module.encoder.layer.7.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="690">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="474" name="self.encoder.layer.7.attention.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="492425376" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.7.attention.output.dense.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="475" name="__module.encoder.layer.7.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="476" name="Constant_6549247" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="496619680" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="477" name="__module.encoder.layer.7.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="696,input.31">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="478" name="__module.encoder.layer.7.attention.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="698">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="479" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="480" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="481" name="Constant_6549248" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="496623776" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="482" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="483" name="Constant_6549249" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="496627872" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="484" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="702,input_tensor.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="485" name="self.encoder.layer.7.intermediate.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="4096, 1024" offset="496631968" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.7.intermediate.dense.weight">
<dim>4096</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="486" name="__module.encoder.layer.7.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>4096</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="487" name="Constant_6549250" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 4096" offset="513409184" size="16384" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="488" name="__module.encoder.layer.7.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="707">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="489" name="__module.encoder.layer.7.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="708">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="490" name="self.encoder.layer.7.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 4096" offset="513425568" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.7.output.dense.weight">
<dim>1024</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="491" name="__module.encoder.layer.7.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="492" name="Constant_6549251" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="530202784" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="493" name="__module.encoder.layer.7.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="714,input.33">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="494" name="__module.encoder.layer.7.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="716">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="495" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="496" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="497" name="Constant_6549252" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="530206880" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="498" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="499" name="Constant_6549253" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="530210976" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="500" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="720,hidden_states.49">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="501" name="self.encoder.layer.8.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="530215072" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.query.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="502" name="__module.encoder.layer.8.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="503" name="Constant_6549254" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="534409376" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="504" name="__module.encoder.layer.8.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="733,x.97">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="505" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="506" name="__module.encoder.layer.8.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="737,x.99">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="507" name="Constant_6535587" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="738">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="508" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="739">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="509" name="self.encoder.layer.8.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="534413472" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.key.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="510" name="__module.encoder.layer.8.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="511" name="Constant_6549255" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="538607776" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="512" name="__module.encoder.layer.8.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="742,x.101">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="513" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="514" name="__module.encoder.layer.8.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="746,x.103">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="515" name="Constant_6535610" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="747">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="516" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="748">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="517" name="self.encoder.layer.8.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="538611872" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.value.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="518" name="__module.encoder.layer.8.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="519" name="Constant_6549256" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="542806176" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="520" name="__module.encoder.layer.8.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="751,x.105">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="521" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="522" name="__module.encoder.layer.8.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="755,x.107">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="523" name="Constant_6535633" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="756">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="524" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="757">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="525" name="__module.encoder.layer.8.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="758,attn_output.33">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="526" name="__module.encoder.layer.8.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="139731064" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="527" name="__module.encoder.layer.8.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="759,attn_output.35">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="528" name="Constant_6549628" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="139731080" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="529" name="__module.encoder.layer.8.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="761">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="530" name="self.encoder.layer.8.attention.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="542810272" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.8.attention.output.dense.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="531" name="__module.encoder.layer.8.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="532" name="Constant_6549257" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="547004576" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="533" name="__module.encoder.layer.8.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="767,input.35">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="534" name="__module.encoder.layer.8.attention.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="769">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="535" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="536" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="537" name="Constant_6549258" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="547008672" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="538" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="539" name="Constant_6549259" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="547012768" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="540" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="773,input_tensor.17">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="541" name="self.encoder.layer.8.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="547016864" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.8.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="542" name="__module.encoder.layer.8.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="543" name="Constant_6549260" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="563794080" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="544" name="__module.encoder.layer.8.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="778">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="545" name="__module.encoder.layer.8.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="779">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="546" name="self.encoder.layer.8.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="563810464" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.8.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="547" name="__module.encoder.layer.8.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="548" name="Constant_6549261" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="580587680" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="549" name="__module.encoder.layer.8.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="785,input.37">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="550" name="__module.encoder.layer.8.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="787">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="551" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="552" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="553" name="Constant_6549262" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="580591776" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="554" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="555" name="Constant_6549263" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="580595872" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="556" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="791,hidden_states.55">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="557" name="self.encoder.layer.9.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="580599968" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="558" name="__module.encoder.layer.9.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="559" name="Constant_6549264" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="584794272" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="560" name="__module.encoder.layer.9.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="804,x.109">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="561" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="562" name="__module.encoder.layer.9.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="808,x.111">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="563" name="Constant_6535813" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="809">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="564" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="810">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="565" name="self.encoder.layer.9.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="584798368" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="566" name="__module.encoder.layer.9.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="567" name="Constant_6549265" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="588992672" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="568" name="__module.encoder.layer.9.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="813,x.113">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="569" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="570" name="__module.encoder.layer.9.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="817,x.115">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="571" name="Constant_6535836" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="818">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="572" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="819">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="573" name="self.encoder.layer.9.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="588996768" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="574" name="__module.encoder.layer.9.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="575" name="Constant_6549266" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="593191072" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="576" name="__module.encoder.layer.9.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="822,x.117">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="577" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="578" name="__module.encoder.layer.9.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="826,x.119">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="579" name="Constant_6535859" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="827">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="580" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="828">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="581" name="__module.encoder.layer.9.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="829,attn_output.37">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="582" name="__module.encoder.layer.9.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="583" name="__module.encoder.layer.9.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="830,attn_output.39">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="584" name="Constant_6549629" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="585" name="__module.encoder.layer.9.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="832">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="586" name="self.encoder.layer.9.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="593195168" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.9.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="587" name="__module.encoder.layer.9.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="588" name="Constant_6549267" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="597389472" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="589" name="__module.encoder.layer.9.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="838,input.39">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="590" name="__module.encoder.layer.9.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="840">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="591" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="592" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="593" name="Constant_6549268" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="597393568" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="594" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="595" name="Constant_6549269" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="597397664" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="596" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="844,input_tensor.19">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="597" name="self.encoder.layer.9.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="597401760" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.9.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="598" name="__module.encoder.layer.9.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="599" name="Constant_6549270" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="614178976" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="600" name="__module.encoder.layer.9.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="849">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="601" name="__module.encoder.layer.9.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="850">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="602" name="self.encoder.layer.9.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="614195360" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.9.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="603" name="__module.encoder.layer.9.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="604" name="Constant_6549271" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="630972576" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="605" name="__module.encoder.layer.9.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="856,input.41">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="606" name="__module.encoder.layer.9.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="858">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="607" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="608" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="609" name="Constant_6549272" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="630976672" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="610" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="611" name="Constant_6549273" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="630980768" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="612" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="862,hidden_states.61">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="613" name="self.encoder.layer.10.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="630984864" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="614" name="__module.encoder.layer.10.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="615" name="Constant_6549274" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="635179168" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="616" name="__module.encoder.layer.10.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="875,x.121">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="617" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="618" name="__module.encoder.layer.10.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="879,x.123">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="619" name="Constant_6536039" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="880">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="620" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="881">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="621" name="self.encoder.layer.10.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="635183264" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="622" name="__module.encoder.layer.10.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="623" name="Constant_6549275" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="639377568" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="624" name="__module.encoder.layer.10.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="884,x.125">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="625" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="626" name="__module.encoder.layer.10.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="888,x.127">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="627" name="Constant_6536062" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="889">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="628" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="890">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="629" name="self.encoder.layer.10.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="639381664" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="630" name="__module.encoder.layer.10.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="631" name="Constant_6549276" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="643575968" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="632" name="__module.encoder.layer.10.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="893,x.129">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="633" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="634" name="__module.encoder.layer.10.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="897,x.131">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="635" name="Constant_6536085" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="898">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="636" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="899">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="637" name="__module.encoder.layer.10.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="900,attn_output.41">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="638" name="__module.encoder.layer.10.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="639" name="__module.encoder.layer.10.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="901,attn_output.43">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="640" name="Constant_6549630" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="641" name="__module.encoder.layer.10.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="903">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="642" name="self.encoder.layer.10.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="643580064" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.10.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="643" name="__module.encoder.layer.10.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="644" name="Constant_6549277" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="647774368" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="645" name="__module.encoder.layer.10.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="909,input.43">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="646" name="__module.encoder.layer.10.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="911">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="647" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="648" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="649" name="Constant_6549278" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="647778464" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="650" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="651" name="Constant_6549279" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="647782560" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="652" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="915,input_tensor.21">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="653" name="self.encoder.layer.10.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="647786656" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.10.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="654" name="__module.encoder.layer.10.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="655" name="Constant_6549280" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 4096" offset="664563872" size="16384" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="656" name="__module.encoder.layer.10.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="920">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="657" name="__module.encoder.layer.10.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="921">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="658" name="self.encoder.layer.10.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 4096" offset="664580256" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.10.output.dense.weight">
<dim>1024</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="659" name="__module.encoder.layer.10.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="660" name="Constant_6549281" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="681357472" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="661" name="__module.encoder.layer.10.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="927,input.45">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="662" name="__module.encoder.layer.10.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="929">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="663" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="664" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="665" name="Constant_6549282" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="681361568" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="666" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="667" name="Constant_6549283" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="681365664" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="668" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="933,hidden_states.67">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="669" name="self.encoder.layer.11.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="681369760" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.query.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="670" name="__module.encoder.layer.11.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="671" name="Constant_6549284" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="685564064" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="672" name="__module.encoder.layer.11.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="946,x.133">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="673" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="674" name="__module.encoder.layer.11.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="950,x.135">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="675" name="Constant_6536265" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="951">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="676" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="952">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="677" name="self.encoder.layer.11.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="685568160" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.key.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="678" name="__module.encoder.layer.11.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="679" name="Constant_6549285" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="689762464" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="680" name="__module.encoder.layer.11.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="955,x.137">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="681" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="682" name="__module.encoder.layer.11.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="959,x.139">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="683" name="Constant_6536288" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="960">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="684" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="961">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="685" name="self.encoder.layer.11.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="689766560" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.value.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="686" name="__module.encoder.layer.11.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="687" name="Constant_6549286" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="693960864" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="688" name="__module.encoder.layer.11.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="964,x.141">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="689" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="690" name="__module.encoder.layer.11.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="968,x.143">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="691" name="Constant_6536311" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="969">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="692" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="970">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="693" name="__module.encoder.layer.11.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="971,attn_output.45">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="694" name="__module.encoder.layer.11.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="139731064" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="695" name="__module.encoder.layer.11.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="972,attn_output.47">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="696" name="Constant_6549631" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="139731080" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="697" name="__module.encoder.layer.11.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="974">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="698" name="self.encoder.layer.11.attention.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="693964960" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.11.attention.output.dense.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="699" name="__module.encoder.layer.11.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="700" name="Constant_6549287" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="698159264" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="701" name="__module.encoder.layer.11.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="980,input.47">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="702" name="__module.encoder.layer.11.attention.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="982">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="703" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="704" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="705" name="Constant_6549288" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="698163360" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="706" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="707" name="Constant_6549289" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="698167456" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="708" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="986,input_tensor.23">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="709" name="self.encoder.layer.11.intermediate.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="4096, 1024" offset="698171552" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.11.intermediate.dense.weight">
<dim>4096</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="710" name="__module.encoder.layer.11.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>4096</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="711" name="Constant_6549290" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 4096" offset="714948768" size="16384" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="712" name="__module.encoder.layer.11.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="991">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="713" name="__module.encoder.layer.11.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="992">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="714" name="self.encoder.layer.11.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 4096" offset="714965152" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.11.output.dense.weight">
<dim>1024</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="715" name="__module.encoder.layer.11.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="716" name="Constant_6549291" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="731742368" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="717" name="__module.encoder.layer.11.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="998,input.49">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="718" name="__module.encoder.layer.11.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1000">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="719" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="720" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="721" name="Constant_6549292" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="731746464" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="722" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="723" name="Constant_6549293" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="731750560" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="724" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1004,hidden_states.73">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="725" name="self.encoder.layer.12.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="731754656" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.12.attention.self.query.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="726" name="__module.encoder.layer.12.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="727" name="Constant_6549294" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="735948960" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="728" name="__module.encoder.layer.12.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1017,x.145">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="729" name="__module.encoder.layer.12.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="730" name="__module.encoder.layer.12.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1021,x.147">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="731" name="Constant_6536491" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="1022">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="732" name="__module.encoder.layer.12.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1023">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="733" name="self.encoder.layer.12.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="735953056" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.12.attention.self.key.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="734" name="__module.encoder.layer.12.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="735" name="Constant_6549295" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="740147360" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="736" name="__module.encoder.layer.12.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1026,x.149">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="737" name="__module.encoder.layer.12.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="738" name="__module.encoder.layer.12.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1030,x.151">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="739" name="Constant_6536514" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="1031">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="740" name="__module.encoder.layer.12.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1032">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="741" name="self.encoder.layer.12.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="740151456" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.12.attention.self.value.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="742" name="__module.encoder.layer.12.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="743" name="Constant_6549296" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="744345760" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="744" name="__module.encoder.layer.12.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1035,x.153">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="745" name="__module.encoder.layer.12.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="746" name="__module.encoder.layer.12.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1039,x.155">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="747" name="Constant_6536537" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="1040">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="748" name="__module.encoder.layer.12.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1041">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="749" name="__module.encoder.layer.12.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="1042,attn_output.49">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="750" name="__module.encoder.layer.12.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="139731064" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="751" name="__module.encoder.layer.12.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1043,attn_output.51">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="752" name="Constant_6549632" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="139731080" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="753" name="__module.encoder.layer.12.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1045">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="754" name="self.encoder.layer.12.attention.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="744349856" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.12.attention.output.dense.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="755" name="__module.encoder.layer.12.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="756" name="Constant_6549297" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="748544160" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="757" name="__module.encoder.layer.12.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1051,input.51">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="758" name="__module.encoder.layer.12.attention.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1053">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="759" name="__module.encoder.layer.12.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="760" name="__module.encoder.layer.12.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="761" name="Constant_6549298" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="748548256" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="762" name="__module.encoder.layer.12.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="763" name="Constant_6549299" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="748552352" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="764" name="__module.encoder.layer.12.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1057,input_tensor.25">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="765" name="self.encoder.layer.12.intermediate.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="4096, 1024" offset="748556448" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.12.intermediate.dense.weight">
<dim>4096</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="766" name="__module.encoder.layer.12.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>4096</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="767" name="Constant_6549300" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 4096" offset="765333664" size="16384" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="768" name="__module.encoder.layer.12.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1062">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="769" name="__module.encoder.layer.12.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="1063">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="770" name="self.encoder.layer.12.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 4096" offset="765350048" size="16777216" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.12.output.dense.weight">
<dim>1024</dim>
<dim>4096</dim>
</port>
</output>
</layer>
<layer id="771" name="__module.encoder.layer.12.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>4096</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>4096</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="772" name="Constant_6549301" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="782127264" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="773" name="__module.encoder.layer.12.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1069,input.53">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="774" name="__module.encoder.layer.12.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1071">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="775" name="__module.encoder.layer.12.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="127127572" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="776" name="__module.encoder.layer.12.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="777" name="Constant_6549302" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="782131360" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="778" name="__module.encoder.layer.12.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="779" name="Constant_6549303" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="782135456" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="780" name="__module.encoder.layer.12.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1075,hidden_states.79">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="781" name="self.encoder.layer.13.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="782139552" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.13.attention.self.query.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="782" name="__module.encoder.layer.13.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="783" name="Constant_6549304" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="786333856" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="784" name="__module.encoder.layer.13.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1088,x.157">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="785" name="__module.encoder.layer.13.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="786" name="__module.encoder.layer.13.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1092,x.159">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="787" name="Constant_6536717" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="1093">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="788" name="__module.encoder.layer.13.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1094">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="789" name="self.encoder.layer.13.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="786337952" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.13.attention.self.key.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="790" name="__module.encoder.layer.13.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="791" name="Constant_6549305" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="790532256" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="792" name="__module.encoder.layer.13.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1097,x.161">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="793" name="__module.encoder.layer.13.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="794" name="__module.encoder.layer.13.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1101,x.163">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="795" name="Constant_6536740" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334200" size="32" />
<output>
<port id="0" precision="I64" names="1102">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="796" name="__module.encoder.layer.13.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>16</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1103">
<dim>-1</dim>
<dim>16</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="797" name="self.encoder.layer.13.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 1024" offset="790536352" size="4194304" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.13.attention.self.value.weight">
<dim>1024</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="798" name="__module.encoder.layer.13.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1024</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="799" name="Constant_6549306" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1024" offset="794730656" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="800" name="__module.encoder.layer.13.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1024</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="1106,x.165">
<dim>-1</dim>
<dim>-1</dim>
<dim>1024</dim>
</port>
</output>
</layer>
<layer id="801" name="__module.encoder.layer.13.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="131334168" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="802" name="__module.encoder.layer.13.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1110,x.167">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="803" name="Constant_6536763" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1111">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="804" name="__module.encoder.layer.13.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1112">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="805" name="__module.encoder.layer.13.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1113,attn_output.53">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="806" name="__module.encoder.layer.13.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="807" name="__module.encoder.layer.13.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1114,attn_output.55">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="808" name="Constant_6549633" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="809" name="__module.encoder.layer.13.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1116">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="810" name="self.encoder.layer.13.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="794734752" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.13.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="811" name="__module.encoder.layer.13.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="812" name="Constant_6549307" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="798929056" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="813" name="__module.encoder.layer.13.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1122,input.55">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="814" name="__module.encoder.layer.13.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1124">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="815" name="__module.encoder.layer.13.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="816" name="__module.encoder.layer.13.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="817" name="Constant_6549308" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="798933152" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="818" name="__module.encoder.layer.13.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="819" name="Constant_6549309" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="798937248" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="820" name="__module.encoder.layer.13.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1128,input_tensor.27">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="821" name="self.encoder.layer.13.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="798941344" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.13.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="822" name="__module.encoder.layer.13.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="823" name="Constant_6549310" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="815718560" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="824" name="__module.encoder.layer.13.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1133">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="825" name="__module.encoder.layer.13.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1134">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="826" name="self.encoder.layer.13.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="815734944" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.13.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="827" name="__module.encoder.layer.13.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="828" name="Constant_6549311" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="832512160" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="829" name="__module.encoder.layer.13.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1140,input.57">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="830" name="__module.encoder.layer.13.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1142">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="831" name="__module.encoder.layer.13.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="832" name="__module.encoder.layer.13.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="833" name="Constant_6549312" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="832516256" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="834" name="__module.encoder.layer.13.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="835" name="Constant_6549313" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="832520352" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="836" name="__module.encoder.layer.13.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1146,hidden_states.85">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="837" name="self.encoder.layer.14.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="832524448" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.14.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="838" name="__module.encoder.layer.14.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="839" name="Constant_6549314" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="836718752" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="840" name="__module.encoder.layer.14.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1159,x.169">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="841" name="__module.encoder.layer.14.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="842" name="__module.encoder.layer.14.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1163,x.171">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="843" name="Constant_6536943" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1164">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="844" name="__module.encoder.layer.14.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1165">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="845" name="self.encoder.layer.14.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="836722848" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.14.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="846" name="__module.encoder.layer.14.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="847" name="Constant_6549315" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="840917152" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="848" name="__module.encoder.layer.14.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1168,x.173">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="849" name="__module.encoder.layer.14.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="850" name="__module.encoder.layer.14.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1172,x.175">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="851" name="Constant_6536966" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1173">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="852" name="__module.encoder.layer.14.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1174">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="853" name="self.encoder.layer.14.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="840921248" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.14.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="854" name="__module.encoder.layer.14.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="855" name="Constant_6549316" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="845115552" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="856" name="__module.encoder.layer.14.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1177,x.177">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="857" name="__module.encoder.layer.14.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="858" name="__module.encoder.layer.14.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1181,x.179">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="859" name="Constant_6536989" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1182">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="860" name="__module.encoder.layer.14.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1183">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="861" name="__module.encoder.layer.14.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1184,attn_output.57">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="862" name="__module.encoder.layer.14.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="863" name="__module.encoder.layer.14.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1185,attn_output.59">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="864" name="Constant_6549634" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="865" name="__module.encoder.layer.14.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1187">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="866" name="self.encoder.layer.14.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="845119648" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.14.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="867" name="__module.encoder.layer.14.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="868" name="Constant_6549317" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="849313952" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="869" name="__module.encoder.layer.14.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1193,input.59">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="870" name="__module.encoder.layer.14.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1195">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="871" name="__module.encoder.layer.14.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="872" name="__module.encoder.layer.14.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="873" name="Constant_6549318" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="849318048" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="874" name="__module.encoder.layer.14.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="875" name="Constant_6549319" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="849322144" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="876" name="__module.encoder.layer.14.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1199,input_tensor.29">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="877" name="self.encoder.layer.14.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="849326240" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.14.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="878" name="__module.encoder.layer.14.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="879" name="Constant_6549320" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="866103456" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="880" name="__module.encoder.layer.14.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1204">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="881" name="__module.encoder.layer.14.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1205">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="882" name="self.encoder.layer.14.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="866119840" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.14.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="883" name="__module.encoder.layer.14.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="884" name="Constant_6549321" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="882897056" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="885" name="__module.encoder.layer.14.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1211,input.61">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="886" name="__module.encoder.layer.14.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1213">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="887" name="__module.encoder.layer.14.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="888" name="__module.encoder.layer.14.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="889" name="Constant_6549322" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="882901152" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="890" name="__module.encoder.layer.14.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="891" name="Constant_6549323" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="882905248" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="892" name="__module.encoder.layer.14.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1217,hidden_states.91">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="893" name="self.encoder.layer.15.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="882909344" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.15.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="894" name="__module.encoder.layer.15.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="895" name="Constant_6549324" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="887103648" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="896" name="__module.encoder.layer.15.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1230,x.181">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="897" name="__module.encoder.layer.15.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="898" name="__module.encoder.layer.15.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1234,x.183">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="899" name="Constant_6537169" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1235">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="900" name="__module.encoder.layer.15.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1236">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="901" name="self.encoder.layer.15.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="887107744" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.15.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="902" name="__module.encoder.layer.15.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="903" name="Constant_6549325" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="891302048" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="904" name="__module.encoder.layer.15.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1239,x.185">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="905" name="__module.encoder.layer.15.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="906" name="__module.encoder.layer.15.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1243,x.187">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="907" name="Constant_6537192" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1244">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="908" name="__module.encoder.layer.15.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1245">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="909" name="self.encoder.layer.15.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="891306144" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.15.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="910" name="__module.encoder.layer.15.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="911" name="Constant_6549326" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="895500448" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="912" name="__module.encoder.layer.15.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1248,x.189">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="913" name="__module.encoder.layer.15.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="914" name="__module.encoder.layer.15.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1252,x.191">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="915" name="Constant_6537215" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1253">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="916" name="__module.encoder.layer.15.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1254">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="917" name="__module.encoder.layer.15.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1255,attn_output.61">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="918" name="__module.encoder.layer.15.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="919" name="__module.encoder.layer.15.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1256,attn_output.63">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="920" name="Constant_6549635" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="921" name="__module.encoder.layer.15.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1258">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="922" name="self.encoder.layer.15.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="895504544" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.15.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="923" name="__module.encoder.layer.15.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="924" name="Constant_6549327" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="899698848" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="925" name="__module.encoder.layer.15.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1264,input.63">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="926" name="__module.encoder.layer.15.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1266">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="927" name="__module.encoder.layer.15.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="928" name="__module.encoder.layer.15.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="929" name="Constant_6549328" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="899702944" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="930" name="__module.encoder.layer.15.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="931" name="Constant_6549329" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="899707040" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="932" name="__module.encoder.layer.15.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1270,input_tensor.31">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="933" name="self.encoder.layer.15.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="899711136" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.15.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="934" name="__module.encoder.layer.15.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="935" name="Constant_6549330" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="916488352" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="936" name="__module.encoder.layer.15.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1275">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="937" name="__module.encoder.layer.15.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1276">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="938" name="self.encoder.layer.15.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="916504736" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.15.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="939" name="__module.encoder.layer.15.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="940" name="Constant_6549331" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="933281952" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="941" name="__module.encoder.layer.15.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1282,input.65">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="942" name="__module.encoder.layer.15.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1284">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="943" name="__module.encoder.layer.15.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="944" name="__module.encoder.layer.15.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="945" name="Constant_6549332" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="933286048" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="946" name="__module.encoder.layer.15.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="947" name="Constant_6549333" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="933290144" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="948" name="__module.encoder.layer.15.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1288,hidden_states.97">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="949" name="self.encoder.layer.16.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="933294240" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.16.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="950" name="__module.encoder.layer.16.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="951" name="Constant_6549334" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="937488544" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="952" name="__module.encoder.layer.16.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1301,x.193">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="953" name="__module.encoder.layer.16.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="954" name="__module.encoder.layer.16.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1305,x.195">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="955" name="Constant_6537395" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1306">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="956" name="__module.encoder.layer.16.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1307">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="957" name="self.encoder.layer.16.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="937492640" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.16.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="958" name="__module.encoder.layer.16.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="959" name="Constant_6549335" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="941686944" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="960" name="__module.encoder.layer.16.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1310,x.197">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="961" name="__module.encoder.layer.16.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="962" name="__module.encoder.layer.16.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1314,x.199">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="963" name="Constant_6537418" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1315">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="964" name="__module.encoder.layer.16.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1316">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="965" name="self.encoder.layer.16.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="941691040" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.16.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="966" name="__module.encoder.layer.16.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="967" name="Constant_6549336" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="945885344" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="968" name="__module.encoder.layer.16.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1319,x.201">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="969" name="__module.encoder.layer.16.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="970" name="__module.encoder.layer.16.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1323,x.203">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="971" name="Constant_6537441" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1324">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="972" name="__module.encoder.layer.16.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1325">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="973" name="__module.encoder.layer.16.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1326,attn_output.65">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="974" name="__module.encoder.layer.16.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="975" name="__module.encoder.layer.16.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1327,attn_output.67">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="976" name="Constant_6549636" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="977" name="__module.encoder.layer.16.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1329">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="978" name="self.encoder.layer.16.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="945889440" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.16.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="979" name="__module.encoder.layer.16.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="980" name="Constant_6549337" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="950083744" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="981" name="__module.encoder.layer.16.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1335,input.67">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="982" name="__module.encoder.layer.16.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1337">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="983" name="__module.encoder.layer.16.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="984" name="__module.encoder.layer.16.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="985" name="Constant_6549338" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="950087840" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="986" name="__module.encoder.layer.16.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="987" name="Constant_6549339" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="950091936" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="988" name="__module.encoder.layer.16.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1341,input_tensor.33">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="989" name="self.encoder.layer.16.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="950096032" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.16.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="990" name="__module.encoder.layer.16.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="991" name="Constant_6549340" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="966873248" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="992" name="__module.encoder.layer.16.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1346">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="993" name="__module.encoder.layer.16.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1347">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="994" name="self.encoder.layer.16.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="966889632" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.16.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="995" name="__module.encoder.layer.16.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="996" name="Constant_6549341" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="983666848" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="997" name="__module.encoder.layer.16.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1353,input.69">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="998" name="__module.encoder.layer.16.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1355">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="999" name="__module.encoder.layer.16.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1000" name="__module.encoder.layer.16.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1001" name="Constant_6549342" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="983670944" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1002" name="__module.encoder.layer.16.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1003" name="Constant_6549343" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="983675040" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1004" name="__module.encoder.layer.16.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1359,hidden_states.103">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1005" name="self.encoder.layer.17.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="983679136" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.17.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1006" name="__module.encoder.layer.17.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1007" name="Constant_6549344" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="987873440" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1008" name="__module.encoder.layer.17.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1372,x.205">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1009" name="__module.encoder.layer.17.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1010" name="__module.encoder.layer.17.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1376,x.207">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1011" name="Constant_6537621" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1377">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1012" name="__module.encoder.layer.17.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1378">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1013" name="self.encoder.layer.17.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="987877536" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.17.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1014" name="__module.encoder.layer.17.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1015" name="Constant_6549345" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="992071840" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1016" name="__module.encoder.layer.17.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1381,x.209">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1017" name="__module.encoder.layer.17.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1018" name="__module.encoder.layer.17.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1385,x.211">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1019" name="Constant_6537644" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1386">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1020" name="__module.encoder.layer.17.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1387">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1021" name="self.encoder.layer.17.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="992075936" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.17.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1022" name="__module.encoder.layer.17.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1023" name="Constant_6549346" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="996270240" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1024" name="__module.encoder.layer.17.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1390,x.213">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1025" name="__module.encoder.layer.17.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1026" name="__module.encoder.layer.17.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1394,x.215">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1027" name="Constant_6537667" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1395">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1028" name="__module.encoder.layer.17.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1396">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1029" name="__module.encoder.layer.17.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1397,attn_output.69">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1030" name="__module.encoder.layer.17.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1031" name="__module.encoder.layer.17.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1398,attn_output.71">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1032" name="Constant_6549637" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1033" name="__module.encoder.layer.17.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1400">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1034" name="self.encoder.layer.17.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="996274336" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.17.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1035" name="__module.encoder.layer.17.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1036" name="Constant_6549347" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1000468640" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1037" name="__module.encoder.layer.17.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1406,input.71">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1038" name="__module.encoder.layer.17.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1408">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1039" name="__module.encoder.layer.17.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1040" name="__module.encoder.layer.17.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1041" name="Constant_6549348" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1000472736" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1042" name="__module.encoder.layer.17.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1043" name="Constant_6549349" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1000476832" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1044" name="__module.encoder.layer.17.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1412,input_tensor.35">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1045" name="self.encoder.layer.17.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="1000480928" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.17.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1046" name="__module.encoder.layer.17.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1047" name="Constant_6549350" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="1017258144" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1048" name="__module.encoder.layer.17.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1417">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1049" name="__module.encoder.layer.17.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1418">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
		<layer id="1050" name="self.encoder.layer.17.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 4096" offset="1017274528" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.17.output.dense.weight">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1051" name="__module.encoder.layer.17.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1052" name="Constant_6549351" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1034051744" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1053" name="__module.encoder.layer.17.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1424,input.73">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1054" name="__module.encoder.layer.17.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1426">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1055" name="__module.encoder.layer.17.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="1056" name="__module.encoder.layer.17.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1057" name="Constant_6549352" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1034055840" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1058" name="__module.encoder.layer.17.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1059" name="Constant_6549353" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1034059936" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1060" name="__module.encoder.layer.17.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1430,hidden_states.109">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
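		<!-- Annotation: layers 1054-1060 realize aten::layer_norm as a residual Add followed by
		     MVN over the last axis (eps ~ 1e-12, applied inside the square root) and a per-channel
		     Multiply (weight) and Add (bias). The same pattern closes every attention and
		     feed-forward block. Encoder layer 18 begins below. -->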
		<layer id="1061" name="self.encoder.layer.18.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1034064032" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.18.attention.self.query.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1062" name="__module.encoder.layer.18.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1063" name="Constant_6549354" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1038258336" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1064" name="__module.encoder.layer.18.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1443,x.217">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1065" name="__module.encoder.layer.18.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1066" name="__module.encoder.layer.18.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1447,x.219">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1067" name="Constant_6537847" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="1448">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1068" name="__module.encoder.layer.18.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1449">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
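		<!-- Annotation: query path of layer-18 self-attention. The biased 1024 x 1024 projection
		     is viewed as [batch, seq, 16, 64] and permuted to [batch, 16, seq, 64], i.e. 16 heads
		     of size 64 (16 * 64 = 1024). The key and value paths below repeat the same
		     Reshape/Transpose, sharing the shape constants at offsets 131334168 and 131334200. -->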
		<layer id="1069" name="self.encoder.layer.18.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1038262432" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.18.attention.self.key.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1070" name="__module.encoder.layer.18.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1071" name="Constant_6549355" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1042456736" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1072" name="__module.encoder.layer.18.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1452,x.221">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1073" name="__module.encoder.layer.18.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1074" name="__module.encoder.layer.18.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1456,x.223">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1075" name="Constant_6537870" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="1457">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1076" name="__module.encoder.layer.18.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1458">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1077" name="self.encoder.layer.18.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1042460832" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.18.attention.self.value.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1078" name="__module.encoder.layer.18.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1079" name="Constant_6549356" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1046655136" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1080" name="__module.encoder.layer.18.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1461,x.225">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1081" name="__module.encoder.layer.18.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1082" name="__module.encoder.layer.18.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1465,x.227">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1083" name="Constant_6537893" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="1466">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1084" name="__module.encoder.layer.18.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1467">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1085" name="__module.encoder.layer.18.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="1468,attn_output.73">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
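		<!-- Annotation: ScaledDotProductAttention (opset13, causal="false") takes query, key and
		     value as [batch, 16, seq, 64] plus a fourth attention-mask input of shape
		     [-1, 1, -1, -1] that is broadcast across the 16 heads. -->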
		<layer id="1086" name="__module.encoder.layer.18.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="139731064" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1087" name="__module.encoder.layer.18.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1469,attn_output.75">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1088" name="Constant_6549638" type="Const" version="opset1">
			<data element_type="i64" shape="3" offset="139731080" size="24" />
			<output>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="1089" name="__module.encoder.layer.18.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1471">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
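		<!-- Annotation: the attention result is transposed back to [batch, seq, 16, 64] and
		     reshaped to [batch, seq, 1024], merging the heads ahead of the output projection. -->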
		<layer id="1090" name="self.encoder.layer.18.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1046659232" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.18.attention.output.dense.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1091" name="__module.encoder.layer.18.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1092" name="Constant_6549357" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1050853536" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1093" name="__module.encoder.layer.18.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1477,input.75">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1094" name="__module.encoder.layer.18.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1479">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1095" name="__module.encoder.layer.18.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="1096" name="__module.encoder.layer.18.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1097" name="Constant_6549358" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1050857632" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1098" name="__module.encoder.layer.18.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1099" name="Constant_6549359" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1050861728" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1100" name="__module.encoder.layer.18.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1483,input_tensor.37">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
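		<!-- Annotation: layers 1090-1100 above are the attention output sub-block: a biased
		     1024 x 1024 projection, residual Add with the attention-block input, then the
		     MVN-based LayerNorm pattern. -->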
		<layer id="1101" name="self.encoder.layer.18.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="4096, 1024" offset="1050865824" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.18.intermediate.dense.weight">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1102" name="__module.encoder.layer.18.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1103" name="Constant_6549360" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 4096" offset="1067643040" size="16384" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1104" name="__module.encoder.layer.18.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1488">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1105" name="__module.encoder.layer.18.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="1489">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1106" name="self.encoder.layer.18.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 4096" offset="1067659424" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.18.output.dense.weight">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1107" name="__module.encoder.layer.18.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1108" name="Constant_6549361" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1084436640" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1109" name="__module.encoder.layer.18.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1495,input.77">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1110" name="__module.encoder.layer.18.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1497">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1111" name="__module.encoder.layer.18.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="1112" name="__module.encoder.layer.18.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1113" name="Constant_6549362" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1084440736" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1114" name="__module.encoder.layer.18.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1115" name="Constant_6549363" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1084444832" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1116" name="__module.encoder.layer.18.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1501,hidden_states.115">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
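		<!-- Annotation: end of encoder layer 18. Layers 19 and 20 below repeat this topology
		     unchanged; only the Const offsets into the weights blob advance (by about 50 MB per
		     encoder layer: four 4 MB attention matrices, two 16 MB feed-forward matrices, plus
		     biases). -->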
		<layer id="1117" name="self.encoder.layer.19.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1084448928" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.19.attention.self.query.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1118" name="__module.encoder.layer.19.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1119" name="Constant_6549364" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1088643232" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1120" name="__module.encoder.layer.19.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1514,x.229">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1121" name="__module.encoder.layer.19.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1122" name="__module.encoder.layer.19.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1518,x.231">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1123" name="Constant_6538073" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="1519">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1124" name="__module.encoder.layer.19.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1520">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1125" name="self.encoder.layer.19.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1088647328" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.19.attention.self.key.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1126" name="__module.encoder.layer.19.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1127" name="Constant_6549365" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1092841632" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1128" name="__module.encoder.layer.19.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1523,x.233">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1129" name="__module.encoder.layer.19.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1130" name="__module.encoder.layer.19.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1527,x.235">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1131" name="Constant_6538096" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="1528">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1132" name="__module.encoder.layer.19.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1529">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1133" name="self.encoder.layer.19.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1092845728" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.19.attention.self.value.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1134" name="__module.encoder.layer.19.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1135" name="Constant_6549366" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1097040032" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1136" name="__module.encoder.layer.19.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1532,x.237">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1137" name="__module.encoder.layer.19.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334168" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1138" name="__module.encoder.layer.19.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1536,x.239">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1139" name="Constant_6538119" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="131334200" size="32" />
			<output>
				<port id="0" precision="I64" names="1537">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1140" name="__module.encoder.layer.19.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1538">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1141" name="__module.encoder.layer.19.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="1539,attn_output.77">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1142" name="__module.encoder.layer.19.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="139731064" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="1143" name="__module.encoder.layer.19.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>16</dim>
					<dim>-1</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1540,attn_output.79">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
			</output>
		</layer>
		<layer id="1144" name="Constant_6549639" type="Const" version="opset1">
			<data element_type="i64" shape="3" offset="139731080" size="24" />
			<output>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="1145" name="__module.encoder.layer.19.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>16</dim>
					<dim>64</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1542">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1146" name="self.encoder.layer.19.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 1024" offset="1097044128" size="4194304" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.19.attention.output.dense.weight">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1147" name="__module.encoder.layer.19.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1148" name="Constant_6549367" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1101238432" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1149" name="__module.encoder.layer.19.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1548,input.79">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1150" name="__module.encoder.layer.19.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1550">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1151" name="__module.encoder.layer.19.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="1152" name="__module.encoder.layer.19.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1153" name="Constant_6549368" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1101242528" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1154" name="__module.encoder.layer.19.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1155" name="Constant_6549369" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1101246624" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1156" name="__module.encoder.layer.19.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1554,input_tensor.39">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1157" name="self.encoder.layer.19.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="4096, 1024" offset="1101250720" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.19.intermediate.dense.weight">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1158" name="__module.encoder.layer.19.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>4096</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1159" name="Constant_6549370" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 4096" offset="1118027936" size="16384" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1160" name="__module.encoder.layer.19.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1559">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1161" name="__module.encoder.layer.19.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="1560">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1162" name="self.encoder.layer.19.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1024, 4096" offset="1118044320" size="16777216" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.19.output.dense.weight">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</output>
		</layer>
		<layer id="1163" name="__module.encoder.layer.19.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>4096</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>4096</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1164" name="Constant_6549371" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1134821536" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1165" name="__module.encoder.layer.19.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1566,input.81">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1166" name="__module.encoder.layer.19.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1568">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1167" name="__module.encoder.layer.19.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="127127572" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="1168" name="__module.encoder.layer.19.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1169" name="Constant_6549372" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1134825632" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1170" name="__module.encoder.layer.19.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1171" name="Constant_6549373" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1024" offset="1134829728" size="4096" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="1172" name="__module.encoder.layer.19.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="1572,hidden_states.121">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
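		<!-- Annotation: end of encoder layer 19. Encoder layer 20 begins below with query/key/value
		     weights at offsets 1134833824, 1139032224 and 1143230624. -->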
|
|
<layer id="1173" name="self.encoder.layer.20.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1134833824" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.20.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1174" name="__module.encoder.layer.20.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1175" name="Constant_6549374" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1139028128" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1176" name="__module.encoder.layer.20.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1585,x.241">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1177" name="__module.encoder.layer.20.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1178" name="__module.encoder.layer.20.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1589,x.243">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1179" name="Constant_6538299" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1590">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1180" name="__module.encoder.layer.20.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1591">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1181" name="self.encoder.layer.20.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1139032224" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.20.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1182" name="__module.encoder.layer.20.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1183" name="Constant_6549375" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1143226528" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1184" name="__module.encoder.layer.20.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1594,x.245">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1185" name="__module.encoder.layer.20.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1186" name="__module.encoder.layer.20.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1598,x.247">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1187" name="Constant_6538322" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1599">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1188" name="__module.encoder.layer.20.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1600">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1189" name="self.encoder.layer.20.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1143230624" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.20.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1190" name="__module.encoder.layer.20.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1191" name="Constant_6549376" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1147424928" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1192" name="__module.encoder.layer.20.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1603,x.249">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1193" name="__module.encoder.layer.20.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1194" name="__module.encoder.layer.20.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1607,x.251">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1195" name="Constant_6538345" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1608">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1196" name="__module.encoder.layer.20.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1609">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1197" name="__module.encoder.layer.20.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1610,attn_output.81">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1198" name="__module.encoder.layer.20.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1199" name="__module.encoder.layer.20.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1611,attn_output.83">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1200" name="Constant_6549640" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1201" name="__module.encoder.layer.20.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1613">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1202" name="self.encoder.layer.20.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1147429024" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.20.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1203" name="__module.encoder.layer.20.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1204" name="Constant_6549377" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1151623328" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1205" name="__module.encoder.layer.20.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1619,input.83">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1206" name="__module.encoder.layer.20.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1621">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1207" name="__module.encoder.layer.20.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1208" name="__module.encoder.layer.20.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1209" name="Constant_6549378" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1151627424" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1210" name="__module.encoder.layer.20.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1211" name="Constant_6549379" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1151631520" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1212" name="__module.encoder.layer.20.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1625,input_tensor.41">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1213" name="self.encoder.layer.20.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="1151635616" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.20.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1214" name="__module.encoder.layer.20.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1215" name="Constant_6549380" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="1168412832" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1216" name="__module.encoder.layer.20.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1630">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1217" name="__module.encoder.layer.20.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1631">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1218" name="self.encoder.layer.20.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="1168429216" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.20.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1219" name="__module.encoder.layer.20.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1220" name="Constant_6549381" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1185206432" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1221" name="__module.encoder.layer.20.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1637,input.85">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1222" name="__module.encoder.layer.20.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1639">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1223" name="__module.encoder.layer.20.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1224" name="__module.encoder.layer.20.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1225" name="Constant_6549382" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1185210528" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1226" name="__module.encoder.layer.20.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1227" name="Constant_6549383" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1185214624" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1228" name="__module.encoder.layer.20.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1643,hidden_states.127">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1229" name="self.encoder.layer.21.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1185218720" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.21.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1230" name="__module.encoder.layer.21.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1231" name="Constant_6549384" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1189413024" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1232" name="__module.encoder.layer.21.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1656,x.253">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1233" name="__module.encoder.layer.21.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1234" name="__module.encoder.layer.21.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1660,x.255">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1235" name="Constant_6538525" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1661">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1236" name="__module.encoder.layer.21.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1662">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1237" name="self.encoder.layer.21.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1189417120" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.21.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1238" name="__module.encoder.layer.21.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1239" name="Constant_6549385" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1193611424" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1240" name="__module.encoder.layer.21.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1665,x.257">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1241" name="__module.encoder.layer.21.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1242" name="__module.encoder.layer.21.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1669,x.259">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1243" name="Constant_6538548" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1670">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1244" name="__module.encoder.layer.21.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1671">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1245" name="self.encoder.layer.21.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1193615520" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.21.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1246" name="__module.encoder.layer.21.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1247" name="Constant_6549386" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1197809824" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1248" name="__module.encoder.layer.21.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1674,x.261">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1249" name="__module.encoder.layer.21.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1250" name="__module.encoder.layer.21.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1678,x.263">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1251" name="Constant_6538571" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1679">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1252" name="__module.encoder.layer.21.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1680">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1253" name="__module.encoder.layer.21.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1681,attn_output.85">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1254" name="__module.encoder.layer.21.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1255" name="__module.encoder.layer.21.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1682,attn_output.87">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1256" name="Constant_6549641" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1257" name="__module.encoder.layer.21.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1684">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1258" name="self.encoder.layer.21.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1197813920" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.21.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1259" name="__module.encoder.layer.21.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1260" name="Constant_6549387" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1202008224" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1261" name="__module.encoder.layer.21.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1690,input.87">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1262" name="__module.encoder.layer.21.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1692">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1263" name="__module.encoder.layer.21.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1264" name="__module.encoder.layer.21.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1265" name="Constant_6549388" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1202012320" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1266" name="__module.encoder.layer.21.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1267" name="Constant_6549389" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1202016416" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1268" name="__module.encoder.layer.21.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1696,input_tensor.43">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1269" name="self.encoder.layer.21.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="1202020512" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.21.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1270" name="__module.encoder.layer.21.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1271" name="Constant_6549390" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="1218797728" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1272" name="__module.encoder.layer.21.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1701">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1273" name="__module.encoder.layer.21.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1702">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1274" name="self.encoder.layer.21.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="1218814112" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.21.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1275" name="__module.encoder.layer.21.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1276" name="Constant_6549391" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1235591328" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1277" name="__module.encoder.layer.21.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1708,input.89">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1278" name="__module.encoder.layer.21.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1710">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1279" name="__module.encoder.layer.21.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1280" name="__module.encoder.layer.21.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1281" name="Constant_6549392" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1235595424" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1282" name="__module.encoder.layer.21.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1283" name="Constant_6549393" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1235599520" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1284" name="__module.encoder.layer.21.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1714,hidden_states.133">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1285" name="self.encoder.layer.22.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1235603616" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.22.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1286" name="__module.encoder.layer.22.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1287" name="Constant_6549394" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1239797920" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1288" name="__module.encoder.layer.22.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1727,x.265">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1289" name="__module.encoder.layer.22.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1290" name="__module.encoder.layer.22.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1731,x.267">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1291" name="Constant_6538751" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1732">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1292" name="__module.encoder.layer.22.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1733">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1293" name="self.encoder.layer.22.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1239802016" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.22.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1294" name="__module.encoder.layer.22.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1295" name="Constant_6549395" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1243996320" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1296" name="__module.encoder.layer.22.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1736,x.269">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1297" name="__module.encoder.layer.22.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1298" name="__module.encoder.layer.22.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1740,x.271">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1299" name="Constant_6538774" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1741">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1300" name="__module.encoder.layer.22.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1742">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1301" name="self.encoder.layer.22.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1244000416" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.22.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1302" name="__module.encoder.layer.22.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1303" name="Constant_6549396" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1248194720" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1304" name="__module.encoder.layer.22.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1745,x.273">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1305" name="__module.encoder.layer.22.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1306" name="__module.encoder.layer.22.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1749,x.275">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1307" name="Constant_6538797" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1750">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1308" name="__module.encoder.layer.22.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1751">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1309" name="__module.encoder.layer.22.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1752,attn_output.89">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1310" name="__module.encoder.layer.22.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1311" name="__module.encoder.layer.22.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1753,attn_output.91">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1312" name="Constant_6549642" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1313" name="__module.encoder.layer.22.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1755">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1314" name="self.encoder.layer.22.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1248198816" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.22.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1315" name="__module.encoder.layer.22.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1316" name="Constant_6549397" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1252393120" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1317" name="__module.encoder.layer.22.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1761,input.91">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1318" name="__module.encoder.layer.22.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1763">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1319" name="__module.encoder.layer.22.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1320" name="__module.encoder.layer.22.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1321" name="Constant_6549398" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1252397216" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1322" name="__module.encoder.layer.22.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1323" name="Constant_6549399" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1252401312" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1324" name="__module.encoder.layer.22.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1767,input_tensor.45">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1325" name="self.encoder.layer.22.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="1252405408" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.22.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1326" name="__module.encoder.layer.22.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1327" name="Constant_6549400" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="1269182624" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1328" name="__module.encoder.layer.22.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1772">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1329" name="__module.encoder.layer.22.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1773">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1330" name="self.encoder.layer.22.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="1269199008" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.22.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1331" name="__module.encoder.layer.22.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1332" name="Constant_6549401" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1285976224" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1333" name="__module.encoder.layer.22.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1779,input.93">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1334" name="__module.encoder.layer.22.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1781">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1335" name="__module.encoder.layer.22.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1336" name="__module.encoder.layer.22.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1337" name="Constant_6549402" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1285980320" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1338" name="__module.encoder.layer.22.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1339" name="Constant_6549403" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1285984416" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1340" name="__module.encoder.layer.22.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1785,hidden_states.139">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1341" name="self.encoder.layer.23.attention.self.query.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1285988512" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.23.attention.self.query.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1342" name="__module.encoder.layer.23.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1343" name="Constant_6549404" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1290182816" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1344" name="__module.encoder.layer.23.attention.self.query/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1798,x.277">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1345" name="__module.encoder.layer.23.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1346" name="__module.encoder.layer.23.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1802,x.279">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1347" name="Constant_6538977" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1803">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1348" name="__module.encoder.layer.23.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1804">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1349" name="self.encoder.layer.23.attention.self.key.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1290186912" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.23.attention.self.key.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1350" name="__module.encoder.layer.23.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1351" name="Constant_6549405" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1294381216" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1352" name="__module.encoder.layer.23.attention.self.key/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1807,x.281">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1353" name="__module.encoder.layer.23.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1354" name="__module.encoder.layer.23.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1811,x.283">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1355" name="Constant_6539000" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1812">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1356" name="__module.encoder.layer.23.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1813">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1357" name="self.encoder.layer.23.attention.self.value.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1294385312" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.23.attention.self.value.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1358" name="__module.encoder.layer.23.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1359" name="Constant_6549406" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1298579616" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1360" name="__module.encoder.layer.23.attention.self.value/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1816,x.285">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1361" name="__module.encoder.layer.23.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334168" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1362" name="__module.encoder.layer.23.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1820,x">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1363" name="Constant_6539023" type="Const" version="opset1">
|
|
<data element_type="i64" shape="4" offset="131334200" size="32" />
|
|
<output>
|
|
<port id="0" precision="I64" names="1821">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1364" name="__module.encoder.layer.23.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1822">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1365" name="__module.encoder.layer.23.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
|
|
<data causal="false" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="3" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>1</dim>
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="4" precision="FP32" names="1823,attn_output.93">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1366" name="__module.encoder.layer.23.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
|
|
<data element_type="i32" shape="4" offset="139731064" size="16" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1367" name="__module.encoder.layer.23.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>-1</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>4</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1824,attn_output">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1368" name="Constant_6549643" type="Const" version="opset1">
|
|
<data element_type="i64" shape="3" offset="139731080" size="24" />
|
|
<output>
|
|
<port id="0" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1369" name="__module.encoder.layer.23.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
|
|
<data special_zero="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>16</dim>
|
|
<dim>64</dim>
|
|
</port>
|
|
<port id="1" precision="I64">
|
|
<dim>3</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1826">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1370" name="self.encoder.layer.23.attention.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 1024" offset="1298583712" size="4194304" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.23.attention.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1371" name="__module.encoder.layer.23.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1372" name="Constant_6549407" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1302778016" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1373" name="__module.encoder.layer.23.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1832,input.95">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1374" name="__module.encoder.layer.23.attention.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1834">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1375" name="__module.encoder.layer.23.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1376" name="__module.encoder.layer.23.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1377" name="Constant_6549408" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1302782112" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1378" name="__module.encoder.layer.23.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1379" name="Constant_6549409" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1302786208" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1380" name="__module.encoder.layer.23.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1838,input_tensor">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1381" name="self.encoder.layer.23.intermediate.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="4096, 1024" offset="1302790304" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.23.intermediate.dense.weight">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1382" name="__module.encoder.layer.23.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>4096</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1383" name="Constant_6549410" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 4096" offset="1319567520" size="16384" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1384" name="__module.encoder.layer.23.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1843">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1385" name="__module.encoder.layer.23.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
|
|
<data approximation_mode="ERF" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="1" precision="FP32" names="1844">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1386" name="self.encoder.layer.23.output.dense.weight" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1024, 4096" offset="1319583904" size="16777216" />
|
|
<output>
|
|
<port id="0" precision="FP32" names="self.encoder.layer.23.output.dense.weight">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1387" name="__module.encoder.layer.23.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
|
|
<data transpose_a="false" transpose_b="true" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1024</dim>
|
|
<dim>4096</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1388" name="Constant_6549411" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1336361120" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1389" name="__module.encoder.layer.23.output.dense/aten::linear/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1850,input">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1390" name="__module.encoder.layer.23.output/aten::add/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="1852">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1391" name="__module.encoder.layer.23.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
|
|
<data element_type="i32" shape="1" offset="127127572" size="4" />
|
|
<output>
|
|
<port id="0" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1392" name="__module.encoder.layer.23.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
|
|
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="I32">
|
|
<dim>1</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1393" name="Constant_6549412" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1336365216" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1394" name="__module.encoder.layer.23.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1395" name="Constant_6549413" type="Const" version="opset1">
|
|
<data element_type="f32" shape="1, 1, 1024" offset="1336369312" size="4096" />
|
|
<output>
|
|
<port id="0" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1396" name="__module.encoder.layer.23.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
|
|
<data auto_broadcast="numpy" />
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
<port id="1" precision="FP32">
|
|
<dim>1</dim>
|
|
<dim>1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
<output>
|
|
<port id="2" precision="FP32" names="last_hidden_state">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</output>
|
|
</layer>
|
|
<layer id="1397" name="Result_6542175" type="Result" version="opset1">
|
|
<input>
|
|
<port id="0" precision="FP32">
|
|
<dim>-1</dim>
|
|
<dim>-1</dim>
|
|
<dim>1024</dim>
|
|
</port>
|
|
</input>
|
|
</layer>
|
|
</layers>
|
|
<edges>
<edge from-layer="0" from-port="0" to-layer="8" to-port="0" />
<edge from-layer="1" from-port="0" to-layer="58" to-port="0" />
<edge from-layer="2" from-port="0" to-layer="4" to-port="0" />
<edge from-layer="2" from-port="0" to-layer="15" to-port="0" />
<edge from-layer="3" from-port="0" to-layer="6" to-port="0" />
<edge from-layer="4" from-port="1" to-layer="6" to-port="1" />
<edge from-layer="5" from-port="0" to-layer="6" to-port="2" />
<edge from-layer="6" from-port="3" to-layer="11" to-port="0" />
<edge from-layer="7" from-port="0" to-layer="10" to-port="0" />
<edge from-layer="8" from-port="1" to-layer="10" to-port="1" />
<edge from-layer="9" from-port="0" to-layer="10" to-port="2" />
<edge from-layer="10" from-port="3" to-layer="11" to-port="1" />
<edge from-layer="11" from-port="2" to-layer="25" to-port="0" />
<edge from-layer="12" from-port="0" to-layer="24" to-port="0" />
<edge from-layer="13" from-port="0" to-layer="21" to-port="0" />
<edge from-layer="14" from-port="0" to-layer="21" to-port="1" />
<edge from-layer="15" from-port="1" to-layer="67" to-port="0" />
<edge from-layer="15" from-port="1" to-layer="63" to-port="0" />
<edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
<edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
<edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
<edge from-layer="18" from-port="3" to-layer="21" to-port="2" />
<edge from-layer="19" from-port="0" to-layer="21" to-port="3" />
<edge from-layer="20" from-port="0" to-layer="21" to-port="4" />
<edge from-layer="21" from-port="5" to-layer="22" to-port="0" />
<edge from-layer="22" from-port="1" to-layer="24" to-port="1" />
<edge from-layer="23" from-port="0" to-layer="24" to-port="2" />
<edge from-layer="24" from-port="3" to-layer="25" to-port="1" />
<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
<edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
<edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
<edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
<edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
<edge from-layer="31" from-port="2" to-layer="86" to-port="1" />
<edge from-layer="31" from-port="2" to-layer="49" to-port="0" />
<edge from-layer="31" from-port="2" to-layer="41" to-port="0" />
<edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
<edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
<edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
<edge from-layer="39" from-port="2" to-layer="77" to-port="0" />
<edge from-layer="40" from-port="0" to-layer="41" to-port="1" />
<edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
<edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
<edge from-layer="46" from-port="0" to-layer="47" to-port="1" />
<edge from-layer="47" from-port="2" to-layer="77" to-port="1" />
<edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
<edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
<edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
<edge from-layer="51" from-port="2" to-layer="53" to-port="0" />
<edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
<edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
<edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
<edge from-layer="55" from-port="2" to-layer="77" to-port="2" />
<edge from-layer="56" from-port="0" to-layer="73" to-port="0" />
<edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
<edge from-layer="58" from-port="2" to-layer="60" to-port="0" />
<edge from-layer="59" from-port="0" to-layer="60" to-port="1" />
<edge from-layer="60" from-port="2" to-layer="69" to-port="0" />
<edge from-layer="61" from-port="0" to-layer="63" to-port="1" />
<edge from-layer="62" from-port="0" to-layer="63" to-port="2" />
<edge from-layer="63" from-port="3" to-layer="68" to-port="0" />
<edge from-layer="64" from-port="0" to-layer="68" to-port="1" />
<edge from-layer="65" from-port="0" to-layer="67" to-port="1" />
<edge from-layer="66" from-port="0" to-layer="67" to-port="2" />
<edge from-layer="67" from-port="3" to-layer="68" to-port="2" />
<edge from-layer="68" from-port="3" to-layer="69" to-port="1" />
<edge from-layer="69" from-port="2" to-layer="70" to-port="0" />
<edge from-layer="70" from-port="1" to-layer="72" to-port="0" />
<edge from-layer="71" from-port="0" to-layer="72" to-port="1" />
<edge from-layer="72" from-port="2" to-layer="73" to-port="1" />
<edge from-layer="73" from-port="2" to-layer="74" to-port="0" />
<edge from-layer="73" from-port="2" to-layer="76" to-port="2" />
<edge from-layer="74" from-port="1" to-layer="76" to-port="0" />
<edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
<edge from-layer="76" from-port="3" to-layer="189" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="245" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="301" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="357" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="413" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="469" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="525" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="581" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="637" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="133" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="693" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="749" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1365" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1309" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1253" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1197" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1141" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1085" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="1029" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="973" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="917" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="861" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="805" to-port="3" />
<edge from-layer="76" from-port="3" to-layer="77" to-port="3" />
<edge from-layer="77" from-port="4" to-layer="79" to-port="0" />
<edge from-layer="78" from-port="0" to-layer="79" to-port="1" />
<edge from-layer="79" from-port="2" to-layer="81" to-port="0" />
<edge from-layer="80" from-port="0" to-layer="81" to-port="1" />
<edge from-layer="81" from-port="2" to-layer="83" to-port="0" />
<edge from-layer="82" from-port="0" to-layer="83" to-port="1" />
<edge from-layer="83" from-port="2" to-layer="85" to-port="0" />
<edge from-layer="84" from-port="0" to-layer="85" to-port="1" />
<edge from-layer="85" from-port="2" to-layer="86" to-port="0" />
<edge from-layer="86" from-port="2" to-layer="88" to-port="0" />
<edge from-layer="87" from-port="0" to-layer="88" to-port="1" />
<edge from-layer="88" from-port="2" to-layer="90" to-port="0" />
<edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
<edge from-layer="90" from-port="2" to-layer="92" to-port="0" />
<edge from-layer="91" from-port="0" to-layer="92" to-port="1" />
<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
<edge from-layer="92" from-port="2" to-layer="102" to-port="1" />
<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
<edge from-layer="96" from-port="2" to-layer="97" to-port="0" />
<edge from-layer="97" from-port="1" to-layer="99" to-port="0" />
<edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
<edge from-layer="99" from-port="2" to-layer="101" to-port="0" />
<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
<edge from-layer="101" from-port="2" to-layer="102" to-port="0" />
<edge from-layer="102" from-port="2" to-layer="104" to-port="0" />
<edge from-layer="103" from-port="0" to-layer="104" to-port="1" />
<edge from-layer="104" from-port="2" to-layer="106" to-port="0" />
<edge from-layer="105" from-port="0" to-layer="106" to-port="1" />
<edge from-layer="106" from-port="2" to-layer="108" to-port="0" />
<edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
<edge from-layer="108" from-port="2" to-layer="126" to-port="0" />
<edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
<edge from-layer="108" from-port="2" to-layer="118" to-port="0" />
<edge from-layer="108" from-port="2" to-layer="142" to-port="1" />
<edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
<edge from-layer="110" from-port="2" to-layer="112" to-port="0" />
<edge from-layer="111" from-port="0" to-layer="112" to-port="1" />
<edge from-layer="112" from-port="2" to-layer="114" to-port="0" />
<edge from-layer="113" from-port="0" to-layer="114" to-port="1" />
<edge from-layer="114" from-port="2" to-layer="116" to-port="0" />
<edge from-layer="115" from-port="0" to-layer="116" to-port="1" />
<edge from-layer="116" from-port="2" to-layer="133" to-port="0" />
<edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
<edge from-layer="118" from-port="2" to-layer="120" to-port="0" />
<edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
<edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
<edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
<edge from-layer="122" from-port="2" to-layer="124" to-port="0" />
<edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
<edge from-layer="124" from-port="2" to-layer="133" to-port="1" />
<edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
<edge from-layer="126" from-port="2" to-layer="128" to-port="0" />
<edge from-layer="127" from-port="0" to-layer="128" to-port="1" />
<edge from-layer="128" from-port="2" to-layer="130" to-port="0" />
<edge from-layer="129" from-port="0" to-layer="130" to-port="1" />
<edge from-layer="130" from-port="2" to-layer="132" to-port="0" />
<edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
<edge from-layer="132" from-port="2" to-layer="133" to-port="2" />
<edge from-layer="133" from-port="4" to-layer="135" to-port="0" />
|
|
<edge from-layer="134" from-port="0" to-layer="135" to-port="1" />
|
|
<edge from-layer="135" from-port="2" to-layer="137" to-port="0" />
|
|
<edge from-layer="136" from-port="0" to-layer="137" to-port="1" />
|
|
<edge from-layer="137" from-port="2" to-layer="139" to-port="0" />
|
|
<edge from-layer="138" from-port="0" to-layer="139" to-port="1" />
|
|
<edge from-layer="139" from-port="2" to-layer="141" to-port="0" />
|
|
<edge from-layer="140" from-port="0" to-layer="141" to-port="1" />
|
|
<edge from-layer="141" from-port="2" to-layer="142" to-port="0" />
|
|
<edge from-layer="142" from-port="2" to-layer="144" to-port="0" />
|
|
<edge from-layer="143" from-port="0" to-layer="144" to-port="1" />
|
|
<edge from-layer="144" from-port="2" to-layer="146" to-port="0" />
|
|
<edge from-layer="145" from-port="0" to-layer="146" to-port="1" />
|
|
<edge from-layer="146" from-port="2" to-layer="148" to-port="0" />
|
|
<edge from-layer="147" from-port="0" to-layer="148" to-port="1" />
|
|
<edge from-layer="148" from-port="2" to-layer="158" to-port="1" />
|
|
<edge from-layer="148" from-port="2" to-layer="150" to-port="0" />
|
|
<edge from-layer="149" from-port="0" to-layer="150" to-port="1" />
|
|
<edge from-layer="150" from-port="2" to-layer="152" to-port="0" />
|
|
<edge from-layer="151" from-port="0" to-layer="152" to-port="1" />
|
|
<edge from-layer="152" from-port="2" to-layer="153" to-port="0" />
|
|
<edge from-layer="153" from-port="1" to-layer="155" to-port="0" />
|
|
<edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
|
|
<edge from-layer="155" from-port="2" to-layer="157" to-port="0" />
|
|
<edge from-layer="156" from-port="0" to-layer="157" to-port="1" />
|
|
<edge from-layer="157" from-port="2" to-layer="158" to-port="0" />
|
|
<edge from-layer="158" from-port="2" to-layer="160" to-port="0" />
|
|
<edge from-layer="159" from-port="0" to-layer="160" to-port="1" />
|
|
<edge from-layer="160" from-port="2" to-layer="162" to-port="0" />
|
|
<edge from-layer="161" from-port="0" to-layer="162" to-port="1" />
|
|
<edge from-layer="162" from-port="2" to-layer="164" to-port="0" />
|
|
<edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
|
|
<edge from-layer="164" from-port="2" to-layer="182" to-port="0" />
|
|
<edge from-layer="164" from-port="2" to-layer="166" to-port="0" />
|
|
<edge from-layer="164" from-port="2" to-layer="174" to-port="0" />
|
|
<edge from-layer="164" from-port="2" to-layer="198" to-port="1" />
|
|
<edge from-layer="165" from-port="0" to-layer="166" to-port="1" />
|
|
<edge from-layer="166" from-port="2" to-layer="168" to-port="0" />
|
|
<edge from-layer="167" from-port="0" to-layer="168" to-port="1" />
|
|
<edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
|
|
<edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
|
|
<edge from-layer="170" from-port="2" to-layer="172" to-port="0" />
|
|
<edge from-layer="171" from-port="0" to-layer="172" to-port="1" />
|
|
<edge from-layer="172" from-port="2" to-layer="189" to-port="0" />
|
|
<edge from-layer="173" from-port="0" to-layer="174" to-port="1" />
|
|
<edge from-layer="174" from-port="2" to-layer="176" to-port="0" />
|
|
<edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
|
|
<edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
|
|
<edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
|
|
<edge from-layer="178" from-port="2" to-layer="180" to-port="0" />
|
|
<edge from-layer="179" from-port="0" to-layer="180" to-port="1" />
|
|
<edge from-layer="180" from-port="2" to-layer="189" to-port="1" />
|
|
<edge from-layer="181" from-port="0" to-layer="182" to-port="1" />
|
|
<edge from-layer="182" from-port="2" to-layer="184" to-port="0" />
|
|
<edge from-layer="183" from-port="0" to-layer="184" to-port="1" />
|
|
<edge from-layer="184" from-port="2" to-layer="186" to-port="0" />
|
|
<edge from-layer="185" from-port="0" to-layer="186" to-port="1" />
|
|
<edge from-layer="186" from-port="2" to-layer="188" to-port="0" />
|
|
<edge from-layer="187" from-port="0" to-layer="188" to-port="1" />
|
|
<edge from-layer="188" from-port="2" to-layer="189" to-port="2" />
|
|
<edge from-layer="189" from-port="4" to-layer="191" to-port="0" />
|
|
<edge from-layer="190" from-port="0" to-layer="191" to-port="1" />
|
|
<edge from-layer="191" from-port="2" to-layer="193" to-port="0" />
|
|
<edge from-layer="192" from-port="0" to-layer="193" to-port="1" />
|
|
<edge from-layer="193" from-port="2" to-layer="195" to-port="0" />
|
|
<edge from-layer="194" from-port="0" to-layer="195" to-port="1" />
|
|
<edge from-layer="195" from-port="2" to-layer="197" to-port="0" />
|
|
<edge from-layer="196" from-port="0" to-layer="197" to-port="1" />
|
|
<edge from-layer="197" from-port="2" to-layer="198" to-port="0" />
|
|
<edge from-layer="198" from-port="2" to-layer="200" to-port="0" />
|
|
<edge from-layer="199" from-port="0" to-layer="200" to-port="1" />
|
|
<edge from-layer="200" from-port="2" to-layer="202" to-port="0" />
|
|
<edge from-layer="201" from-port="0" to-layer="202" to-port="1" />
|
|
<edge from-layer="202" from-port="2" to-layer="204" to-port="0" />
|
|
<edge from-layer="203" from-port="0" to-layer="204" to-port="1" />
|
|
<edge from-layer="204" from-port="2" to-layer="214" to-port="1" />
|
|
<edge from-layer="204" from-port="2" to-layer="206" to-port="0" />
|
|
<edge from-layer="205" from-port="0" to-layer="206" to-port="1" />
|
|
<edge from-layer="206" from-port="2" to-layer="208" to-port="0" />
|
|
<edge from-layer="207" from-port="0" to-layer="208" to-port="1" />
|
|
<edge from-layer="208" from-port="2" to-layer="209" to-port="0" />
|
|
<edge from-layer="209" from-port="1" to-layer="211" to-port="0" />
|
|
<edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
|
|
<edge from-layer="211" from-port="2" to-layer="213" to-port="0" />
|
|
<edge from-layer="212" from-port="0" to-layer="213" to-port="1" />
|
|
<edge from-layer="213" from-port="2" to-layer="214" to-port="0" />
|
|
<edge from-layer="214" from-port="2" to-layer="216" to-port="0" />
|
|
<edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
|
|
<edge from-layer="216" from-port="2" to-layer="218" to-port="0" />
|
|
<edge from-layer="217" from-port="0" to-layer="218" to-port="1" />
|
|
<edge from-layer="218" from-port="2" to-layer="220" to-port="0" />
|
|
<edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
|
|
<edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
|
|
<edge from-layer="220" from-port="2" to-layer="254" to-port="1" />
|
|
<edge from-layer="220" from-port="2" to-layer="238" to-port="0" />
|
|
<edge from-layer="220" from-port="2" to-layer="230" to-port="0" />
|
|
<edge from-layer="221" from-port="0" to-layer="222" to-port="1" />
|
|
<edge from-layer="222" from-port="2" to-layer="224" to-port="0" />
|
|
<edge from-layer="223" from-port="0" to-layer="224" to-port="1" />
|
|
<edge from-layer="224" from-port="2" to-layer="226" to-port="0" />
|
|
<edge from-layer="225" from-port="0" to-layer="226" to-port="1" />
|
|
<edge from-layer="226" from-port="2" to-layer="228" to-port="0" />
|
|
<edge from-layer="227" from-port="0" to-layer="228" to-port="1" />
|
|
<edge from-layer="228" from-port="2" to-layer="245" to-port="0" />
|
|
<edge from-layer="229" from-port="0" to-layer="230" to-port="1" />
|
|
<edge from-layer="230" from-port="2" to-layer="232" to-port="0" />
|
|
<edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
|
|
<edge from-layer="232" from-port="2" to-layer="234" to-port="0" />
|
|
<edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
|
|
<edge from-layer="234" from-port="2" to-layer="236" to-port="0" />
|
|
<edge from-layer="235" from-port="0" to-layer="236" to-port="1" />
|
|
<edge from-layer="236" from-port="2" to-layer="245" to-port="1" />
|
|
<edge from-layer="237" from-port="0" to-layer="238" to-port="1" />
|
|
<edge from-layer="238" from-port="2" to-layer="240" to-port="0" />
|
|
<edge from-layer="239" from-port="0" to-layer="240" to-port="1" />
|
|
<edge from-layer="240" from-port="2" to-layer="242" to-port="0" />
|
|
<edge from-layer="241" from-port="0" to-layer="242" to-port="1" />
|
|
<edge from-layer="242" from-port="2" to-layer="244" to-port="0" />
|
|
<edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
|
|
<edge from-layer="244" from-port="2" to-layer="245" to-port="2" />
|
|
<edge from-layer="245" from-port="4" to-layer="247" to-port="0" />
|
|
<edge from-layer="246" from-port="0" to-layer="247" to-port="1" />
|
|
<edge from-layer="247" from-port="2" to-layer="249" to-port="0" />
|
|
<edge from-layer="248" from-port="0" to-layer="249" to-port="1" />
|
|
<edge from-layer="249" from-port="2" to-layer="251" to-port="0" />
|
|
<edge from-layer="250" from-port="0" to-layer="251" to-port="1" />
|
|
<edge from-layer="251" from-port="2" to-layer="253" to-port="0" />
|
|
<edge from-layer="252" from-port="0" to-layer="253" to-port="1" />
|
|
<edge from-layer="253" from-port="2" to-layer="254" to-port="0" />
|
|
<edge from-layer="254" from-port="2" to-layer="256" to-port="0" />
|
|
<edge from-layer="255" from-port="0" to-layer="256" to-port="1" />
|
|
<edge from-layer="256" from-port="2" to-layer="258" to-port="0" />
|
|
<edge from-layer="257" from-port="0" to-layer="258" to-port="1" />
|
|
<edge from-layer="258" from-port="2" to-layer="260" to-port="0" />
|
|
<edge from-layer="259" from-port="0" to-layer="260" to-port="1" />
|
|
<edge from-layer="260" from-port="2" to-layer="270" to-port="1" />
|
|
<edge from-layer="260" from-port="2" to-layer="262" to-port="0" />
|
|
<edge from-layer="261" from-port="0" to-layer="262" to-port="1" />
|
|
<edge from-layer="262" from-port="2" to-layer="264" to-port="0" />
|
|
<edge from-layer="263" from-port="0" to-layer="264" to-port="1" />
|
|
<edge from-layer="264" from-port="2" to-layer="265" to-port="0" />
|
|
<edge from-layer="265" from-port="1" to-layer="267" to-port="0" />
|
|
<edge from-layer="266" from-port="0" to-layer="267" to-port="1" />
|
|
<edge from-layer="267" from-port="2" to-layer="269" to-port="0" />
|
|
<edge from-layer="268" from-port="0" to-layer="269" to-port="1" />
|
|
<edge from-layer="269" from-port="2" to-layer="270" to-port="0" />
|
|
<edge from-layer="270" from-port="2" to-layer="272" to-port="0" />
|
|
<edge from-layer="271" from-port="0" to-layer="272" to-port="1" />
|
|
<edge from-layer="272" from-port="2" to-layer="274" to-port="0" />
|
|
<edge from-layer="273" from-port="0" to-layer="274" to-port="1" />
|
|
<edge from-layer="274" from-port="2" to-layer="276" to-port="0" />
|
|
<edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
|
|
<edge from-layer="276" from-port="2" to-layer="294" to-port="0" />
|
|
<edge from-layer="276" from-port="2" to-layer="278" to-port="0" />
|
|
<edge from-layer="276" from-port="2" to-layer="310" to-port="1" />
|
|
<edge from-layer="276" from-port="2" to-layer="286" to-port="0" />
|
|
<edge from-layer="277" from-port="0" to-layer="278" to-port="1" />
|
|
<edge from-layer="278" from-port="2" to-layer="280" to-port="0" />
|
|
<edge from-layer="279" from-port="0" to-layer="280" to-port="1" />
|
|
<edge from-layer="280" from-port="2" to-layer="282" to-port="0" />
|
|
<edge from-layer="281" from-port="0" to-layer="282" to-port="1" />
|
|
<edge from-layer="282" from-port="2" to-layer="284" to-port="0" />
|
|
<edge from-layer="283" from-port="0" to-layer="284" to-port="1" />
|
|
<edge from-layer="284" from-port="2" to-layer="301" to-port="0" />
|
|
<edge from-layer="285" from-port="0" to-layer="286" to-port="1" />
|
|
<edge from-layer="286" from-port="2" to-layer="288" to-port="0" />
|
|
<edge from-layer="287" from-port="0" to-layer="288" to-port="1" />
|
|
<edge from-layer="288" from-port="2" to-layer="290" to-port="0" />
|
|
<edge from-layer="289" from-port="0" to-layer="290" to-port="1" />
|
|
<edge from-layer="290" from-port="2" to-layer="292" to-port="0" />
|
|
<edge from-layer="291" from-port="0" to-layer="292" to-port="1" />
|
|
<edge from-layer="292" from-port="2" to-layer="301" to-port="1" />
|
|
<edge from-layer="293" from-port="0" to-layer="294" to-port="1" />
|
|
<edge from-layer="294" from-port="2" to-layer="296" to-port="0" />
|
|
<edge from-layer="295" from-port="0" to-layer="296" to-port="1" />
|
|
<edge from-layer="296" from-port="2" to-layer="298" to-port="0" />
|
|
<edge from-layer="297" from-port="0" to-layer="298" to-port="1" />
|
|
<edge from-layer="298" from-port="2" to-layer="300" to-port="0" />
|
|
<edge from-layer="299" from-port="0" to-layer="300" to-port="1" />
|
|
<edge from-layer="300" from-port="2" to-layer="301" to-port="2" />
|
|
<edge from-layer="301" from-port="4" to-layer="303" to-port="0" />
|
|
<edge from-layer="302" from-port="0" to-layer="303" to-port="1" />
|
|
<edge from-layer="303" from-port="2" to-layer="305" to-port="0" />
|
|
<edge from-layer="304" from-port="0" to-layer="305" to-port="1" />
|
|
<edge from-layer="305" from-port="2" to-layer="307" to-port="0" />
|
|
<edge from-layer="306" from-port="0" to-layer="307" to-port="1" />
|
|
<edge from-layer="307" from-port="2" to-layer="309" to-port="0" />
|
|
<edge from-layer="308" from-port="0" to-layer="309" to-port="1" />
|
|
<edge from-layer="309" from-port="2" to-layer="310" to-port="0" />
|
|
<edge from-layer="310" from-port="2" to-layer="312" to-port="0" />
|
|
<edge from-layer="311" from-port="0" to-layer="312" to-port="1" />
|
|
<edge from-layer="312" from-port="2" to-layer="314" to-port="0" />
|
|
<edge from-layer="313" from-port="0" to-layer="314" to-port="1" />
|
|
<edge from-layer="314" from-port="2" to-layer="316" to-port="0" />
|
|
<edge from-layer="315" from-port="0" to-layer="316" to-port="1" />
|
|
<edge from-layer="316" from-port="2" to-layer="326" to-port="1" />
|
|
<edge from-layer="316" from-port="2" to-layer="318" to-port="0" />
|
|
<edge from-layer="317" from-port="0" to-layer="318" to-port="1" />
|
|
<edge from-layer="318" from-port="2" to-layer="320" to-port="0" />
|
|
<edge from-layer="319" from-port="0" to-layer="320" to-port="1" />
|
|
<edge from-layer="320" from-port="2" to-layer="321" to-port="0" />
|
|
<edge from-layer="321" from-port="1" to-layer="323" to-port="0" />
|
|
<edge from-layer="322" from-port="0" to-layer="323" to-port="1" />
|
|
<edge from-layer="323" from-port="2" to-layer="325" to-port="0" />
|
|
<edge from-layer="324" from-port="0" to-layer="325" to-port="1" />
|
|
<edge from-layer="325" from-port="2" to-layer="326" to-port="0" />
|
|
<edge from-layer="326" from-port="2" to-layer="328" to-port="0" />
|
|
<edge from-layer="327" from-port="0" to-layer="328" to-port="1" />
|
|
<edge from-layer="328" from-port="2" to-layer="330" to-port="0" />
|
|
<edge from-layer="329" from-port="0" to-layer="330" to-port="1" />
|
|
<edge from-layer="330" from-port="2" to-layer="332" to-port="0" />
|
|
<edge from-layer="331" from-port="0" to-layer="332" to-port="1" />
|
|
<edge from-layer="332" from-port="2" to-layer="350" to-port="0" />
|
|
<edge from-layer="332" from-port="2" to-layer="366" to-port="1" />
|
|
<edge from-layer="332" from-port="2" to-layer="342" to-port="0" />
|
|
<edge from-layer="332" from-port="2" to-layer="334" to-port="0" />
|
|
<edge from-layer="333" from-port="0" to-layer="334" to-port="1" />
|
|
<edge from-layer="334" from-port="2" to-layer="336" to-port="0" />
|
|
<edge from-layer="335" from-port="0" to-layer="336" to-port="1" />
|
|
<edge from-layer="336" from-port="2" to-layer="338" to-port="0" />
|
|
<edge from-layer="337" from-port="0" to-layer="338" to-port="1" />
|
|
<edge from-layer="338" from-port="2" to-layer="340" to-port="0" />
|
|
<edge from-layer="339" from-port="0" to-layer="340" to-port="1" />
|
|
<edge from-layer="340" from-port="2" to-layer="357" to-port="0" />
|
|
<edge from-layer="341" from-port="0" to-layer="342" to-port="1" />
|
|
<edge from-layer="342" from-port="2" to-layer="344" to-port="0" />
|
|
<edge from-layer="343" from-port="0" to-layer="344" to-port="1" />
|
|
<edge from-layer="344" from-port="2" to-layer="346" to-port="0" />
|
|
<edge from-layer="345" from-port="0" to-layer="346" to-port="1" />
|
|
<edge from-layer="346" from-port="2" to-layer="348" to-port="0" />
|
|
<edge from-layer="347" from-port="0" to-layer="348" to-port="1" />
|
|
<edge from-layer="348" from-port="2" to-layer="357" to-port="1" />
|
|
<edge from-layer="349" from-port="0" to-layer="350" to-port="1" />
|
|
<edge from-layer="350" from-port="2" to-layer="352" to-port="0" />
|
|
<edge from-layer="351" from-port="0" to-layer="352" to-port="1" />
|
|
<edge from-layer="352" from-port="2" to-layer="354" to-port="0" />
|
|
<edge from-layer="353" from-port="0" to-layer="354" to-port="1" />
|
|
<edge from-layer="354" from-port="2" to-layer="356" to-port="0" />
|
|
<edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
|
|
<edge from-layer="356" from-port="2" to-layer="357" to-port="2" />
|
|
<edge from-layer="357" from-port="4" to-layer="359" to-port="0" />
|
|
<edge from-layer="358" from-port="0" to-layer="359" to-port="1" />
|
|
<edge from-layer="359" from-port="2" to-layer="361" to-port="0" />
|
|
<edge from-layer="360" from-port="0" to-layer="361" to-port="1" />
|
|
<edge from-layer="361" from-port="2" to-layer="363" to-port="0" />
|
|
<edge from-layer="362" from-port="0" to-layer="363" to-port="1" />
|
|
<edge from-layer="363" from-port="2" to-layer="365" to-port="0" />
|
|
<edge from-layer="364" from-port="0" to-layer="365" to-port="1" />
|
|
<edge from-layer="365" from-port="2" to-layer="366" to-port="0" />
|
|
<edge from-layer="366" from-port="2" to-layer="368" to-port="0" />
|
|
<edge from-layer="367" from-port="0" to-layer="368" to-port="1" />
|
|
<edge from-layer="368" from-port="2" to-layer="370" to-port="0" />
|
|
<edge from-layer="369" from-port="0" to-layer="370" to-port="1" />
|
|
<edge from-layer="370" from-port="2" to-layer="372" to-port="0" />
|
|
<edge from-layer="371" from-port="0" to-layer="372" to-port="1" />
|
|
<edge from-layer="372" from-port="2" to-layer="374" to-port="0" />
|
|
<edge from-layer="372" from-port="2" to-layer="382" to-port="1" />
|
|
<edge from-layer="373" from-port="0" to-layer="374" to-port="1" />
|
|
<edge from-layer="374" from-port="2" to-layer="376" to-port="0" />
|
|
<edge from-layer="375" from-port="0" to-layer="376" to-port="1" />
|
|
<edge from-layer="376" from-port="2" to-layer="377" to-port="0" />
|
|
<edge from-layer="377" from-port="1" to-layer="379" to-port="0" />
|
|
<edge from-layer="378" from-port="0" to-layer="379" to-port="1" />
|
|
<edge from-layer="379" from-port="2" to-layer="381" to-port="0" />
|
|
<edge from-layer="380" from-port="0" to-layer="381" to-port="1" />
|
|
<edge from-layer="381" from-port="2" to-layer="382" to-port="0" />
|
|
<edge from-layer="382" from-port="2" to-layer="384" to-port="0" />
|
|
<edge from-layer="383" from-port="0" to-layer="384" to-port="1" />
|
|
<edge from-layer="384" from-port="2" to-layer="386" to-port="0" />
|
|
<edge from-layer="385" from-port="0" to-layer="386" to-port="1" />
|
|
<edge from-layer="386" from-port="2" to-layer="388" to-port="0" />
|
|
<edge from-layer="387" from-port="0" to-layer="388" to-port="1" />
|
|
<edge from-layer="388" from-port="2" to-layer="406" to-port="0" />
|
|
<edge from-layer="388" from-port="2" to-layer="398" to-port="0" />
|
|
<edge from-layer="388" from-port="2" to-layer="390" to-port="0" />
|
|
<edge from-layer="388" from-port="2" to-layer="422" to-port="1" />
|
|
<edge from-layer="389" from-port="0" to-layer="390" to-port="1" />
|
|
<edge from-layer="390" from-port="2" to-layer="392" to-port="0" />
|
|
<edge from-layer="391" from-port="0" to-layer="392" to-port="1" />
|
|
<edge from-layer="392" from-port="2" to-layer="394" to-port="0" />
|
|
<edge from-layer="393" from-port="0" to-layer="394" to-port="1" />
|
|
<edge from-layer="394" from-port="2" to-layer="396" to-port="0" />
|
|
<edge from-layer="395" from-port="0" to-layer="396" to-port="1" />
|
|
<edge from-layer="396" from-port="2" to-layer="413" to-port="0" />
|
|
<edge from-layer="397" from-port="0" to-layer="398" to-port="1" />
|
|
<edge from-layer="398" from-port="2" to-layer="400" to-port="0" />
|
|
<edge from-layer="399" from-port="0" to-layer="400" to-port="1" />
|
|
<edge from-layer="400" from-port="2" to-layer="402" to-port="0" />
|
|
<edge from-layer="401" from-port="0" to-layer="402" to-port="1" />
|
|
<edge from-layer="402" from-port="2" to-layer="404" to-port="0" />
|
|
<edge from-layer="403" from-port="0" to-layer="404" to-port="1" />
|
|
<edge from-layer="404" from-port="2" to-layer="413" to-port="1" />
|
|
<edge from-layer="405" from-port="0" to-layer="406" to-port="1" />
|
|
<edge from-layer="406" from-port="2" to-layer="408" to-port="0" />
|
|
<edge from-layer="407" from-port="0" to-layer="408" to-port="1" />
|
|
<edge from-layer="408" from-port="2" to-layer="410" to-port="0" />
|
|
<edge from-layer="409" from-port="0" to-layer="410" to-port="1" />
|
|
<edge from-layer="410" from-port="2" to-layer="412" to-port="0" />
|
|
<edge from-layer="411" from-port="0" to-layer="412" to-port="1" />
|
|
<edge from-layer="412" from-port="2" to-layer="413" to-port="2" />
|
|
<edge from-layer="413" from-port="4" to-layer="415" to-port="0" />
|
|
<edge from-layer="414" from-port="0" to-layer="415" to-port="1" />
|
|
<edge from-layer="415" from-port="2" to-layer="417" to-port="0" />
|
|
<edge from-layer="416" from-port="0" to-layer="417" to-port="1" />
|
|
<edge from-layer="417" from-port="2" to-layer="419" to-port="0" />
|
|
<edge from-layer="418" from-port="0" to-layer="419" to-port="1" />
|
|
<edge from-layer="419" from-port="2" to-layer="421" to-port="0" />
|
|
<edge from-layer="420" from-port="0" to-layer="421" to-port="1" />
|
|
<edge from-layer="421" from-port="2" to-layer="422" to-port="0" />
|
|
<edge from-layer="422" from-port="2" to-layer="424" to-port="0" />
|
|
<edge from-layer="423" from-port="0" to-layer="424" to-port="1" />
|
|
<edge from-layer="424" from-port="2" to-layer="426" to-port="0" />
|
|
<edge from-layer="425" from-port="0" to-layer="426" to-port="1" />
|
|
<edge from-layer="426" from-port="2" to-layer="428" to-port="0" />
|
|
<edge from-layer="427" from-port="0" to-layer="428" to-port="1" />
|
|
<edge from-layer="428" from-port="2" to-layer="438" to-port="1" />
|
|
<edge from-layer="428" from-port="2" to-layer="430" to-port="0" />
|
|
<edge from-layer="429" from-port="0" to-layer="430" to-port="1" />
|
|
<edge from-layer="430" from-port="2" to-layer="432" to-port="0" />
|
|
<edge from-layer="431" from-port="0" to-layer="432" to-port="1" />
|
|
<edge from-layer="432" from-port="2" to-layer="433" to-port="0" />
|
|
<edge from-layer="433" from-port="1" to-layer="435" to-port="0" />
|
|
<edge from-layer="434" from-port="0" to-layer="435" to-port="1" />
|
|
<edge from-layer="435" from-port="2" to-layer="437" to-port="0" />
|
|
<edge from-layer="436" from-port="0" to-layer="437" to-port="1" />
|
|
<edge from-layer="437" from-port="2" to-layer="438" to-port="0" />
|
|
<edge from-layer="438" from-port="2" to-layer="440" to-port="0" />
|
|
<edge from-layer="439" from-port="0" to-layer="440" to-port="1" />
|
|
<edge from-layer="440" from-port="2" to-layer="442" to-port="0" />
|
|
<edge from-layer="441" from-port="0" to-layer="442" to-port="1" />
|
|
<edge from-layer="442" from-port="2" to-layer="444" to-port="0" />
|
|
<edge from-layer="443" from-port="0" to-layer="444" to-port="1" />
|
|
<edge from-layer="444" from-port="2" to-layer="446" to-port="0" />
|
|
<edge from-layer="444" from-port="2" to-layer="478" to-port="1" />
|
|
<edge from-layer="444" from-port="2" to-layer="462" to-port="0" />
|
|
<edge from-layer="444" from-port="2" to-layer="454" to-port="0" />
|
|
<edge from-layer="445" from-port="0" to-layer="446" to-port="1" />
|
|
<edge from-layer="446" from-port="2" to-layer="448" to-port="0" />
|
|
<edge from-layer="447" from-port="0" to-layer="448" to-port="1" />
|
|
<edge from-layer="448" from-port="2" to-layer="450" to-port="0" />
|
|
<edge from-layer="449" from-port="0" to-layer="450" to-port="1" />
|
|
<edge from-layer="450" from-port="2" to-layer="452" to-port="0" />
|
|
<edge from-layer="451" from-port="0" to-layer="452" to-port="1" />
|
|
<edge from-layer="452" from-port="2" to-layer="469" to-port="0" />
|
|
<edge from-layer="453" from-port="0" to-layer="454" to-port="1" />
|
|
<edge from-layer="454" from-port="2" to-layer="456" to-port="0" />
|
|
<edge from-layer="455" from-port="0" to-layer="456" to-port="1" />
|
|
<edge from-layer="456" from-port="2" to-layer="458" to-port="0" />
|
|
<edge from-layer="457" from-port="0" to-layer="458" to-port="1" />
|
|
<edge from-layer="458" from-port="2" to-layer="460" to-port="0" />
|
|
<edge from-layer="459" from-port="0" to-layer="460" to-port="1" />
|
|
<edge from-layer="460" from-port="2" to-layer="469" to-port="1" />
|
|
<edge from-layer="461" from-port="0" to-layer="462" to-port="1" />
|
|
<edge from-layer="462" from-port="2" to-layer="464" to-port="0" />
|
|
<edge from-layer="463" from-port="0" to-layer="464" to-port="1" />
|
|
<edge from-layer="464" from-port="2" to-layer="466" to-port="0" />
|
|
<edge from-layer="465" from-port="0" to-layer="466" to-port="1" />
|
|
<edge from-layer="466" from-port="2" to-layer="468" to-port="0" />
|
|
<edge from-layer="467" from-port="0" to-layer="468" to-port="1" />
|
|
<edge from-layer="468" from-port="2" to-layer="469" to-port="2" />
|
|
<edge from-layer="469" from-port="4" to-layer="471" to-port="0" />
|
|
<edge from-layer="470" from-port="0" to-layer="471" to-port="1" />
|
|
<edge from-layer="471" from-port="2" to-layer="473" to-port="0" />
|
|
<edge from-layer="472" from-port="0" to-layer="473" to-port="1" />
|
|
<edge from-layer="473" from-port="2" to-layer="475" to-port="0" />
|
|
<edge from-layer="474" from-port="0" to-layer="475" to-port="1" />
|
|
<edge from-layer="475" from-port="2" to-layer="477" to-port="0" />
|
|
<edge from-layer="476" from-port="0" to-layer="477" to-port="1" />
|
|
<edge from-layer="477" from-port="2" to-layer="478" to-port="0" />
|
|
<edge from-layer="478" from-port="2" to-layer="480" to-port="0" />
|
|
<edge from-layer="479" from-port="0" to-layer="480" to-port="1" />
|
|
<edge from-layer="480" from-port="2" to-layer="482" to-port="0" />
|
|
<edge from-layer="481" from-port="0" to-layer="482" to-port="1" />
|
|
<edge from-layer="482" from-port="2" to-layer="484" to-port="0" />
|
|
<edge from-layer="483" from-port="0" to-layer="484" to-port="1" />
|
|
<edge from-layer="484" from-port="2" to-layer="494" to-port="1" />
|
|
<edge from-layer="484" from-port="2" to-layer="486" to-port="0" />
|
|
<edge from-layer="485" from-port="0" to-layer="486" to-port="1" />
|
|
<edge from-layer="486" from-port="2" to-layer="488" to-port="0" />
|
|
<edge from-layer="487" from-port="0" to-layer="488" to-port="1" />
|
|
<edge from-layer="488" from-port="2" to-layer="489" to-port="0" />
|
|
<edge from-layer="489" from-port="1" to-layer="491" to-port="0" />
|
|
<edge from-layer="490" from-port="0" to-layer="491" to-port="1" />
|
|
<edge from-layer="491" from-port="2" to-layer="493" to-port="0" />
|
|
<edge from-layer="492" from-port="0" to-layer="493" to-port="1" />
|
|
<edge from-layer="493" from-port="2" to-layer="494" to-port="0" />
|
|
<edge from-layer="494" from-port="2" to-layer="496" to-port="0" />
|
|
<edge from-layer="495" from-port="0" to-layer="496" to-port="1" />
|
|
<edge from-layer="496" from-port="2" to-layer="498" to-port="0" />
|
|
<edge from-layer="497" from-port="0" to-layer="498" to-port="1" />
|
|
<edge from-layer="498" from-port="2" to-layer="500" to-port="0" />
|
|
<edge from-layer="499" from-port="0" to-layer="500" to-port="1" />
|
|
<edge from-layer="500" from-port="2" to-layer="534" to-port="1" />
|
|
<edge from-layer="500" from-port="2" to-layer="502" to-port="0" />
|
|
<edge from-layer="500" from-port="2" to-layer="518" to-port="0" />
|
|
<edge from-layer="500" from-port="2" to-layer="510" to-port="0" />
|
|
<edge from-layer="501" from-port="0" to-layer="502" to-port="1" />
|
|
<edge from-layer="502" from-port="2" to-layer="504" to-port="0" />
|
|
<edge from-layer="503" from-port="0" to-layer="504" to-port="1" />
|
|
<edge from-layer="504" from-port="2" to-layer="506" to-port="0" />
|
|
<edge from-layer="505" from-port="0" to-layer="506" to-port="1" />
|
|
<edge from-layer="506" from-port="2" to-layer="508" to-port="0" />
|
|
<edge from-layer="507" from-port="0" to-layer="508" to-port="1" />
|
|
<edge from-layer="508" from-port="2" to-layer="525" to-port="0" />
|
|
<edge from-layer="509" from-port="0" to-layer="510" to-port="1" />
|
|
<edge from-layer="510" from-port="2" to-layer="512" to-port="0" />
|
|
<edge from-layer="511" from-port="0" to-layer="512" to-port="1" />
|
|
<edge from-layer="512" from-port="2" to-layer="514" to-port="0" />
|
|
<edge from-layer="513" from-port="0" to-layer="514" to-port="1" />
|
|
<edge from-layer="514" from-port="2" to-layer="516" to-port="0" />
|
|
<edge from-layer="515" from-port="0" to-layer="516" to-port="1" />
|
|
<edge from-layer="516" from-port="2" to-layer="525" to-port="1" />
|
|
<edge from-layer="517" from-port="0" to-layer="518" to-port="1" />
|
|
<edge from-layer="518" from-port="2" to-layer="520" to-port="0" />
|
|
<edge from-layer="519" from-port="0" to-layer="520" to-port="1" />
|
|
<edge from-layer="520" from-port="2" to-layer="522" to-port="0" />
|
|
<edge from-layer="521" from-port="0" to-layer="522" to-port="1" />
|
|
<edge from-layer="522" from-port="2" to-layer="524" to-port="0" />
|
|
<edge from-layer="523" from-port="0" to-layer="524" to-port="1" />
|
|
<edge from-layer="524" from-port="2" to-layer="525" to-port="2" />
|
|
<edge from-layer="525" from-port="4" to-layer="527" to-port="0" />
|
|
<edge from-layer="526" from-port="0" to-layer="527" to-port="1" />
|
|
<edge from-layer="527" from-port="2" to-layer="529" to-port="0" />
|
|
<edge from-layer="528" from-port="0" to-layer="529" to-port="1" />
|
|
<edge from-layer="529" from-port="2" to-layer="531" to-port="0" />
|
|
<edge from-layer="530" from-port="0" to-layer="531" to-port="1" />
|
|
<edge from-layer="531" from-port="2" to-layer="533" to-port="0" />
|
|
<edge from-layer="532" from-port="0" to-layer="533" to-port="1" />
|
|
<edge from-layer="533" from-port="2" to-layer="534" to-port="0" />
|
|
<edge from-layer="534" from-port="2" to-layer="536" to-port="0" />
|
|
<edge from-layer="535" from-port="0" to-layer="536" to-port="1" />
|
|
<edge from-layer="536" from-port="2" to-layer="538" to-port="0" />
|
|
<edge from-layer="537" from-port="0" to-layer="538" to-port="1" />
|
|
<edge from-layer="538" from-port="2" to-layer="540" to-port="0" />
|
|
<edge from-layer="539" from-port="0" to-layer="540" to-port="1" />
|
|
<edge from-layer="540" from-port="2" to-layer="550" to-port="1" />
|
|
<edge from-layer="540" from-port="2" to-layer="542" to-port="0" />
|
|
<edge from-layer="541" from-port="0" to-layer="542" to-port="1" />
|
|
<edge from-layer="542" from-port="2" to-layer="544" to-port="0" />
|
|
<edge from-layer="543" from-port="0" to-layer="544" to-port="1" />
|
|
<edge from-layer="544" from-port="2" to-layer="545" to-port="0" />
|
|
<edge from-layer="545" from-port="1" to-layer="547" to-port="0" />
|
|
<edge from-layer="546" from-port="0" to-layer="547" to-port="1" />
|
|
<edge from-layer="547" from-port="2" to-layer="549" to-port="0" />
|
|
<edge from-layer="548" from-port="0" to-layer="549" to-port="1" />
|
|
<edge from-layer="549" from-port="2" to-layer="550" to-port="0" />
|
|
<edge from-layer="550" from-port="2" to-layer="552" to-port="0" />
|
|
<edge from-layer="551" from-port="0" to-layer="552" to-port="1" />
|
|
<edge from-layer="552" from-port="2" to-layer="554" to-port="0" />
|
|
<edge from-layer="553" from-port="0" to-layer="554" to-port="1" />
|
|
<edge from-layer="554" from-port="2" to-layer="556" to-port="0" />
|
|
<edge from-layer="555" from-port="0" to-layer="556" to-port="1" />
|
|
<edge from-layer="556" from-port="2" to-layer="574" to-port="0" />
|
|
<edge from-layer="556" from-port="2" to-layer="590" to-port="1" />
|
|
<edge from-layer="556" from-port="2" to-layer="558" to-port="0" />
|
|
<edge from-layer="556" from-port="2" to-layer="566" to-port="0" />
|
|
<edge from-layer="557" from-port="0" to-layer="558" to-port="1" />
|
|
<edge from-layer="558" from-port="2" to-layer="560" to-port="0" />
|
|
<edge from-layer="559" from-port="0" to-layer="560" to-port="1" />
|
|
<edge from-layer="560" from-port="2" to-layer="562" to-port="0" />
|
|
<edge from-layer="561" from-port="0" to-layer="562" to-port="1" />
|
|
<edge from-layer="562" from-port="2" to-layer="564" to-port="0" />
|
|
<edge from-layer="563" from-port="0" to-layer="564" to-port="1" />
|
|
<edge from-layer="564" from-port="2" to-layer="581" to-port="0" />
|
|
<edge from-layer="565" from-port="0" to-layer="566" to-port="1" />
|
|
<edge from-layer="566" from-port="2" to-layer="568" to-port="0" />
|
|
<edge from-layer="567" from-port="0" to-layer="568" to-port="1" />
|
|
<edge from-layer="568" from-port="2" to-layer="570" to-port="0" />
|
|
<edge from-layer="569" from-port="0" to-layer="570" to-port="1" />
|
|
<edge from-layer="570" from-port="2" to-layer="572" to-port="0" />
|
|
<edge from-layer="571" from-port="0" to-layer="572" to-port="1" />
|
|
<edge from-layer="572" from-port="2" to-layer="581" to-port="1" />
|
|
<edge from-layer="573" from-port="0" to-layer="574" to-port="1" />
|
|
<edge from-layer="574" from-port="2" to-layer="576" to-port="0" />
|
|
<edge from-layer="575" from-port="0" to-layer="576" to-port="1" />
|
|
<edge from-layer="576" from-port="2" to-layer="578" to-port="0" />
|
|
<edge from-layer="577" from-port="0" to-layer="578" to-port="1" />
|
|
<edge from-layer="578" from-port="2" to-layer="580" to-port="0" />
|
|
<edge from-layer="579" from-port="0" to-layer="580" to-port="1" />
|
|
<edge from-layer="580" from-port="2" to-layer="581" to-port="2" />
|
|
<edge from-layer="581" from-port="4" to-layer="583" to-port="0" />
|
|
<edge from-layer="582" from-port="0" to-layer="583" to-port="1" />
|
|
<edge from-layer="583" from-port="2" to-layer="585" to-port="0" />
|
|
<edge from-layer="584" from-port="0" to-layer="585" to-port="1" />
|
|
<edge from-layer="585" from-port="2" to-layer="587" to-port="0" />
|
|
<edge from-layer="586" from-port="0" to-layer="587" to-port="1" />
|
|
<edge from-layer="587" from-port="2" to-layer="589" to-port="0" />
|
|
<edge from-layer="588" from-port="0" to-layer="589" to-port="1" />
|
|
<edge from-layer="589" from-port="2" to-layer="590" to-port="0" />
|
|
<edge from-layer="590" from-port="2" to-layer="592" to-port="0" />
|
|
<edge from-layer="591" from-port="0" to-layer="592" to-port="1" />
|
|
<edge from-layer="592" from-port="2" to-layer="594" to-port="0" />
|
|
<edge from-layer="593" from-port="0" to-layer="594" to-port="1" />
|
|
<edge from-layer="594" from-port="2" to-layer="596" to-port="0" />
|
|
<edge from-layer="595" from-port="0" to-layer="596" to-port="1" />
|
|
<edge from-layer="596" from-port="2" to-layer="606" to-port="1" />
|
|
<edge from-layer="596" from-port="2" to-layer="598" to-port="0" />
|
|
<edge from-layer="597" from-port="0" to-layer="598" to-port="1" />
|
|
<edge from-layer="598" from-port="2" to-layer="600" to-port="0" />
|
|
<edge from-layer="599" from-port="0" to-layer="600" to-port="1" />
|
|
<edge from-layer="600" from-port="2" to-layer="601" to-port="0" />
|
|
<edge from-layer="601" from-port="1" to-layer="603" to-port="0" />
|
|
<edge from-layer="602" from-port="0" to-layer="603" to-port="1" />
|
|
<edge from-layer="603" from-port="2" to-layer="605" to-port="0" />
|
|
<edge from-layer="604" from-port="0" to-layer="605" to-port="1" />
|
|
<edge from-layer="605" from-port="2" to-layer="606" to-port="0" />
|
|
<edge from-layer="606" from-port="2" to-layer="608" to-port="0" />
|
|
<edge from-layer="607" from-port="0" to-layer="608" to-port="1" />
|
|
<edge from-layer="608" from-port="2" to-layer="610" to-port="0" />
|
|
<edge from-layer="609" from-port="0" to-layer="610" to-port="1" />
|
|
<edge from-layer="610" from-port="2" to-layer="612" to-port="0" />
|
|
<edge from-layer="611" from-port="0" to-layer="612" to-port="1" />
|
|
<edge from-layer="612" from-port="2" to-layer="622" to-port="0" />
|
|
<edge from-layer="612" from-port="2" to-layer="646" to-port="1" />
|
|
<edge from-layer="612" from-port="2" to-layer="630" to-port="0" />
|
|
<edge from-layer="612" from-port="2" to-layer="614" to-port="0" />
|
|
<edge from-layer="613" from-port="0" to-layer="614" to-port="1" />
|
|
<edge from-layer="614" from-port="2" to-layer="616" to-port="0" />
|
|
<edge from-layer="615" from-port="0" to-layer="616" to-port="1" />
|
|
<edge from-layer="616" from-port="2" to-layer="618" to-port="0" />
|
|
<edge from-layer="617" from-port="0" to-layer="618" to-port="1" />
|
|
<edge from-layer="618" from-port="2" to-layer="620" to-port="0" />
|
|
<edge from-layer="619" from-port="0" to-layer="620" to-port="1" />
|
|
<edge from-layer="620" from-port="2" to-layer="637" to-port="0" />
|
|
<edge from-layer="621" from-port="0" to-layer="622" to-port="1" />
|
|
<edge from-layer="622" from-port="2" to-layer="624" to-port="0" />
|
|
<edge from-layer="623" from-port="0" to-layer="624" to-port="1" />
|
|
<edge from-layer="624" from-port="2" to-layer="626" to-port="0" />
|
|
<edge from-layer="625" from-port="0" to-layer="626" to-port="1" />
|
|
<edge from-layer="626" from-port="2" to-layer="628" to-port="0" />
|
|
<edge from-layer="627" from-port="0" to-layer="628" to-port="1" />
|
|
<edge from-layer="628" from-port="2" to-layer="637" to-port="1" />
|
|
<edge from-layer="629" from-port="0" to-layer="630" to-port="1" />
|
|
<edge from-layer="630" from-port="2" to-layer="632" to-port="0" />
|
|
<edge from-layer="631" from-port="0" to-layer="632" to-port="1" />
|
|
<edge from-layer="632" from-port="2" to-layer="634" to-port="0" />
|
|
<edge from-layer="633" from-port="0" to-layer="634" to-port="1" />
|
|
<edge from-layer="634" from-port="2" to-layer="636" to-port="0" />
|
|
<edge from-layer="635" from-port="0" to-layer="636" to-port="1" />
|
|
<edge from-layer="636" from-port="2" to-layer="637" to-port="2" />
|
|
<edge from-layer="637" from-port="4" to-layer="639" to-port="0" />
|
|
<edge from-layer="638" from-port="0" to-layer="639" to-port="1" />
|
|
<edge from-layer="639" from-port="2" to-layer="641" to-port="0" />
|
|
<edge from-layer="640" from-port="0" to-layer="641" to-port="1" />
|
|
<edge from-layer="641" from-port="2" to-layer="643" to-port="0" />
|
|
<edge from-layer="642" from-port="0" to-layer="643" to-port="1" />
|
|
<edge from-layer="643" from-port="2" to-layer="645" to-port="0" />
|
|
<edge from-layer="644" from-port="0" to-layer="645" to-port="1" />
|
|
<edge from-layer="645" from-port="2" to-layer="646" to-port="0" />
|
|
<edge from-layer="646" from-port="2" to-layer="648" to-port="0" />
|
|
<edge from-layer="647" from-port="0" to-layer="648" to-port="1" />
|
|
<edge from-layer="648" from-port="2" to-layer="650" to-port="0" />
|
|
<edge from-layer="649" from-port="0" to-layer="650" to-port="1" />
|
|
<edge from-layer="650" from-port="2" to-layer="652" to-port="0" />
|
|
<edge from-layer="651" from-port="0" to-layer="652" to-port="1" />
|
|
<edge from-layer="652" from-port="2" to-layer="654" to-port="0" />
|
|
<edge from-layer="652" from-port="2" to-layer="662" to-port="1" />
|
|
<edge from-layer="653" from-port="0" to-layer="654" to-port="1" />
|
|
<edge from-layer="654" from-port="2" to-layer="656" to-port="0" />
|
|
<edge from-layer="655" from-port="0" to-layer="656" to-port="1" />
|
|
<edge from-layer="656" from-port="2" to-layer="657" to-port="0" />
|
|
<edge from-layer="657" from-port="1" to-layer="659" to-port="0" />
|
|
<edge from-layer="658" from-port="0" to-layer="659" to-port="1" />
|
|
<edge from-layer="659" from-port="2" to-layer="661" to-port="0" />
|
|
<edge from-layer="660" from-port="0" to-layer="661" to-port="1" />
|
|
<edge from-layer="661" from-port="2" to-layer="662" to-port="0" />
|
|
<edge from-layer="662" from-port="2" to-layer="664" to-port="0" />
|
|
<edge from-layer="663" from-port="0" to-layer="664" to-port="1" />
|
|
<edge from-layer="664" from-port="2" to-layer="666" to-port="0" />
|
|
<edge from-layer="665" from-port="0" to-layer="666" to-port="1" />
|
|
<edge from-layer="666" from-port="2" to-layer="668" to-port="0" />
|
|
<edge from-layer="667" from-port="0" to-layer="668" to-port="1" />
|
|
<edge from-layer="668" from-port="2" to-layer="678" to-port="0" />
|
|
<edge from-layer="668" from-port="2" to-layer="670" to-port="0" />
|
|
<edge from-layer="668" from-port="2" to-layer="686" to-port="0" />
|
|
<edge from-layer="668" from-port="2" to-layer="702" to-port="1" />
|
|
<edge from-layer="669" from-port="0" to-layer="670" to-port="1" />
|
|
<edge from-layer="670" from-port="2" to-layer="672" to-port="0" />
|
|
<edge from-layer="671" from-port="0" to-layer="672" to-port="1" />
|
|
<edge from-layer="672" from-port="2" to-layer="674" to-port="0" />
|
|
<edge from-layer="673" from-port="0" to-layer="674" to-port="1" />
|
|
<edge from-layer="674" from-port="2" to-layer="676" to-port="0" />
|
|
<edge from-layer="675" from-port="0" to-layer="676" to-port="1" />
|
|
<edge from-layer="676" from-port="2" to-layer="693" to-port="0" />
|
|
<edge from-layer="677" from-port="0" to-layer="678" to-port="1" />
|
|
<edge from-layer="678" from-port="2" to-layer="680" to-port="0" />
|
|
<edge from-layer="679" from-port="0" to-layer="680" to-port="1" />
|
|
<edge from-layer="680" from-port="2" to-layer="682" to-port="0" />
|
|
<edge from-layer="681" from-port="0" to-layer="682" to-port="1" />
|
|
<edge from-layer="682" from-port="2" to-layer="684" to-port="0" />
|
|
<edge from-layer="683" from-port="0" to-layer="684" to-port="1" />
|
|
<edge from-layer="684" from-port="2" to-layer="693" to-port="1" />
|
|
<edge from-layer="685" from-port="0" to-layer="686" to-port="1" />
|
|
<edge from-layer="686" from-port="2" to-layer="688" to-port="0" />
|
|
<edge from-layer="687" from-port="0" to-layer="688" to-port="1" />
|
|
<edge from-layer="688" from-port="2" to-layer="690" to-port="0" />
|
|
<edge from-layer="689" from-port="0" to-layer="690" to-port="1" />
|
|
<edge from-layer="690" from-port="2" to-layer="692" to-port="0" />
|
|
<edge from-layer="691" from-port="0" to-layer="692" to-port="1" />
|
|
<edge from-layer="692" from-port="2" to-layer="693" to-port="2" />
|
|
<edge from-layer="693" from-port="4" to-layer="695" to-port="0" />
|
|
<edge from-layer="694" from-port="0" to-layer="695" to-port="1" />
|
|
<edge from-layer="695" from-port="2" to-layer="697" to-port="0" />
|
|
<edge from-layer="696" from-port="0" to-layer="697" to-port="1" />
|
|
<edge from-layer="697" from-port="2" to-layer="699" to-port="0" />
|
|
<edge from-layer="698" from-port="0" to-layer="699" to-port="1" />
|
|
<edge from-layer="699" from-port="2" to-layer="701" to-port="0" />
|
|
<edge from-layer="700" from-port="0" to-layer="701" to-port="1" />
|
|
<edge from-layer="701" from-port="2" to-layer="702" to-port="0" />
|
|
<edge from-layer="702" from-port="2" to-layer="704" to-port="0" />
|
|
<edge from-layer="703" from-port="0" to-layer="704" to-port="1" />
|
|
<edge from-layer="704" from-port="2" to-layer="706" to-port="0" />
|
|
<edge from-layer="705" from-port="0" to-layer="706" to-port="1" />
|
|
<edge from-layer="706" from-port="2" to-layer="708" to-port="0" />
|
|
<edge from-layer="707" from-port="0" to-layer="708" to-port="1" />
|
|
<edge from-layer="708" from-port="2" to-layer="718" to-port="1" />
|
|
<edge from-layer="708" from-port="2" to-layer="710" to-port="0" />
|
|
<edge from-layer="709" from-port="0" to-layer="710" to-port="1" />
|
|
<edge from-layer="710" from-port="2" to-layer="712" to-port="0" />
|
|
<edge from-layer="711" from-port="0" to-layer="712" to-port="1" />
|
|
<edge from-layer="712" from-port="2" to-layer="713" to-port="0" />
|
|
<edge from-layer="713" from-port="1" to-layer="715" to-port="0" />
|
|
<edge from-layer="714" from-port="0" to-layer="715" to-port="1" />
|
|
<edge from-layer="715" from-port="2" to-layer="717" to-port="0" />
|
|
<edge from-layer="716" from-port="0" to-layer="717" to-port="1" />
|
|
<edge from-layer="717" from-port="2" to-layer="718" to-port="0" />
|
|
<edge from-layer="718" from-port="2" to-layer="720" to-port="0" />
|
|
<edge from-layer="719" from-port="0" to-layer="720" to-port="1" />
|
|
<edge from-layer="720" from-port="2" to-layer="722" to-port="0" />
|
|
<edge from-layer="721" from-port="0" to-layer="722" to-port="1" />
|
|
<edge from-layer="722" from-port="2" to-layer="724" to-port="0" />
|
|
<edge from-layer="723" from-port="0" to-layer="724" to-port="1" />
|
|
<edge from-layer="724" from-port="2" to-layer="734" to-port="0" />
|
|
<edge from-layer="724" from-port="2" to-layer="726" to-port="0" />
|
|
<edge from-layer="724" from-port="2" to-layer="742" to-port="0" />
|
|
<edge from-layer="724" from-port="2" to-layer="758" to-port="1" />
|
|
<edge from-layer="725" from-port="0" to-layer="726" to-port="1" />
|
|
<edge from-layer="726" from-port="2" to-layer="728" to-port="0" />
|
|
<edge from-layer="727" from-port="0" to-layer="728" to-port="1" />
|
|
<edge from-layer="728" from-port="2" to-layer="730" to-port="0" />
|
|
<edge from-layer="729" from-port="0" to-layer="730" to-port="1" />
|
|
<edge from-layer="730" from-port="2" to-layer="732" to-port="0" />
|
|
<edge from-layer="731" from-port="0" to-layer="732" to-port="1" />
|
|
<edge from-layer="732" from-port="2" to-layer="749" to-port="0" />
|
|
<edge from-layer="733" from-port="0" to-layer="734" to-port="1" />
|
|
<edge from-layer="734" from-port="2" to-layer="736" to-port="0" />
|
|
<edge from-layer="735" from-port="0" to-layer="736" to-port="1" />
|
|
<edge from-layer="736" from-port="2" to-layer="738" to-port="0" />
|
|
<edge from-layer="737" from-port="0" to-layer="738" to-port="1" />
|
|
<edge from-layer="738" from-port="2" to-layer="740" to-port="0" />
|
|
<edge from-layer="739" from-port="0" to-layer="740" to-port="1" />
|
|
<edge from-layer="740" from-port="2" to-layer="749" to-port="1" />
|
|
<edge from-layer="741" from-port="0" to-layer="742" to-port="1" />
|
|
<edge from-layer="742" from-port="2" to-layer="744" to-port="0" />
|
|
<edge from-layer="743" from-port="0" to-layer="744" to-port="1" />
|
|
<edge from-layer="744" from-port="2" to-layer="746" to-port="0" />
|
|
<edge from-layer="745" from-port="0" to-layer="746" to-port="1" />
|
|
<edge from-layer="746" from-port="2" to-layer="748" to-port="0" />
|
|
<edge from-layer="747" from-port="0" to-layer="748" to-port="1" />
|
|
<edge from-layer="748" from-port="2" to-layer="749" to-port="2" />
|
|
<edge from-layer="749" from-port="4" to-layer="751" to-port="0" />
|
|
<edge from-layer="750" from-port="0" to-layer="751" to-port="1" />
|
|
<edge from-layer="751" from-port="2" to-layer="753" to-port="0" />
|
|
<edge from-layer="752" from-port="0" to-layer="753" to-port="1" />
|
|
<edge from-layer="753" from-port="2" to-layer="755" to-port="0" />
|
|
<edge from-layer="754" from-port="0" to-layer="755" to-port="1" />
|
|
<edge from-layer="755" from-port="2" to-layer="757" to-port="0" />
|
|
<edge from-layer="756" from-port="0" to-layer="757" to-port="1" />
|
|
<edge from-layer="757" from-port="2" to-layer="758" to-port="0" />
|
|
<edge from-layer="758" from-port="2" to-layer="760" to-port="0" />
|
|
<edge from-layer="759" from-port="0" to-layer="760" to-port="1" />
|
|
<edge from-layer="760" from-port="2" to-layer="762" to-port="0" />
|
|
<edge from-layer="761" from-port="0" to-layer="762" to-port="1" />
|
|
<edge from-layer="762" from-port="2" to-layer="764" to-port="0" />
|
|
<edge from-layer="763" from-port="0" to-layer="764" to-port="1" />
|
|
<edge from-layer="764" from-port="2" to-layer="774" to-port="1" />
|
|
<edge from-layer="764" from-port="2" to-layer="766" to-port="0" />
|
|
<edge from-layer="765" from-port="0" to-layer="766" to-port="1" />
|
|
<edge from-layer="766" from-port="2" to-layer="768" to-port="0" />
|
|
<edge from-layer="767" from-port="0" to-layer="768" to-port="1" />
|
|
<edge from-layer="768" from-port="2" to-layer="769" to-port="0" />
|
|
<edge from-layer="769" from-port="1" to-layer="771" to-port="0" />
|
|
<edge from-layer="770" from-port="0" to-layer="771" to-port="1" />
|
|
<edge from-layer="771" from-port="2" to-layer="773" to-port="0" />
|
|
<edge from-layer="772" from-port="0" to-layer="773" to-port="1" />
|
|
<edge from-layer="773" from-port="2" to-layer="774" to-port="0" />
|
|
<edge from-layer="774" from-port="2" to-layer="776" to-port="0" />
|
|
<edge from-layer="775" from-port="0" to-layer="776" to-port="1" />
|
|
<edge from-layer="776" from-port="2" to-layer="778" to-port="0" />
|
|
<edge from-layer="777" from-port="0" to-layer="778" to-port="1" />
|
|
<edge from-layer="778" from-port="2" to-layer="780" to-port="0" />
|
|
<edge from-layer="779" from-port="0" to-layer="780" to-port="1" />
|
|
<edge from-layer="780" from-port="2" to-layer="790" to-port="0" />
|
|
<edge from-layer="780" from-port="2" to-layer="782" to-port="0" />
|
|
<edge from-layer="780" from-port="2" to-layer="814" to-port="1" />
|
|
<edge from-layer="780" from-port="2" to-layer="798" to-port="0" />
|
|
<edge from-layer="781" from-port="0" to-layer="782" to-port="1" />
|
|
<edge from-layer="782" from-port="2" to-layer="784" to-port="0" />
|
|
<edge from-layer="783" from-port="0" to-layer="784" to-port="1" />
|
|
<edge from-layer="784" from-port="2" to-layer="786" to-port="0" />
|
|
<edge from-layer="785" from-port="0" to-layer="786" to-port="1" />
|
|
<edge from-layer="786" from-port="2" to-layer="788" to-port="0" />
|
|
<edge from-layer="787" from-port="0" to-layer="788" to-port="1" />
|
|
<edge from-layer="788" from-port="2" to-layer="805" to-port="0" />
|
|
<edge from-layer="789" from-port="0" to-layer="790" to-port="1" />
|
|
<edge from-layer="790" from-port="2" to-layer="792" to-port="0" />
|
|
<edge from-layer="791" from-port="0" to-layer="792" to-port="1" />
|
|
<edge from-layer="792" from-port="2" to-layer="794" to-port="0" />
|
|
<edge from-layer="793" from-port="0" to-layer="794" to-port="1" />
|
|
<edge from-layer="794" from-port="2" to-layer="796" to-port="0" />
|
|
<edge from-layer="795" from-port="0" to-layer="796" to-port="1" />
|
|
<edge from-layer="796" from-port="2" to-layer="805" to-port="1" />
|
|
<edge from-layer="797" from-port="0" to-layer="798" to-port="1" />
|
|
<edge from-layer="798" from-port="2" to-layer="800" to-port="0" />
|
|
<edge from-layer="799" from-port="0" to-layer="800" to-port="1" />
|
|
<edge from-layer="800" from-port="2" to-layer="802" to-port="0" />
|
|
<edge from-layer="801" from-port="0" to-layer="802" to-port="1" />
|
|
<edge from-layer="802" from-port="2" to-layer="804" to-port="0" />
|
|
<edge from-layer="803" from-port="0" to-layer="804" to-port="1" />
|
|
<edge from-layer="804" from-port="2" to-layer="805" to-port="2" />
|
|
<edge from-layer="805" from-port="4" to-layer="807" to-port="0" />
|
|
<edge from-layer="806" from-port="0" to-layer="807" to-port="1" />
|
|
<edge from-layer="807" from-port="2" to-layer="809" to-port="0" />
|
|
<edge from-layer="808" from-port="0" to-layer="809" to-port="1" />
|
|
<edge from-layer="809" from-port="2" to-layer="811" to-port="0" />
|
|
<edge from-layer="810" from-port="0" to-layer="811" to-port="1" />
|
|
<edge from-layer="811" from-port="2" to-layer="813" to-port="0" />
|
|
<edge from-layer="812" from-port="0" to-layer="813" to-port="1" />
|
|
<edge from-layer="813" from-port="2" to-layer="814" to-port="0" />
|
|
<edge from-layer="814" from-port="2" to-layer="816" to-port="0" />
|
|
<edge from-layer="815" from-port="0" to-layer="816" to-port="1" />
|
|
<edge from-layer="816" from-port="2" to-layer="818" to-port="0" />
|
|
<edge from-layer="817" from-port="0" to-layer="818" to-port="1" />
|
|
<edge from-layer="818" from-port="2" to-layer="820" to-port="0" />
|
|
<edge from-layer="819" from-port="0" to-layer="820" to-port="1" />
|
|
<edge from-layer="820" from-port="2" to-layer="822" to-port="0" />
|
|
<edge from-layer="820" from-port="2" to-layer="830" to-port="1" />
|
|
<edge from-layer="821" from-port="0" to-layer="822" to-port="1" />
|
|
<edge from-layer="822" from-port="2" to-layer="824" to-port="0" />
|
|
<edge from-layer="823" from-port="0" to-layer="824" to-port="1" />
|
|
<edge from-layer="824" from-port="2" to-layer="825" to-port="0" />
|
|
<edge from-layer="825" from-port="1" to-layer="827" to-port="0" />
|
|
<edge from-layer="826" from-port="0" to-layer="827" to-port="1" />
|
|
<edge from-layer="827" from-port="2" to-layer="829" to-port="0" />
|
|
<edge from-layer="828" from-port="0" to-layer="829" to-port="1" />
|
|
<edge from-layer="829" from-port="2" to-layer="830" to-port="0" />
|
|
<edge from-layer="830" from-port="2" to-layer="832" to-port="0" />
|
|
<edge from-layer="831" from-port="0" to-layer="832" to-port="1" />
|
|
<edge from-layer="832" from-port="2" to-layer="834" to-port="0" />
|
|
<edge from-layer="833" from-port="0" to-layer="834" to-port="1" />
|
|
<edge from-layer="834" from-port="2" to-layer="836" to-port="0" />
|
|
<edge from-layer="835" from-port="0" to-layer="836" to-port="1" />
|
|
<edge from-layer="836" from-port="2" to-layer="846" to-port="0" />
|
|
<edge from-layer="836" from-port="2" to-layer="870" to-port="1" />
|
|
<edge from-layer="836" from-port="2" to-layer="838" to-port="0" />
|
|
<edge from-layer="836" from-port="2" to-layer="854" to-port="0" />
|
|
<edge from-layer="837" from-port="0" to-layer="838" to-port="1" />
|
|
<edge from-layer="838" from-port="2" to-layer="840" to-port="0" />
|
|
<edge from-layer="839" from-port="0" to-layer="840" to-port="1" />
|
|
<edge from-layer="840" from-port="2" to-layer="842" to-port="0" />
|
|
<edge from-layer="841" from-port="0" to-layer="842" to-port="1" />
|
|
<edge from-layer="842" from-port="2" to-layer="844" to-port="0" />
|
|
<edge from-layer="843" from-port="0" to-layer="844" to-port="1" />
|
|
<edge from-layer="844" from-port="2" to-layer="861" to-port="0" />
|
|
<edge from-layer="845" from-port="0" to-layer="846" to-port="1" />
|
|
<edge from-layer="846" from-port="2" to-layer="848" to-port="0" />
|
|
<edge from-layer="847" from-port="0" to-layer="848" to-port="1" />
|
|
<edge from-layer="848" from-port="2" to-layer="850" to-port="0" />
|
|
<edge from-layer="849" from-port="0" to-layer="850" to-port="1" />
|
|
<edge from-layer="850" from-port="2" to-layer="852" to-port="0" />
|
|
<edge from-layer="851" from-port="0" to-layer="852" to-port="1" />
|
|
<edge from-layer="852" from-port="2" to-layer="861" to-port="1" />
|
|
<edge from-layer="853" from-port="0" to-layer="854" to-port="1" />
|
|
<edge from-layer="854" from-port="2" to-layer="856" to-port="0" />
|
|
<edge from-layer="855" from-port="0" to-layer="856" to-port="1" />
|
|
<edge from-layer="856" from-port="2" to-layer="858" to-port="0" />
|
|
<edge from-layer="857" from-port="0" to-layer="858" to-port="1" />
|
|
<edge from-layer="858" from-port="2" to-layer="860" to-port="0" />
|
|
<edge from-layer="859" from-port="0" to-layer="860" to-port="1" />
|
|
<edge from-layer="860" from-port="2" to-layer="861" to-port="2" />
|
|
<edge from-layer="861" from-port="4" to-layer="863" to-port="0" />
|
|
<edge from-layer="862" from-port="0" to-layer="863" to-port="1" />
|
|
<edge from-layer="863" from-port="2" to-layer="865" to-port="0" />
|
|
<edge from-layer="864" from-port="0" to-layer="865" to-port="1" />
|
|
<edge from-layer="865" from-port="2" to-layer="867" to-port="0" />
|
|
<edge from-layer="866" from-port="0" to-layer="867" to-port="1" />
|
|
<edge from-layer="867" from-port="2" to-layer="869" to-port="0" />
|
|
<edge from-layer="868" from-port="0" to-layer="869" to-port="1" />
|
|
<edge from-layer="869" from-port="2" to-layer="870" to-port="0" />
|
|
<edge from-layer="870" from-port="2" to-layer="872" to-port="0" />
|
|
<edge from-layer="871" from-port="0" to-layer="872" to-port="1" />
|
|
<edge from-layer="872" from-port="2" to-layer="874" to-port="0" />
|
|
<edge from-layer="873" from-port="0" to-layer="874" to-port="1" />
|
|
<edge from-layer="874" from-port="2" to-layer="876" to-port="0" />
|
|
<edge from-layer="875" from-port="0" to-layer="876" to-port="1" />
|
|
<edge from-layer="876" from-port="2" to-layer="886" to-port="1" />
|
|
<edge from-layer="876" from-port="2" to-layer="878" to-port="0" />
|
|
<edge from-layer="877" from-port="0" to-layer="878" to-port="1" />
|
|
<edge from-layer="878" from-port="2" to-layer="880" to-port="0" />
|
|
<edge from-layer="879" from-port="0" to-layer="880" to-port="1" />
|
|
<edge from-layer="880" from-port="2" to-layer="881" to-port="0" />
|
|
<edge from-layer="881" from-port="1" to-layer="883" to-port="0" />
|
|
<edge from-layer="882" from-port="0" to-layer="883" to-port="1" />
|
|
<edge from-layer="883" from-port="2" to-layer="885" to-port="0" />
|
|
<edge from-layer="884" from-port="0" to-layer="885" to-port="1" />
|
|
<edge from-layer="885" from-port="2" to-layer="886" to-port="0" />
|
|
<edge from-layer="886" from-port="2" to-layer="888" to-port="0" />
|
|
<edge from-layer="887" from-port="0" to-layer="888" to-port="1" />
|
|
<edge from-layer="888" from-port="2" to-layer="890" to-port="0" />
|
|
<edge from-layer="889" from-port="0" to-layer="890" to-port="1" />
|
|
<edge from-layer="890" from-port="2" to-layer="892" to-port="0" />
|
|
<edge from-layer="891" from-port="0" to-layer="892" to-port="1" />
|
|
<edge from-layer="892" from-port="2" to-layer="894" to-port="0" />
|
|
<edge from-layer="892" from-port="2" to-layer="902" to-port="0" />
|
|
<edge from-layer="892" from-port="2" to-layer="926" to-port="1" />
|
|
<edge from-layer="892" from-port="2" to-layer="910" to-port="0" />
|
|
<edge from-layer="893" from-port="0" to-layer="894" to-port="1" />
|
|
<edge from-layer="894" from-port="2" to-layer="896" to-port="0" />
|
|
<edge from-layer="895" from-port="0" to-layer="896" to-port="1" />
|
|
<edge from-layer="896" from-port="2" to-layer="898" to-port="0" />
|
|
<edge from-layer="897" from-port="0" to-layer="898" to-port="1" />
|
|
<edge from-layer="898" from-port="2" to-layer="900" to-port="0" />
|
|
<edge from-layer="899" from-port="0" to-layer="900" to-port="1" />
|
|
<edge from-layer="900" from-port="2" to-layer="917" to-port="0" />
|
|
<edge from-layer="901" from-port="0" to-layer="902" to-port="1" />
|
|
<edge from-layer="902" from-port="2" to-layer="904" to-port="0" />
|
|
<edge from-layer="903" from-port="0" to-layer="904" to-port="1" />
|
|
<edge from-layer="904" from-port="2" to-layer="906" to-port="0" />
|
|
<edge from-layer="905" from-port="0" to-layer="906" to-port="1" />
|
|
<edge from-layer="906" from-port="2" to-layer="908" to-port="0" />
|
|
<edge from-layer="907" from-port="0" to-layer="908" to-port="1" />
|
|
<edge from-layer="908" from-port="2" to-layer="917" to-port="1" />
|
|
<edge from-layer="909" from-port="0" to-layer="910" to-port="1" />
|
|
<edge from-layer="910" from-port="2" to-layer="912" to-port="0" />
|
|
<edge from-layer="911" from-port="0" to-layer="912" to-port="1" />
|
|
<edge from-layer="912" from-port="2" to-layer="914" to-port="0" />
|
|
<edge from-layer="913" from-port="0" to-layer="914" to-port="1" />
|
|
<edge from-layer="914" from-port="2" to-layer="916" to-port="0" />
|
|
<edge from-layer="915" from-port="0" to-layer="916" to-port="1" />
|
|
<edge from-layer="916" from-port="2" to-layer="917" to-port="2" />
|
|
<edge from-layer="917" from-port="4" to-layer="919" to-port="0" />
|
|
<edge from-layer="918" from-port="0" to-layer="919" to-port="1" />
|
|
<edge from-layer="919" from-port="2" to-layer="921" to-port="0" />
|
|
<edge from-layer="920" from-port="0" to-layer="921" to-port="1" />
|
|
<edge from-layer="921" from-port="2" to-layer="923" to-port="0" />
|
|
<edge from-layer="922" from-port="0" to-layer="923" to-port="1" />
|
|
<edge from-layer="923" from-port="2" to-layer="925" to-port="0" />
|
|
<edge from-layer="924" from-port="0" to-layer="925" to-port="1" />
|
|
<edge from-layer="925" from-port="2" to-layer="926" to-port="0" />
|
|
<edge from-layer="926" from-port="2" to-layer="928" to-port="0" />
|
|
<edge from-layer="927" from-port="0" to-layer="928" to-port="1" />
|
|
<edge from-layer="928" from-port="2" to-layer="930" to-port="0" />
|
|
<edge from-layer="929" from-port="0" to-layer="930" to-port="1" />
|
|
<edge from-layer="930" from-port="2" to-layer="932" to-port="0" />
|
|
<edge from-layer="931" from-port="0" to-layer="932" to-port="1" />
|
|
<edge from-layer="932" from-port="2" to-layer="934" to-port="0" />
|
|
<edge from-layer="932" from-port="2" to-layer="942" to-port="1" />
|
|
<edge from-layer="933" from-port="0" to-layer="934" to-port="1" />
|
|
<edge from-layer="934" from-port="2" to-layer="936" to-port="0" />
|
|
<edge from-layer="935" from-port="0" to-layer="936" to-port="1" />
|
|
<edge from-layer="936" from-port="2" to-layer="937" to-port="0" />
|
|
<edge from-layer="937" from-port="1" to-layer="939" to-port="0" />
|
|
<edge from-layer="938" from-port="0" to-layer="939" to-port="1" />
|
|
<edge from-layer="939" from-port="2" to-layer="941" to-port="0" />
|
|
<edge from-layer="940" from-port="0" to-layer="941" to-port="1" />
|
|
<edge from-layer="941" from-port="2" to-layer="942" to-port="0" />
|
|
<edge from-layer="942" from-port="2" to-layer="944" to-port="0" />
|
|
<edge from-layer="943" from-port="0" to-layer="944" to-port="1" />
|
|
<edge from-layer="944" from-port="2" to-layer="946" to-port="0" />
|
|
<edge from-layer="945" from-port="0" to-layer="946" to-port="1" />
|
|
<edge from-layer="946" from-port="2" to-layer="948" to-port="0" />
|
|
<edge from-layer="947" from-port="0" to-layer="948" to-port="1" />
|
|
<edge from-layer="948" from-port="2" to-layer="958" to-port="0" />
|
|
<edge from-layer="948" from-port="2" to-layer="950" to-port="0" />
|
|
<edge from-layer="948" from-port="2" to-layer="982" to-port="1" />
|
|
<edge from-layer="948" from-port="2" to-layer="966" to-port="0" />
|
|
<edge from-layer="949" from-port="0" to-layer="950" to-port="1" />
|
|
<edge from-layer="950" from-port="2" to-layer="952" to-port="0" />
|
|
<edge from-layer="951" from-port="0" to-layer="952" to-port="1" />
|
|
<edge from-layer="952" from-port="2" to-layer="954" to-port="0" />
|
|
<edge from-layer="953" from-port="0" to-layer="954" to-port="1" />
|
|
<edge from-layer="954" from-port="2" to-layer="956" to-port="0" />
|
|
<edge from-layer="955" from-port="0" to-layer="956" to-port="1" />
|
|
<edge from-layer="956" from-port="2" to-layer="973" to-port="0" />
|
|
<edge from-layer="957" from-port="0" to-layer="958" to-port="1" />
|
|
<edge from-layer="958" from-port="2" to-layer="960" to-port="0" />
|
|
<edge from-layer="959" from-port="0" to-layer="960" to-port="1" />
|
|
<edge from-layer="960" from-port="2" to-layer="962" to-port="0" />
|
|
<edge from-layer="961" from-port="0" to-layer="962" to-port="1" />
|
|
<edge from-layer="962" from-port="2" to-layer="964" to-port="0" />
|
|
<edge from-layer="963" from-port="0" to-layer="964" to-port="1" />
|
|
<edge from-layer="964" from-port="2" to-layer="973" to-port="1" />
|
|
<edge from-layer="965" from-port="0" to-layer="966" to-port="1" />
|
|
<edge from-layer="966" from-port="2" to-layer="968" to-port="0" />
|
|
<edge from-layer="967" from-port="0" to-layer="968" to-port="1" />
|
|
<edge from-layer="968" from-port="2" to-layer="970" to-port="0" />
|
|
<edge from-layer="969" from-port="0" to-layer="970" to-port="1" />
|
|
<edge from-layer="970" from-port="2" to-layer="972" to-port="0" />
|
|
<edge from-layer="971" from-port="0" to-layer="972" to-port="1" />
|
|
<edge from-layer="972" from-port="2" to-layer="973" to-port="2" />
|
|
<edge from-layer="973" from-port="4" to-layer="975" to-port="0" />
|
|
<edge from-layer="974" from-port="0" to-layer="975" to-port="1" />
|
|
<edge from-layer="975" from-port="2" to-layer="977" to-port="0" />
|
|
<edge from-layer="976" from-port="0" to-layer="977" to-port="1" />
|
|
<edge from-layer="977" from-port="2" to-layer="979" to-port="0" />
|
|
<edge from-layer="978" from-port="0" to-layer="979" to-port="1" />
|
|
<edge from-layer="979" from-port="2" to-layer="981" to-port="0" />
|
|
<edge from-layer="980" from-port="0" to-layer="981" to-port="1" />
|
|
<edge from-layer="981" from-port="2" to-layer="982" to-port="0" />
|
|
<edge from-layer="982" from-port="2" to-layer="984" to-port="0" />
|
|
<edge from-layer="983" from-port="0" to-layer="984" to-port="1" />
|
|
<edge from-layer="984" from-port="2" to-layer="986" to-port="0" />
|
|
<edge from-layer="985" from-port="0" to-layer="986" to-port="1" />
|
|
<edge from-layer="986" from-port="2" to-layer="988" to-port="0" />
|
|
<edge from-layer="987" from-port="0" to-layer="988" to-port="1" />
|
|
<edge from-layer="988" from-port="2" to-layer="990" to-port="0" />
|
|
<edge from-layer="988" from-port="2" to-layer="998" to-port="1" />
|
|
<edge from-layer="989" from-port="0" to-layer="990" to-port="1" />
|
|
<edge from-layer="990" from-port="2" to-layer="992" to-port="0" />
|
|
<edge from-layer="991" from-port="0" to-layer="992" to-port="1" />
|
|
<edge from-layer="992" from-port="2" to-layer="993" to-port="0" />
|
|
<edge from-layer="993" from-port="1" to-layer="995" to-port="0" />
|
|
<edge from-layer="994" from-port="0" to-layer="995" to-port="1" />
|
|
<edge from-layer="995" from-port="2" to-layer="997" to-port="0" />
|
|
<edge from-layer="996" from-port="0" to-layer="997" to-port="1" />
|
|
<edge from-layer="997" from-port="2" to-layer="998" to-port="0" />
|
|
<edge from-layer="998" from-port="2" to-layer="1000" to-port="0" />
|
|
<edge from-layer="999" from-port="0" to-layer="1000" to-port="1" />
|
|
<edge from-layer="1000" from-port="2" to-layer="1002" to-port="0" />
|
|
<edge from-layer="1001" from-port="0" to-layer="1002" to-port="1" />
|
|
<edge from-layer="1002" from-port="2" to-layer="1004" to-port="0" />
|
|
<edge from-layer="1003" from-port="0" to-layer="1004" to-port="1" />
|
|
<edge from-layer="1004" from-port="2" to-layer="1038" to-port="1" />
|
|
<edge from-layer="1004" from-port="2" to-layer="1022" to-port="0" />
|
|
<edge from-layer="1004" from-port="2" to-layer="1014" to-port="0" />
|
|
<edge from-layer="1004" from-port="2" to-layer="1006" to-port="0" />
|
|
<edge from-layer="1005" from-port="0" to-layer="1006" to-port="1" />
|
|
<edge from-layer="1006" from-port="2" to-layer="1008" to-port="0" />
|
|
<edge from-layer="1007" from-port="0" to-layer="1008" to-port="1" />
|
|
<edge from-layer="1008" from-port="2" to-layer="1010" to-port="0" />
|
|
<edge from-layer="1009" from-port="0" to-layer="1010" to-port="1" />
|
|
<edge from-layer="1010" from-port="2" to-layer="1012" to-port="0" />
|
|
<edge from-layer="1011" from-port="0" to-layer="1012" to-port="1" />
|
|
<edge from-layer="1012" from-port="2" to-layer="1029" to-port="0" />
|
|
<edge from-layer="1013" from-port="0" to-layer="1014" to-port="1" />
|
|
<edge from-layer="1014" from-port="2" to-layer="1016" to-port="0" />
|
|
<edge from-layer="1015" from-port="0" to-layer="1016" to-port="1" />
|
|
<edge from-layer="1016" from-port="2" to-layer="1018" to-port="0" />
|
|
<edge from-layer="1017" from-port="0" to-layer="1018" to-port="1" />
|
|
<edge from-layer="1018" from-port="2" to-layer="1020" to-port="0" />
|
|
<edge from-layer="1019" from-port="0" to-layer="1020" to-port="1" />
|
|
<edge from-layer="1020" from-port="2" to-layer="1029" to-port="1" />
|
|
<edge from-layer="1021" from-port="0" to-layer="1022" to-port="1" />
|
|
<edge from-layer="1022" from-port="2" to-layer="1024" to-port="0" />
|
|
<edge from-layer="1023" from-port="0" to-layer="1024" to-port="1" />
|
|
<edge from-layer="1024" from-port="2" to-layer="1026" to-port="0" />
|
|
<edge from-layer="1025" from-port="0" to-layer="1026" to-port="1" />
|
|
<edge from-layer="1026" from-port="2" to-layer="1028" to-port="0" />
|
|
<edge from-layer="1027" from-port="0" to-layer="1028" to-port="1" />
|
|
<edge from-layer="1028" from-port="2" to-layer="1029" to-port="2" />
|
|
<edge from-layer="1029" from-port="4" to-layer="1031" to-port="0" />
|
|
<edge from-layer="1030" from-port="0" to-layer="1031" to-port="1" />
|
|
<edge from-layer="1031" from-port="2" to-layer="1033" to-port="0" />
|
|
<edge from-layer="1032" from-port="0" to-layer="1033" to-port="1" />
|
|
<edge from-layer="1033" from-port="2" to-layer="1035" to-port="0" />
|
|
<edge from-layer="1034" from-port="0" to-layer="1035" to-port="1" />
|
|
<edge from-layer="1035" from-port="2" to-layer="1037" to-port="0" />
|
|
<edge from-layer="1036" from-port="0" to-layer="1037" to-port="1" />
|
|
<edge from-layer="1037" from-port="2" to-layer="1038" to-port="0" />
|
|
<edge from-layer="1038" from-port="2" to-layer="1040" to-port="0" />
|
|
<edge from-layer="1039" from-port="0" to-layer="1040" to-port="1" />
|
|
<edge from-layer="1040" from-port="2" to-layer="1042" to-port="0" />
|
|
<edge from-layer="1041" from-port="0" to-layer="1042" to-port="1" />
|
|
<edge from-layer="1042" from-port="2" to-layer="1044" to-port="0" />
|
|
<edge from-layer="1043" from-port="0" to-layer="1044" to-port="1" />
|
|
<edge from-layer="1044" from-port="2" to-layer="1046" to-port="0" />
|
|
<edge from-layer="1044" from-port="2" to-layer="1054" to-port="1" />
|
|
<edge from-layer="1045" from-port="0" to-layer="1046" to-port="1" />
|
|
<edge from-layer="1046" from-port="2" to-layer="1048" to-port="0" />
|
|
<edge from-layer="1047" from-port="0" to-layer="1048" to-port="1" />
|
|
<edge from-layer="1048" from-port="2" to-layer="1049" to-port="0" />
|
|
<edge from-layer="1049" from-port="1" to-layer="1051" to-port="0" />
|
|
<edge from-layer="1050" from-port="0" to-layer="1051" to-port="1" />
|
|
<edge from-layer="1051" from-port="2" to-layer="1053" to-port="0" />
|
|
<edge from-layer="1052" from-port="0" to-layer="1053" to-port="1" />
|
|
<edge from-layer="1053" from-port="2" to-layer="1054" to-port="0" />
|
|
<edge from-layer="1054" from-port="2" to-layer="1056" to-port="0" />
|
|
<edge from-layer="1055" from-port="0" to-layer="1056" to-port="1" />
|
|
<edge from-layer="1056" from-port="2" to-layer="1058" to-port="0" />
|
|
<edge from-layer="1057" from-port="0" to-layer="1058" to-port="1" />
|
|
<edge from-layer="1058" from-port="2" to-layer="1060" to-port="0" />
|
|
<edge from-layer="1059" from-port="0" to-layer="1060" to-port="1" />
|
|
<edge from-layer="1060" from-port="2" to-layer="1070" to-port="0" />
|
|
<edge from-layer="1060" from-port="2" to-layer="1094" to-port="1" />
|
|
<edge from-layer="1060" from-port="2" to-layer="1062" to-port="0" />
|
|
<edge from-layer="1060" from-port="2" to-layer="1078" to-port="0" />
|
|
<edge from-layer="1061" from-port="0" to-layer="1062" to-port="1" />
|
|
<edge from-layer="1062" from-port="2" to-layer="1064" to-port="0" />
|
|
<edge from-layer="1063" from-port="0" to-layer="1064" to-port="1" />
|
|
<edge from-layer="1064" from-port="2" to-layer="1066" to-port="0" />
|
|
<edge from-layer="1065" from-port="0" to-layer="1066" to-port="1" />
|
|
<edge from-layer="1066" from-port="2" to-layer="1068" to-port="0" />
|
|
<edge from-layer="1067" from-port="0" to-layer="1068" to-port="1" />
|
|
<edge from-layer="1068" from-port="2" to-layer="1085" to-port="0" />
|
|
<edge from-layer="1069" from-port="0" to-layer="1070" to-port="1" />
|
|
<edge from-layer="1070" from-port="2" to-layer="1072" to-port="0" />
|
|
<edge from-layer="1071" from-port="0" to-layer="1072" to-port="1" />
|
|
<edge from-layer="1072" from-port="2" to-layer="1074" to-port="0" />
|
|
<edge from-layer="1073" from-port="0" to-layer="1074" to-port="1" />
|
|
<edge from-layer="1074" from-port="2" to-layer="1076" to-port="0" />
|
|
<edge from-layer="1075" from-port="0" to-layer="1076" to-port="1" />
|
|
<edge from-layer="1076" from-port="2" to-layer="1085" to-port="1" />
|
|
<edge from-layer="1077" from-port="0" to-layer="1078" to-port="1" />
|
|
<edge from-layer="1078" from-port="2" to-layer="1080" to-port="0" />
|
|
<edge from-layer="1079" from-port="0" to-layer="1080" to-port="1" />
|
|
<edge from-layer="1080" from-port="2" to-layer="1082" to-port="0" />
|
|
<edge from-layer="1081" from-port="0" to-layer="1082" to-port="1" />
|
|
<edge from-layer="1082" from-port="2" to-layer="1084" to-port="0" />
|
|
<edge from-layer="1083" from-port="0" to-layer="1084" to-port="1" />
|
|
<edge from-layer="1084" from-port="2" to-layer="1085" to-port="2" />
|
|
<edge from-layer="1085" from-port="4" to-layer="1087" to-port="0" />
|
|
<edge from-layer="1086" from-port="0" to-layer="1087" to-port="1" />
|
|
<edge from-layer="1087" from-port="2" to-layer="1089" to-port="0" />
|
|
<edge from-layer="1088" from-port="0" to-layer="1089" to-port="1" />
|
|
<edge from-layer="1089" from-port="2" to-layer="1091" to-port="0" />
|
|
<edge from-layer="1090" from-port="0" to-layer="1091" to-port="1" />
|
|
<edge from-layer="1091" from-port="2" to-layer="1093" to-port="0" />
|
|
<edge from-layer="1092" from-port="0" to-layer="1093" to-port="1" />
|
|
<edge from-layer="1093" from-port="2" to-layer="1094" to-port="0" />
|
|
<edge from-layer="1094" from-port="2" to-layer="1096" to-port="0" />
|
|
<edge from-layer="1095" from-port="0" to-layer="1096" to-port="1" />
|
|
<edge from-layer="1096" from-port="2" to-layer="1098" to-port="0" />
|
|
<edge from-layer="1097" from-port="0" to-layer="1098" to-port="1" />
|
|
<edge from-layer="1098" from-port="2" to-layer="1100" to-port="0" />
|
|
<edge from-layer="1099" from-port="0" to-layer="1100" to-port="1" />
|
|
<edge from-layer="1100" from-port="2" to-layer="1110" to-port="1" />
|
|
<edge from-layer="1100" from-port="2" to-layer="1102" to-port="0" />
|
|
<edge from-layer="1101" from-port="0" to-layer="1102" to-port="1" />
|
|
<edge from-layer="1102" from-port="2" to-layer="1104" to-port="0" />
|
|
<edge from-layer="1103" from-port="0" to-layer="1104" to-port="1" />
|
|
<edge from-layer="1104" from-port="2" to-layer="1105" to-port="0" />
|
|
<edge from-layer="1105" from-port="1" to-layer="1107" to-port="0" />
|
|
<edge from-layer="1106" from-port="0" to-layer="1107" to-port="1" />
|
|
<edge from-layer="1107" from-port="2" to-layer="1109" to-port="0" />
|
|
<edge from-layer="1108" from-port="0" to-layer="1109" to-port="1" />
|
|
<edge from-layer="1109" from-port="2" to-layer="1110" to-port="0" />
|
|
<edge from-layer="1110" from-port="2" to-layer="1112" to-port="0" />
|
|
<edge from-layer="1111" from-port="0" to-layer="1112" to-port="1" />
|
|
<edge from-layer="1112" from-port="2" to-layer="1114" to-port="0" />
|
|
<edge from-layer="1113" from-port="0" to-layer="1114" to-port="1" />
|
|
<edge from-layer="1114" from-port="2" to-layer="1116" to-port="0" />
|
|
<edge from-layer="1115" from-port="0" to-layer="1116" to-port="1" />
|
|
<edge from-layer="1116" from-port="2" to-layer="1134" to-port="0" />
|
|
<edge from-layer="1116" from-port="2" to-layer="1118" to-port="0" />
|
|
<edge from-layer="1116" from-port="2" to-layer="1150" to-port="1" />
|
|
<edge from-layer="1116" from-port="2" to-layer="1126" to-port="0" />
|
|
<edge from-layer="1117" from-port="0" to-layer="1118" to-port="1" />
|
|
<edge from-layer="1118" from-port="2" to-layer="1120" to-port="0" />
|
|
<edge from-layer="1119" from-port="0" to-layer="1120" to-port="1" />
|
|
<edge from-layer="1120" from-port="2" to-layer="1122" to-port="0" />
|
|
<edge from-layer="1121" from-port="0" to-layer="1122" to-port="1" />
|
|
<edge from-layer="1122" from-port="2" to-layer="1124" to-port="0" />
|
|
<edge from-layer="1123" from-port="0" to-layer="1124" to-port="1" />
|
|
<edge from-layer="1124" from-port="2" to-layer="1141" to-port="0" />
|
|
<edge from-layer="1125" from-port="0" to-layer="1126" to-port="1" />
|
|
<edge from-layer="1126" from-port="2" to-layer="1128" to-port="0" />
|
|
<edge from-layer="1127" from-port="0" to-layer="1128" to-port="1" />
|
|
<edge from-layer="1128" from-port="2" to-layer="1130" to-port="0" />
|
|
<edge from-layer="1129" from-port="0" to-layer="1130" to-port="1" />
|
|
<edge from-layer="1130" from-port="2" to-layer="1132" to-port="0" />
|
|
<edge from-layer="1131" from-port="0" to-layer="1132" to-port="1" />
|
|
<edge from-layer="1132" from-port="2" to-layer="1141" to-port="1" />
|
|
<edge from-layer="1133" from-port="0" to-layer="1134" to-port="1" />
|
|
<edge from-layer="1134" from-port="2" to-layer="1136" to-port="0" />
|
|
<edge from-layer="1135" from-port="0" to-layer="1136" to-port="1" />
|
|
<edge from-layer="1136" from-port="2" to-layer="1138" to-port="0" />
|
|
<edge from-layer="1137" from-port="0" to-layer="1138" to-port="1" />
|
|
<edge from-layer="1138" from-port="2" to-layer="1140" to-port="0" />
|
|
<edge from-layer="1139" from-port="0" to-layer="1140" to-port="1" />
|
|
<edge from-layer="1140" from-port="2" to-layer="1141" to-port="2" />
|
|
<edge from-layer="1141" from-port="4" to-layer="1143" to-port="0" />
|
|
<edge from-layer="1142" from-port="0" to-layer="1143" to-port="1" />
|
|
<edge from-layer="1143" from-port="2" to-layer="1145" to-port="0" />
|
|
<edge from-layer="1144" from-port="0" to-layer="1145" to-port="1" />
|
|
<edge from-layer="1145" from-port="2" to-layer="1147" to-port="0" />
|
|
<edge from-layer="1146" from-port="0" to-layer="1147" to-port="1" />
|
|
<edge from-layer="1147" from-port="2" to-layer="1149" to-port="0" />
|
|
<edge from-layer="1148" from-port="0" to-layer="1149" to-port="1" />
|
|
<edge from-layer="1149" from-port="2" to-layer="1150" to-port="0" />
|
|
<edge from-layer="1150" from-port="2" to-layer="1152" to-port="0" />
|
|
<edge from-layer="1151" from-port="0" to-layer="1152" to-port="1" />
|
|
<edge from-layer="1152" from-port="2" to-layer="1154" to-port="0" />
|
|
<edge from-layer="1153" from-port="0" to-layer="1154" to-port="1" />
|
|
<edge from-layer="1154" from-port="2" to-layer="1156" to-port="0" />
|
|
<edge from-layer="1155" from-port="0" to-layer="1156" to-port="1" />
|
|
<edge from-layer="1156" from-port="2" to-layer="1166" to-port="1" />
|
|
<edge from-layer="1156" from-port="2" to-layer="1158" to-port="0" />
|
|
<edge from-layer="1157" from-port="0" to-layer="1158" to-port="1" />
|
|
<edge from-layer="1158" from-port="2" to-layer="1160" to-port="0" />
|
|
<edge from-layer="1159" from-port="0" to-layer="1160" to-port="1" />
|
|
<edge from-layer="1160" from-port="2" to-layer="1161" to-port="0" />
|
|
<edge from-layer="1161" from-port="1" to-layer="1163" to-port="0" />
|
|
<edge from-layer="1162" from-port="0" to-layer="1163" to-port="1" />
|
|
<edge from-layer="1163" from-port="2" to-layer="1165" to-port="0" />
|
|
<edge from-layer="1164" from-port="0" to-layer="1165" to-port="1" />
|
|
<edge from-layer="1165" from-port="2" to-layer="1166" to-port="0" />
|
|
<edge from-layer="1166" from-port="2" to-layer="1168" to-port="0" />
|
|
<edge from-layer="1167" from-port="0" to-layer="1168" to-port="1" />
|
|
<edge from-layer="1168" from-port="2" to-layer="1170" to-port="0" />
|
|
<edge from-layer="1169" from-port="0" to-layer="1170" to-port="1" />
|
|
<edge from-layer="1170" from-port="2" to-layer="1172" to-port="0" />
|
|
<edge from-layer="1171" from-port="0" to-layer="1172" to-port="1" />
|
|
<edge from-layer="1172" from-port="2" to-layer="1182" to-port="0" />
|
|
<edge from-layer="1172" from-port="2" to-layer="1174" to-port="0" />
|
|
<edge from-layer="1172" from-port="2" to-layer="1206" to-port="1" />
|
|
<edge from-layer="1172" from-port="2" to-layer="1190" to-port="0" />
|
|
<edge from-layer="1173" from-port="0" to-layer="1174" to-port="1" />
|
|
<edge from-layer="1174" from-port="2" to-layer="1176" to-port="0" />
|
|
<edge from-layer="1175" from-port="0" to-layer="1176" to-port="1" />
|
|
<edge from-layer="1176" from-port="2" to-layer="1178" to-port="0" />
|
|
<edge from-layer="1177" from-port="0" to-layer="1178" to-port="1" />
|
|
<edge from-layer="1178" from-port="2" to-layer="1180" to-port="0" />
|
|
<edge from-layer="1179" from-port="0" to-layer="1180" to-port="1" />
|
|
<edge from-layer="1180" from-port="2" to-layer="1197" to-port="0" />
|
|
<edge from-layer="1181" from-port="0" to-layer="1182" to-port="1" />
|
|
<edge from-layer="1182" from-port="2" to-layer="1184" to-port="0" />
|
|
<edge from-layer="1183" from-port="0" to-layer="1184" to-port="1" />
|
|
<edge from-layer="1184" from-port="2" to-layer="1186" to-port="0" />
|
|
<edge from-layer="1185" from-port="0" to-layer="1186" to-port="1" />
|
|
<edge from-layer="1186" from-port="2" to-layer="1188" to-port="0" />
|
|
<edge from-layer="1187" from-port="0" to-layer="1188" to-port="1" />
|
|
<edge from-layer="1188" from-port="2" to-layer="1197" to-port="1" />
|
|
<edge from-layer="1189" from-port="0" to-layer="1190" to-port="1" />
|
|
<edge from-layer="1190" from-port="2" to-layer="1192" to-port="0" />
|
|
<edge from-layer="1191" from-port="0" to-layer="1192" to-port="1" />
|
|
<edge from-layer="1192" from-port="2" to-layer="1194" to-port="0" />
|
|
<edge from-layer="1193" from-port="0" to-layer="1194" to-port="1" />
|
|
<edge from-layer="1194" from-port="2" to-layer="1196" to-port="0" />
|
|
<edge from-layer="1195" from-port="0" to-layer="1196" to-port="1" />
|
|
<edge from-layer="1196" from-port="2" to-layer="1197" to-port="2" />
|
|
<edge from-layer="1197" from-port="4" to-layer="1199" to-port="0" />
|
|
<edge from-layer="1198" from-port="0" to-layer="1199" to-port="1" />
|
|
<edge from-layer="1199" from-port="2" to-layer="1201" to-port="0" />
|
|
<edge from-layer="1200" from-port="0" to-layer="1201" to-port="1" />
|
|
<edge from-layer="1201" from-port="2" to-layer="1203" to-port="0" />
|
|
<edge from-layer="1202" from-port="0" to-layer="1203" to-port="1" />
|
|
<edge from-layer="1203" from-port="2" to-layer="1205" to-port="0" />
|
|
<edge from-layer="1204" from-port="0" to-layer="1205" to-port="1" />
|
|
<edge from-layer="1205" from-port="2" to-layer="1206" to-port="0" />
|
|
<edge from-layer="1206" from-port="2" to-layer="1208" to-port="0" />
|
|
<edge from-layer="1207" from-port="0" to-layer="1208" to-port="1" />
|
|
<edge from-layer="1208" from-port="2" to-layer="1210" to-port="0" />
|
|
<edge from-layer="1209" from-port="0" to-layer="1210" to-port="1" />
|
|
<edge from-layer="1210" from-port="2" to-layer="1212" to-port="0" />
|
|
<edge from-layer="1211" from-port="0" to-layer="1212" to-port="1" />
|
|
<edge from-layer="1212" from-port="2" to-layer="1222" to-port="1" />
|
|
<edge from-layer="1212" from-port="2" to-layer="1214" to-port="0" />
|
|
<edge from-layer="1213" from-port="0" to-layer="1214" to-port="1" />
|
|
<edge from-layer="1214" from-port="2" to-layer="1216" to-port="0" />
|
|
<edge from-layer="1215" from-port="0" to-layer="1216" to-port="1" />
|
|
<edge from-layer="1216" from-port="2" to-layer="1217" to-port="0" />
|
|
<edge from-layer="1217" from-port="1" to-layer="1219" to-port="0" />
|
|
<edge from-layer="1218" from-port="0" to-layer="1219" to-port="1" />
|
|
<edge from-layer="1219" from-port="2" to-layer="1221" to-port="0" />
|
|
<edge from-layer="1220" from-port="0" to-layer="1221" to-port="1" />
|
|
<edge from-layer="1221" from-port="2" to-layer="1222" to-port="0" />
|
|
<edge from-layer="1222" from-port="2" to-layer="1224" to-port="0" />
|
|
<edge from-layer="1223" from-port="0" to-layer="1224" to-port="1" />
|
|
<edge from-layer="1224" from-port="2" to-layer="1226" to-port="0" />
|
|
<edge from-layer="1225" from-port="0" to-layer="1226" to-port="1" />
|
|
<edge from-layer="1226" from-port="2" to-layer="1228" to-port="0" />
|
|
<edge from-layer="1227" from-port="0" to-layer="1228" to-port="1" />
|
|
<edge from-layer="1228" from-port="2" to-layer="1230" to-port="0" />
|
|
<edge from-layer="1228" from-port="2" to-layer="1262" to-port="1" />
|
|
<edge from-layer="1228" from-port="2" to-layer="1246" to-port="0" />
|
|
<edge from-layer="1228" from-port="2" to-layer="1238" to-port="0" />
|
|
<edge from-layer="1229" from-port="0" to-layer="1230" to-port="1" />
|
|
<edge from-layer="1230" from-port="2" to-layer="1232" to-port="0" />
|
|
<edge from-layer="1231" from-port="0" to-layer="1232" to-port="1" />
|
|
<edge from-layer="1232" from-port="2" to-layer="1234" to-port="0" />
|
|
<edge from-layer="1233" from-port="0" to-layer="1234" to-port="1" />
|
|
<edge from-layer="1234" from-port="2" to-layer="1236" to-port="0" />
|
|
<edge from-layer="1235" from-port="0" to-layer="1236" to-port="1" />
|
|
<edge from-layer="1236" from-port="2" to-layer="1253" to-port="0" />
|
|
<edge from-layer="1237" from-port="0" to-layer="1238" to-port="1" />
|
|
<edge from-layer="1238" from-port="2" to-layer="1240" to-port="0" />
|
|
<edge from-layer="1239" from-port="0" to-layer="1240" to-port="1" />
|
|
<edge from-layer="1240" from-port="2" to-layer="1242" to-port="0" />
|
|
<edge from-layer="1241" from-port="0" to-layer="1242" to-port="1" />
|
|
<edge from-layer="1242" from-port="2" to-layer="1244" to-port="0" />
|
|
<edge from-layer="1243" from-port="0" to-layer="1244" to-port="1" />
|
|
<edge from-layer="1244" from-port="2" to-layer="1253" to-port="1" />
|
|
<edge from-layer="1245" from-port="0" to-layer="1246" to-port="1" />
|
|
<edge from-layer="1246" from-port="2" to-layer="1248" to-port="0" />
|
|
<edge from-layer="1247" from-port="0" to-layer="1248" to-port="1" />
|
|
<edge from-layer="1248" from-port="2" to-layer="1250" to-port="0" />
|
|
<edge from-layer="1249" from-port="0" to-layer="1250" to-port="1" />
|
|
<edge from-layer="1250" from-port="2" to-layer="1252" to-port="0" />
|
|
<edge from-layer="1251" from-port="0" to-layer="1252" to-port="1" />
|
|
<edge from-layer="1252" from-port="2" to-layer="1253" to-port="2" />
|
|
<edge from-layer="1253" from-port="4" to-layer="1255" to-port="0" />
|
|
<edge from-layer="1254" from-port="0" to-layer="1255" to-port="1" />
|
|
<edge from-layer="1255" from-port="2" to-layer="1257" to-port="0" />
|
|
<edge from-layer="1256" from-port="0" to-layer="1257" to-port="1" />
|
|
<edge from-layer="1257" from-port="2" to-layer="1259" to-port="0" />
|
|
<edge from-layer="1258" from-port="0" to-layer="1259" to-port="1" />
|
|
<edge from-layer="1259" from-port="2" to-layer="1261" to-port="0" />
|
|
<edge from-layer="1260" from-port="0" to-layer="1261" to-port="1" />
|
|
<edge from-layer="1261" from-port="2" to-layer="1262" to-port="0" />
|
|
<edge from-layer="1262" from-port="2" to-layer="1264" to-port="0" />
|
|
<edge from-layer="1263" from-port="0" to-layer="1264" to-port="1" />
|
|
<edge from-layer="1264" from-port="2" to-layer="1266" to-port="0" />
|
|
<edge from-layer="1265" from-port="0" to-layer="1266" to-port="1" />
|
|
<edge from-layer="1266" from-port="2" to-layer="1268" to-port="0" />
|
|
<edge from-layer="1267" from-port="0" to-layer="1268" to-port="1" />
|
|
<edge from-layer="1268" from-port="2" to-layer="1270" to-port="0" />
|
|
<edge from-layer="1268" from-port="2" to-layer="1278" to-port="1" />
|
|
<edge from-layer="1269" from-port="0" to-layer="1270" to-port="1" />
|
|
<edge from-layer="1270" from-port="2" to-layer="1272" to-port="0" />
|
|
<edge from-layer="1271" from-port="0" to-layer="1272" to-port="1" />
|
|
<edge from-layer="1272" from-port="2" to-layer="1273" to-port="0" />
|
|
<edge from-layer="1273" from-port="1" to-layer="1275" to-port="0" />
|
|
<edge from-layer="1274" from-port="0" to-layer="1275" to-port="1" />
|
|
<edge from-layer="1275" from-port="2" to-layer="1277" to-port="0" />
|
|
<edge from-layer="1276" from-port="0" to-layer="1277" to-port="1" />
|
|
<edge from-layer="1277" from-port="2" to-layer="1278" to-port="0" />
|
|
<edge from-layer="1278" from-port="2" to-layer="1280" to-port="0" />
|
|
<edge from-layer="1279" from-port="0" to-layer="1280" to-port="1" />
|
|
<edge from-layer="1280" from-port="2" to-layer="1282" to-port="0" />
|
|
<edge from-layer="1281" from-port="0" to-layer="1282" to-port="1" />
|
|
<edge from-layer="1282" from-port="2" to-layer="1284" to-port="0" />
|
|
<edge from-layer="1283" from-port="0" to-layer="1284" to-port="1" />
|
|
<edge from-layer="1284" from-port="2" to-layer="1318" to-port="1" />
|
|
<edge from-layer="1284" from-port="2" to-layer="1302" to-port="0" />
|
|
<edge from-layer="1284" from-port="2" to-layer="1294" to-port="0" />
|
|
<edge from-layer="1284" from-port="2" to-layer="1286" to-port="0" />
|
|
<edge from-layer="1285" from-port="0" to-layer="1286" to-port="1" />
|
|
<edge from-layer="1286" from-port="2" to-layer="1288" to-port="0" />
|
|
<edge from-layer="1287" from-port="0" to-layer="1288" to-port="1" />
|
|
<edge from-layer="1288" from-port="2" to-layer="1290" to-port="0" />
|
|
<edge from-layer="1289" from-port="0" to-layer="1290" to-port="1" />
|
|
<edge from-layer="1290" from-port="2" to-layer="1292" to-port="0" />
|
|
<edge from-layer="1291" from-port="0" to-layer="1292" to-port="1" />
|
|
<edge from-layer="1292" from-port="2" to-layer="1309" to-port="0" />
|
|
<edge from-layer="1293" from-port="0" to-layer="1294" to-port="1" />
|
|
<edge from-layer="1294" from-port="2" to-layer="1296" to-port="0" />
|
|
<edge from-layer="1295" from-port="0" to-layer="1296" to-port="1" />
|
|
<edge from-layer="1296" from-port="2" to-layer="1298" to-port="0" />
|
|
<edge from-layer="1297" from-port="0" to-layer="1298" to-port="1" />
|
|
<edge from-layer="1298" from-port="2" to-layer="1300" to-port="0" />
|
|
<edge from-layer="1299" from-port="0" to-layer="1300" to-port="1" />
|
|
<edge from-layer="1300" from-port="2" to-layer="1309" to-port="1" />
|
|
<edge from-layer="1301" from-port="0" to-layer="1302" to-port="1" />
|
|
<edge from-layer="1302" from-port="2" to-layer="1304" to-port="0" />
|
|
<edge from-layer="1303" from-port="0" to-layer="1304" to-port="1" />
|
|
<edge from-layer="1304" from-port="2" to-layer="1306" to-port="0" />
|
|
<edge from-layer="1305" from-port="0" to-layer="1306" to-port="1" />
|
|
<edge from-layer="1306" from-port="2" to-layer="1308" to-port="0" />
|
|
<edge from-layer="1307" from-port="0" to-layer="1308" to-port="1" />
|
|
<edge from-layer="1308" from-port="2" to-layer="1309" to-port="2" />
|
|
<edge from-layer="1309" from-port="4" to-layer="1311" to-port="0" />
|
|
<edge from-layer="1310" from-port="0" to-layer="1311" to-port="1" />
|
|
<edge from-layer="1311" from-port="2" to-layer="1313" to-port="0" />
|
|
<edge from-layer="1312" from-port="0" to-layer="1313" to-port="1" />
|
|
<edge from-layer="1313" from-port="2" to-layer="1315" to-port="0" />
|
|
<edge from-layer="1314" from-port="0" to-layer="1315" to-port="1" />
|
|
<edge from-layer="1315" from-port="2" to-layer="1317" to-port="0" />
|
|
<edge from-layer="1316" from-port="0" to-layer="1317" to-port="1" />
|
|
<edge from-layer="1317" from-port="2" to-layer="1318" to-port="0" />
|
|
<edge from-layer="1318" from-port="2" to-layer="1320" to-port="0" />
|
|
<edge from-layer="1319" from-port="0" to-layer="1320" to-port="1" />
|
|
<edge from-layer="1320" from-port="2" to-layer="1322" to-port="0" />
|
|
<edge from-layer="1321" from-port="0" to-layer="1322" to-port="1" />
|
|
<edge from-layer="1322" from-port="2" to-layer="1324" to-port="0" />
|
|
<edge from-layer="1323" from-port="0" to-layer="1324" to-port="1" />
|
|
<edge from-layer="1324" from-port="2" to-layer="1334" to-port="1" />
|
|
<edge from-layer="1324" from-port="2" to-layer="1326" to-port="0" />
|
|
<edge from-layer="1325" from-port="0" to-layer="1326" to-port="1" />
|
|
<edge from-layer="1326" from-port="2" to-layer="1328" to-port="0" />
|
|
<edge from-layer="1327" from-port="0" to-layer="1328" to-port="1" />
|
|
<edge from-layer="1328" from-port="2" to-layer="1329" to-port="0" />
|
|
<edge from-layer="1329" from-port="1" to-layer="1331" to-port="0" />
|
|
<edge from-layer="1330" from-port="0" to-layer="1331" to-port="1" />
|
|
<edge from-layer="1331" from-port="2" to-layer="1333" to-port="0" />
|
|
<edge from-layer="1332" from-port="0" to-layer="1333" to-port="1" />
|
|
<edge from-layer="1333" from-port="2" to-layer="1334" to-port="0" />
|
|
<edge from-layer="1334" from-port="2" to-layer="1336" to-port="0" />
|
|
<edge from-layer="1335" from-port="0" to-layer="1336" to-port="1" />
|
|
<edge from-layer="1336" from-port="2" to-layer="1338" to-port="0" />
|
|
<edge from-layer="1337" from-port="0" to-layer="1338" to-port="1" />
|
|
<edge from-layer="1338" from-port="2" to-layer="1340" to-port="0" />
|
|
<edge from-layer="1339" from-port="0" to-layer="1340" to-port="1" />
|
|
<edge from-layer="1340" from-port="2" to-layer="1374" to-port="1" />
|
|
<edge from-layer="1340" from-port="2" to-layer="1342" to-port="0" />
|
|
<edge from-layer="1340" from-port="2" to-layer="1358" to-port="0" />
|
|
<edge from-layer="1340" from-port="2" to-layer="1350" to-port="0" />
|
|
<edge from-layer="1341" from-port="0" to-layer="1342" to-port="1" />
|
|
<edge from-layer="1342" from-port="2" to-layer="1344" to-port="0" />
|
|
<edge from-layer="1343" from-port="0" to-layer="1344" to-port="1" />
|
|
<edge from-layer="1344" from-port="2" to-layer="1346" to-port="0" />
|
|
<edge from-layer="1345" from-port="0" to-layer="1346" to-port="1" />
|
|
<edge from-layer="1346" from-port="2" to-layer="1348" to-port="0" />
|
|
<edge from-layer="1347" from-port="0" to-layer="1348" to-port="1" />
|
|
<edge from-layer="1348" from-port="2" to-layer="1365" to-port="0" />
|
|
<edge from-layer="1349" from-port="0" to-layer="1350" to-port="1" />
|
|
<edge from-layer="1350" from-port="2" to-layer="1352" to-port="0" />
|
|
<edge from-layer="1351" from-port="0" to-layer="1352" to-port="1" />
|
|
<edge from-layer="1352" from-port="2" to-layer="1354" to-port="0" />
|
|
<edge from-layer="1353" from-port="0" to-layer="1354" to-port="1" />
|
|
<edge from-layer="1354" from-port="2" to-layer="1356" to-port="0" />
|
|
<edge from-layer="1355" from-port="0" to-layer="1356" to-port="1" />
|
|
<edge from-layer="1356" from-port="2" to-layer="1365" to-port="1" />
|
|
<edge from-layer="1357" from-port="0" to-layer="1358" to-port="1" />
|
|
<edge from-layer="1358" from-port="2" to-layer="1360" to-port="0" />
|
|
<edge from-layer="1359" from-port="0" to-layer="1360" to-port="1" />
|
|
<edge from-layer="1360" from-port="2" to-layer="1362" to-port="0" />
|
|
<edge from-layer="1361" from-port="0" to-layer="1362" to-port="1" />
|
|
<edge from-layer="1362" from-port="2" to-layer="1364" to-port="0" />
|
|
<edge from-layer="1363" from-port="0" to-layer="1364" to-port="1" />
|
|
<edge from-layer="1364" from-port="2" to-layer="1365" to-port="2" />
|
|
<edge from-layer="1365" from-port="4" to-layer="1367" to-port="0" />
|
|
<edge from-layer="1366" from-port="0" to-layer="1367" to-port="1" />
|
|
<edge from-layer="1367" from-port="2" to-layer="1369" to-port="0" />
|
|
<edge from-layer="1368" from-port="0" to-layer="1369" to-port="1" />
|
|
<edge from-layer="1369" from-port="2" to-layer="1371" to-port="0" />
|
|
<edge from-layer="1370" from-port="0" to-layer="1371" to-port="1" />
|
|
<edge from-layer="1371" from-port="2" to-layer="1373" to-port="0" />
|
|
<edge from-layer="1372" from-port="0" to-layer="1373" to-port="1" />
|
|
<edge from-layer="1373" from-port="2" to-layer="1374" to-port="0" />
|
|
<edge from-layer="1374" from-port="2" to-layer="1376" to-port="0" />
|
|
<edge from-layer="1375" from-port="0" to-layer="1376" to-port="1" />
|
|
<edge from-layer="1376" from-port="2" to-layer="1378" to-port="0" />
|
|
<edge from-layer="1377" from-port="0" to-layer="1378" to-port="1" />
|
|
<edge from-layer="1378" from-port="2" to-layer="1380" to-port="0" />
|
|
<edge from-layer="1379" from-port="0" to-layer="1380" to-port="1" />
|
|
<edge from-layer="1380" from-port="2" to-layer="1382" to-port="0" />
|
|
<edge from-layer="1380" from-port="2" to-layer="1390" to-port="1" />
|
|
<edge from-layer="1381" from-port="0" to-layer="1382" to-port="1" />
|
|
<edge from-layer="1382" from-port="2" to-layer="1384" to-port="0" />
|
|
<edge from-layer="1383" from-port="0" to-layer="1384" to-port="1" />
|
|
<edge from-layer="1384" from-port="2" to-layer="1385" to-port="0" />
|
|
<edge from-layer="1385" from-port="1" to-layer="1387" to-port="0" />
|
|
<edge from-layer="1386" from-port="0" to-layer="1387" to-port="1" />
|
|
<edge from-layer="1387" from-port="2" to-layer="1389" to-port="0" />
|
|
<edge from-layer="1388" from-port="0" to-layer="1389" to-port="1" />
|
|
<edge from-layer="1389" from-port="2" to-layer="1390" to-port="0" />
|
|
<edge from-layer="1390" from-port="2" to-layer="1392" to-port="0" />
|
|
<edge from-layer="1391" from-port="0" to-layer="1392" to-port="1" />
|
|
<edge from-layer="1392" from-port="2" to-layer="1394" to-port="0" />
|
|
<edge from-layer="1393" from-port="0" to-layer="1394" to-port="1" />
|
|
<edge from-layer="1394" from-port="2" to-layer="1396" to-port="0" />
|
|
<edge from-layer="1395" from-port="0" to-layer="1396" to-port="1" />
|
|
<edge from-layer="1396" from-port="2" to-layer="1397" to-port="0" />
|
|
</edges>
<rt_info>
<Runtime_version value="2024.4.1-16618-643f23d1318-releases/2024/4" />
<conversion_parameters>
<framework value="pytorch" />
<is_python_object value="True" />
</conversion_parameters>
<optimum>
<optimum_intel_version value="1.20.1" />
<optimum_version value="1.23.3" />
<pytorch_version value="2.5.1" />
<transformers_version value="4.46.2" />
</optimum>
</rt_info>
</net>