<?xml version="1.0"?>
<!--
  OpenVINO IR graph (IR version 11), auto-generated from a PyTorch model
  (see <conversion_parameters> at the bottom; Runtime 2025.1.0).

  DO NOT EDIT BY HAND: every Const layer's offset/size pair indexes raw bytes
  in the companion .bin weights file (standard OpenVINO IR layout - TODO
  confirm the .bin filename against the enclosing folder), and the <edges>
  list wires layers together by (layer id, port id). Changing either breaks
  loading.

  Topology (audio "forward embeddings" front-end, per the module names):
    input (B, H, W) f32
      -> Unsqueeze axis const           -> (B, 1, H, W)
      -> Conv 3x3 stride 2 + bias + ReLU         (1 -> 1024 channels)
      -> depthwise GroupConv 3x3 stride 2 + bias (1024 groups)
      -> pointwise Conv 1x1 + bias + ReLU
      -> depthwise GroupConv 3x3 stride 2 + bias
      -> pointwise Conv 1x1 + bias + ReLU
      -> Transpose (perm const in .bin) -> (B, ?, 1024, ?)
      -> Reshape to rank 3 (target shape const in .bin)
      -> Linear: MatMul (weight 1024x10240, transpose_b) + bias
      -> Result (B, T, 1024)

  All weights/biases are stored f16 ("_compressed" Const) and decompressed to
  f32 at load time via a Convert tagged rt_info attribute "decompression".
-->
<net name="Model16686" version="11">
	<layers>
		<!-- Graph input: dynamic rank-3 f32 tensor (all dims -1 = dynamic). -->
		<layer id="0" name="input_tensor" type="Parameter" version="opset1">
			<data shape="?,?,?" element_type="f32" />
			<output>
				<port id="0" precision="FP32" names="input_tensor">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- Scalar i64 axis for Unsqueeze (value at .bin offset 0; presumably 1, since the output gains a dim of size 1 in position 1 - confirm against .bin). -->
		<layer id="1" name="10" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="0" size="8" />
			<output>
				<port id="0" precision="I64" names="10" />
			</output>
		</layer>
		<!-- Insert a channel dim: (B, H, W) -> (B, 1, H, W) for the 2D convolutions below. -->
		<layer id="2" name="__module.embed/aten::unsqueeze/Unsqueeze" type="Unsqueeze" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I64" />
			</input>
			<output>
				<port id="2" precision="FP32" names="13">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- embed.conv.0: 3x3 conv weight, f16-compressed (1024 out ch, 1 in ch). -->
		<layer id="3" name="self.embed.conv.0.weight_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1024, 1, 3, 3" offset="8" size="18432" />
			<output>
				<port id="0" precision="FP16" names="self.embed.conv.0.weight">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<!-- Decompress f16 -> f32 (rt_info "decompression" lets plugins fold/skip this). -->
		<layer id="4" name="self.embed.conv.0.weight" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<!-- conv.0: 3x3, stride 2, pad 1 -> halves H and W, expands 1 -> 1024 channels. -->
		<layer id="5" name="__module.embed.conv.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
			<data strides="2, 2" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.0 bias, reshaped to (1, 1024, 1, 1) for NumPy-broadcast Add. -->
		<layer id="6" name="__module.embed.conv.0/aten::_convolution/Reshape_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1, 1024, 1, 1" offset="18440" size="2048" />
			<output>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="7" name="__module.embed.conv.0/aten::_convolution/Reshape" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="8" name="__module.embed.conv.0/aten::_convolution/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="25">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.1: ReLU after conv.0 (the same ReLU module is reused after each 1x1 conv below). -->
		<layer id="9" name="__module.embed.conv.1/aten::relu/Relu" type="ReLU">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="26">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.2: depthwise 3x3 weight, 5-D group layout (groups=1024, out/in per group = 1). -->
		<layer id="10" name="__module.embed.conv.2/aten::_convolution/Reshape_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1024, 1, 1, 3, 3" offset="20488" size="18432" />
			<output>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="11" name="__module.embed.conv.2/aten::_convolution/Reshape" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<!-- conv.2: depthwise 3x3, stride 2, pad 1 -> halves spatial dims again; channels unchanged. -->
		<layer id="12" name="__module.embed.conv.2/aten::_convolution/GroupConvolution" type="GroupConvolution" version="opset1">
			<data strides="2, 2" pads_begin="1, 1" pads_end="1, 1" dilations="1, 1" auto_pad="explicit" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.2 bias. -->
		<layer id="13" name="__module.embed.conv.2/aten::_convolution/Reshape_1_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1, 1024, 1, 1" offset="38920" size="2048" />
			<output>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="14" name="__module.embed.conv.2/aten::_convolution/Reshape_1" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="15" name="__module.embed.conv.2/aten::_convolution/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="33">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.3: pointwise 1x1 conv weight (1024 -> 1024), channel-mixing after the depthwise step. -->
		<layer id="16" name="self.embed.conv.3.weight_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1024, 1024, 1, 1" offset="40968" size="2097152" />
			<output>
				<port id="0" precision="FP16" names="self.embed.conv.3.weight">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="17" name="self.embed.conv.3.weight" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.3: 1x1, stride 1, no padding -> spatial dims preserved. -->
		<layer id="18" name="__module.embed.conv.3/aten::_convolution/Convolution" type="Convolution" version="opset1">
			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.3 bias. -->
		<layer id="19" name="__module.embed.conv.3/aten::_convolution/Reshape_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1, 1024, 1, 1" offset="2138120" size="2048" />
			<output>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="20" name="__module.embed.conv.3/aten::_convolution/Reshape" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="21" name="__module.embed.conv.3/aten::_convolution/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="40">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- Second application of the shared conv.1 ReLU module. -->
		<layer id="22" name="__module.embed.conv.1/aten::relu/Relu_1" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="41">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.5: second depthwise 3x3 stride-2 stage (same structure as conv.2). -->
		<layer id="23" name="__module.embed.conv.5/aten::_convolution/Reshape_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1024, 1, 1, 3, 3" offset="2140168" size="18432" />
			<output>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="24" name="__module.embed.conv.5/aten::_convolution/Reshape" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="25" name="__module.embed.conv.5/aten::_convolution/GroupConvolution" type="GroupConvolution" version="opset1">
			<data strides="2, 2" pads_begin="1, 1" pads_end="1, 1" dilations="1, 1" auto_pad="explicit" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="26" name="__module.embed.conv.5/aten::_convolution/Reshape_1_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1, 1024, 1, 1" offset="2158600" size="2048" />
			<output>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="27" name="__module.embed.conv.5/aten::_convolution/Reshape_1" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="28" name="__module.embed.conv.5/aten::_convolution/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="48">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- conv.6: second pointwise 1x1 conv (same structure as conv.3). -->
		<layer id="29" name="self.embed.conv.6.weight_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1024, 1024, 1, 1" offset="2160648" size="2097152" />
			<output>
				<port id="0" precision="FP16" names="self.embed.conv.6.weight">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="30" name="self.embed.conv.6.weight" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="31" name="__module.embed.conv.6/aten::_convolution/Convolution" type="Convolution" version="opset1">
			<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="32" name="__module.embed.conv.6/aten::_convolution/Reshape_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1, 1024, 1, 1" offset="4257800" size="2048" />
			<output>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="33" name="__module.embed.conv.6/aten::_convolution/Reshape" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="34" name="__module.embed.conv.6/aten::_convolution/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1024</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="55">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- Third application of the shared conv.1 ReLU; tensor name "x" marks the conv stack output. -->
		<layer id="35" name="__module.embed.conv.1/aten::relu/Relu_2" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="56,x">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- Permutation order (4 x i32) for the Transpose, stored in the .bin; output shape implies the channel axis moves to position 2. -->
		<layer id="36" name="__module.embed/aten::transpose/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="4259848" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="37" name="__module.embed/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="59">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- Target shape (3 x i64) for the Reshape, stored in the .bin; rt_info "precise" forbids precision-lowering of this constant. -->
		<layer id="38" name="Constant_3243459" type="Const" version="opset1">
			<data element_type="i64" shape="3" offset="4259864" size="24" />
			<rt_info>
				<attribute name="precise" version="0" />
			</rt_info>
			<output>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<!-- Collapse rank 4 -> rank 3; the MatMul below with a (1024, 10240) transposed weight implies the last dim becomes 10240 (= 1024 channels x 10 - confirm against the .bin shape const). -->
		<layer id="39" name="__module.embed/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="61">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<!-- embed.out: linear projection weight (out_features=1024, in_features=10240). -->
		<layer id="40" name="self.embed.out.weight_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1024, 10240" offset="4259888" size="20971520" />
			<output>
				<port id="0" precision="FP16" names="self.embed.out.weight">
					<dim>1024</dim>
					<dim>10240</dim>
				</port>
			</output>
		</layer>
		<layer id="41" name="self.embed.out.weight" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1024</dim>
					<dim>10240</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>10240</dim>
				</port>
			</output>
		</layer>
		<!-- Linear: x @ W^T (transpose_b) -> (..., 1024). -->
		<layer id="42" name="__module.embed.out/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1024</dim>
					<dim>10240</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<!-- Linear bias, broadcast over batch and time. -->
		<layer id="43" name="Constant_3243449_compressed" type="Const" version="opset1">
			<data element_type="f16" shape="1, 1, 1024" offset="25231408" size="2048" />
			<output>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="44" name="Constant_3243449" type="Convert" version="opset1">
			<data destination_type="f32" />
			<rt_info>
				<attribute name="decompression" version="0" />
			</rt_info>
			<input>
				<port id="0" precision="FP16">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<layer id="45" name="__module.embed.out/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1024</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</output>
		</layer>
		<!-- Graph output: (batch, frames, 1024) embeddings. -->
		<layer id="46" name="Result_3241509" type="Result" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1024</dim>
				</port>
			</input>
		</layer>
	</layers>
	<!-- Data-flow wiring: each edge connects (from-layer, from-port) -> (to-layer, to-port). -->
	<edges>
		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
		<edge from-layer="2" from-port="2" to-layer="5" to-port="0" />
		<edge from-layer="3" from-port="0" to-layer="4" to-port="0" />
		<edge from-layer="4" from-port="1" to-layer="5" to-port="1" />
		<edge from-layer="5" from-port="2" to-layer="8" to-port="0" />
		<edge from-layer="6" from-port="0" to-layer="7" to-port="0" />
		<edge from-layer="7" from-port="1" to-layer="8" to-port="1" />
		<edge from-layer="8" from-port="2" to-layer="9" to-port="0" />
		<edge from-layer="9" from-port="1" to-layer="12" to-port="0" />
		<edge from-layer="10" from-port="0" to-layer="11" to-port="0" />
		<edge from-layer="11" from-port="1" to-layer="12" to-port="1" />
		<edge from-layer="12" from-port="2" to-layer="15" to-port="0" />
		<edge from-layer="13" from-port="0" to-layer="14" to-port="0" />
		<edge from-layer="14" from-port="1" to-layer="15" to-port="1" />
		<edge from-layer="15" from-port="2" to-layer="18" to-port="0" />
		<edge from-layer="16" from-port="0" to-layer="17" to-port="0" />
		<edge from-layer="17" from-port="1" to-layer="18" to-port="1" />
		<edge from-layer="18" from-port="2" to-layer="21" to-port="0" />
		<edge from-layer="19" from-port="0" to-layer="20" to-port="0" />
		<edge from-layer="20" from-port="1" to-layer="21" to-port="1" />
		<edge from-layer="21" from-port="2" to-layer="22" to-port="0" />
		<edge from-layer="22" from-port="1" to-layer="25" to-port="0" />
		<edge from-layer="23" from-port="0" to-layer="24" to-port="0" />
		<edge from-layer="24" from-port="1" to-layer="25" to-port="1" />
		<edge from-layer="25" from-port="2" to-layer="28" to-port="0" />
		<edge from-layer="26" from-port="0" to-layer="27" to-port="0" />
		<edge from-layer="27" from-port="1" to-layer="28" to-port="1" />
		<edge from-layer="28" from-port="2" to-layer="31" to-port="0" />
		<edge from-layer="29" from-port="0" to-layer="30" to-port="0" />
		<edge from-layer="30" from-port="1" to-layer="31" to-port="1" />
		<edge from-layer="31" from-port="2" to-layer="34" to-port="0" />
		<edge from-layer="32" from-port="0" to-layer="33" to-port="0" />
		<edge from-layer="33" from-port="1" to-layer="34" to-port="1" />
		<edge from-layer="34" from-port="2" to-layer="35" to-port="0" />
		<edge from-layer="35" from-port="1" to-layer="37" to-port="0" />
		<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
		<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
		<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
		<edge from-layer="39" from-port="2" to-layer="42" to-port="0" />
		<edge from-layer="40" from-port="0" to-layer="41" to-port="0" />
		<edge from-layer="41" from-port="1" to-layer="42" to-port="1" />
		<edge from-layer="42" from-port="2" to-layer="45" to-port="0" />
		<edge from-layer="43" from-port="0" to-layer="44" to-port="0" />
		<edge from-layer="44" from-port="1" to-layer="45" to-port="1" />
		<edge from-layer="45" from-port="2" to-layer="46" to-port="0" />
	</edges>
	<!-- Conversion provenance recorded by the OpenVINO converter. -->
	<rt_info>
		<Runtime_version value="2025.1.0-18311-da00e90afb7" />
		<conversion_parameters>
			<framework value="pytorch" />
			<is_python_object value="True" />
		</conversion_parameters>
	</rt_info>
</net>