# IR entry: @72_1_test_predict_backend_lite_lenet_LeNet5_construct_307
# Total subgraphs: 1

# Attrs:
has_shard: 0
flash_sp_send_recv_has_attached: 1
has_attached: 1
jit_level: O0
check_set_strategy_valid_once_only: 1
FLASH_SP_RUN_ONCE_ONLY: 1
pynative_run_in_graph: 0
FIAS_SP_RUN_ONCE_ONLY: 1
less_bn: 0
auto_parallel_finish_pre_action: 1

# Total params: 9
# Params:
%para1_x: <Tensor[Float32], (1, 1, 32, 32)> : [1, 1, 32, 32]
%para2_conv1.weight: <Ref[Tensor[Float32]], (6, 1, 5, 5), ref_key=conv1.weight>  :  has_default
%para3_conv2.weight: <Ref[Tensor[Float32]], (16, 6, 5, 5), ref_key=conv2.weight>  :  has_default
%para4_fc1.weight: <Ref[Tensor[Float32]], (120, 400), ref_key=fc1.weight>  :  has_default
%para5_fc1.bias: <Ref[Tensor[Float32]], (120), ref_key=fc1.bias>  :  has_default
%para6_fc2.weight: <Ref[Tensor[Float32]], (84, 120), ref_key=fc2.weight>  :  has_default
%para7_fc2.bias: <Ref[Tensor[Float32]], (84), ref_key=fc2.bias>  :  has_default
%para8_fc3.weight: <Ref[Tensor[Float32]], (10, 84), ref_key=fc3.weight>  :  has_default
%para9_fc3.bias: <Ref[Tensor[Float32]], (10), ref_key=fc3.bias>  :  has_default

Node counting information:
Total number of nodes: 52
Total number of cnodes: 27
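
The remainder of this file is the single subgraph traced from LeNet5.construct: each "%N(name) = Op(...)" line is a CNode, followed by its input/output abstracts (dtype and shape), its fully scoped name, and the Python source lines it was inlined from. As a point of reference, dumps in this format are typically obtained by enabling graph saving before the cell is compiled; the snippet below is a minimal sketch, assuming a recent MindSpore release (the dump directory name is illustrative, not taken from this file).

import mindspore as ms

# save_graphs / save_graphs_path are standard set_context options; which
# *.ir files get written depends on the MindSpore version and on jit_level
# (O0 here, per the attrs above).
ms.set_context(mode=ms.GRAPH_MODE, save_graphs=True, save_graphs_path="./ir_dump")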

subgraph attr:
has_shard: 0
flash_sp_send_recv_has_attached: 1
has_attached: 1
jit_level: O0
check_set_strategy_valid_once_only: 1
FLASH_SP_RUN_ONCE_ONLY: 1
pynative_run_in_graph: 0
FIAS_SP_RUN_ONCE_ONLY: 1
less_bn: 0
auto_parallel_finish_pre_action: 1
subgraph instance: 72_1_test_predict_backend_lite_lenet_LeNet5_construct_307 : 0x8261a60
# In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
subgraph @72_1_test_predict_backend_lite_lenet_LeNet5_construct_307() {
  %0(ValueNode_291) = Load(%para2_conv1.weight, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (6, 1, 5, 5), ref_key=conv1.weight>, <UMonad, NoShape>) -> (<Tensor[Float32], (6, 1, 5, 5)>)
      # Fullname with scope: (Default/Load-op0)
  %1(output) = Conv2D(%para1_x, %0) {instance name: conv2d} primitive_attrs: {kernel_size: (5, 5), mode: I64(1), out_channel: I64(6), input_names: [x, w], pad: (0, 0, 0, 0), pad_mode: I64(2), format: "NCHW", pad_list: (0, 0, 0, 0), groups: I64(1), stride: (1, 1, 1, 1), group: I64(1), dilation: (1, 1, 1, 1), output_names: [output]} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 1, 32, 32)>, <Tensor[Float32], (6, 1, 5, 5)>) -> (<Tensor[Float32], (1, 6, 28, 28)>)
      # Fullname with scope: (Default/conv1-Conv2d/Conv2D-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:56, 12~25/        x = self.conv1(x)/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:56, 12~22/        x = self.conv1(x)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/conv.py:365~369, 4~21/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/conv.py:366, 17~44/        output = self.conv2d(x, self.weight)/
  %2(CNode_292) = PrimFunc_ReLU(%1) {instance name: relu} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 6, 28, 28)>) -> (<Tensor[Float32], (1, 6, 28, 28)>)
      # Fullname with scope: (Default/relu-ReLU/ReLU-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:57, 12~24/        x = self.relu(x)/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:515~516, 4~31/    def construct(self, input):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:516, 15~31/        return self.relu(input)/
  %3(out) = MaxPool(%2) {instance name: max_pool} primitive_attrs: {pad_mode: I64(2), output_names: [output], kernel_size: (1, 1, 2, 2), format: "NCHW", strides: (1, 1, 2, 2), input_names: [x]} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 6, 28, 28)>) -> (<Tensor[Float32], (1, 6, 14, 14)>)
      # Fullname with scope: (Default/max_pool2d-MaxPool2d/MaxPool-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:58, 12~30/        x = self.max_pool2d(x)/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:583~604, 4~18/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:585~587, 8~31/        if x.ndim == 3:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:588~596, 8~34/        if self.use_pad:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:596, 18~34/            out = self.max_pool(x)/
  %4(ValueNode_293) = Load(%para3_conv2.weight, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (16, 6, 5, 5), ref_key=conv2.weight>, <UMonad, NoShape>) -> (<Tensor[Float32], (16, 6, 5, 5)>)
      # Fullname with scope: (Default/Load-op1)
  %5(output) = Conv2D(%3, %4) {instance name: conv2d} primitive_attrs: {kernel_size: (5, 5), mode: I64(1), out_channel: I64(16), input_names: [x, w], pad: (0, 0, 0, 0), pad_mode: I64(2), format: "NCHW", pad_list: (0, 0, 0, 0), groups: I64(1), stride: (1, 1, 1, 1), group: I64(1), dilation: (1, 1, 1, 1), output_names: [output]} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 6, 14, 14)>, <Tensor[Float32], (16, 6, 5, 5)>) -> (<Tensor[Float32], (1, 16, 10, 10)>)
      # Fullname with scope: (Default/conv2-Conv2d/Conv2D-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:59, 12~25/        x = self.conv2(x)/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:59, 12~22/        x = self.conv2(x)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/conv.py:365~369, 4~21/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/conv.py:366, 17~44/        output = self.conv2d(x, self.weight)/
  %6(CNode_294) = PrimFunc_ReLU(%5) {instance name: relu} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 16, 10, 10)>) -> (<Tensor[Float32], (1, 16, 10, 10)>)
      # Fullname with scope: (Default/relu-ReLU/ReLU-op1)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:60, 12~24/        x = self.relu(x)/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:515~516, 4~31/    def construct(self, input):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:516, 15~31/        return self.relu(input)/
  %7(out) = MaxPool(%6) {instance name: max_pool} primitive_attrs: {pad_mode: I64(2), output_names: [output], kernel_size: (1, 1, 2, 2), format: "NCHW", strides: (1, 1, 2, 2), input_names: [x]} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 16, 10, 10)>) -> (<Tensor[Float32], (1, 16, 5, 5)>)
      # Fullname with scope: (Default/max_pool2d-MaxPool2d/MaxPool-op1)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:61, 12~30/        x = self.max_pool2d(x)/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:583~604, 4~18/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:585~587, 8~31/        if x.ndim == 3:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:588~596, 8~34/        if self.use_pad:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/pooling.py:596, 18~34/            out = self.max_pool(x)/
  %8(CNode_295) = PrimFunc_Flatten(%7) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 16, 5, 5)>) -> (<Tensor[Float32], (1, 400)>)
      # Fullname with scope: (Default/flatten-Flatten/Flatten-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:64, 12~24/        x = self.flatten(x)/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:579~584, 4~75/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:584, 15~24/        return F.flatten(x, start_dim=self.start_dim, end_dim=self.end_dim)/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/ops/function/array_func.py:1834~1929/def flatten(input, order='C', *, start_dim=1, end_dim=-1):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/ops/function/array_func.py:1908~1909, 8~41/        if x_rank in (0, 1):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/ops/function/array_func.py:1910, 15~30/        return flatten_(input)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
  %9(ValueNode_296) = Load(%para4_fc1.weight, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (120, 400), ref_key=fc1.weight>, <UMonad, NoShape>) -> (<Tensor[Float32], (120, 400)>)
      # Fullname with scope: (Default/Load-op2)
  %10(x) = PrimFunc_MatMul(%8, %9, Bool(0), Bool(1)) {instance name: matmul} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 400)>, <Tensor[Float32], (120, 400)>, <Bool, NoShape>, <Bool, NoShape>) -> (<Tensor[Float32], (1, 120)>)
      # Fullname with scope: (Default/fc1-Dense/MatMul-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:65, 22~33/        x = self.relu(self.fc1(x))/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:65, 22~30/        x = self.relu(self.fc1(x))/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:744~756, 4~16/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:748, 12~39/        x = self.matmul(x, self.weight)/
  %11(ValueNode_297) = Load(%para5_fc1.bias, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (120), ref_key=fc1.bias>, <UMonad, NoShape>) -> (<Tensor[Float32], (120)>)
      # Fullname with scope: (Default/fc1-Dense/Load-op0)
  %12(x) = PrimFunc_BiasAdd(%10, %11, I64(0)) {instance name: bias_add} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 120)>, <Tensor[Float32], (120)>, <Int64, NoShape>) -> (<Tensor[Float32], (1, 120)>)
      # Fullname with scope: (Default/fc1-Dense/BiasAdd-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:65, 22~33/        x = self.relu(self.fc1(x))/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:65, 22~30/        x = self.relu(self.fc1(x))/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:744~756, 4~16/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:749~750, 8~43/        if self.has_bias:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:750, 16~43/            x = self.bias_add(x, self.bias)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
  %13(CNode_298) = PrimFunc_ReLU(%12) {instance name: relu} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 120)>) -> (<Tensor[Float32], (1, 120)>)
      # Fullname with scope: (Default/relu-ReLU/ReLU-op2)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:65, 12~34/        x = self.relu(self.fc1(x))/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:515~516, 4~31/    def construct(self, input):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:516, 15~31/        return self.relu(input)/
  %14(ValueNode_299) = Load(%para6_fc2.weight, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (84, 120), ref_key=fc2.weight>, <UMonad, NoShape>) -> (<Tensor[Float32], (84, 120)>)
      # Fullname with scope: (Default/Load-op3)
  %15(x) = PrimFunc_MatMul(%13, %14, Bool(0), Bool(1)) {instance name: matmul} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 120)>, <Tensor[Float32], (84, 120)>, <Bool, NoShape>, <Bool, NoShape>) -> (<Tensor[Float32], (1, 84)>)
      # Fullname with scope: (Default/fc2-Dense/MatMul-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:66, 22~33/        x = self.relu(self.fc2(x))/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:66, 22~30/        x = self.relu(self.fc2(x))/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:744~756, 4~16/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:748, 12~39/        x = self.matmul(x, self.weight)/
  %16(ValueNode_300) = Load(%para7_fc2.bias, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (84), ref_key=fc2.bias>, <UMonad, NoShape>) -> (<Tensor[Float32], (84)>)
      # Fullname with scope: (Default/fc2-Dense/Load-op0)
  %17(x) = PrimFunc_BiasAdd(%15, %16, I64(0)) {instance name: bias_add} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 84)>, <Tensor[Float32], (84)>, <Int64, NoShape>) -> (<Tensor[Float32], (1, 84)>)
      # Fullname with scope: (Default/fc2-Dense/BiasAdd-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:66, 22~33/        x = self.relu(self.fc2(x))/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:66, 22~30/        x = self.relu(self.fc2(x))/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:744~756, 4~16/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:749~750, 8~43/        if self.has_bias:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:750, 16~43/            x = self.bias_add(x, self.bias)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
  %18(CNode_301) = PrimFunc_ReLU(%17) {instance name: relu} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 84)>) -> (<Tensor[Float32], (1, 84)>)
      # Fullname with scope: (Default/relu-ReLU/ReLU-op3)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:66, 12~34/        x = self.relu(self.fc2(x))/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:515~516, 4~31/    def construct(self, input):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/activation.py:516, 15~31/        return self.relu(input)/
  %19(ValueNode_302$x) = Load(%para8_fc3.weight, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (10, 84), ref_key=fc3.weight>, <UMonad, NoShape>) -> (<Tensor[Float32], (10, 84)>)
      # Fullname with scope: (Default/Load-op4)
  %20(x) = PrimFunc_MatMul(%18, %19, Bool(0), Bool(1)) {instance name: matmul} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 84)>, <Tensor[Float32], (10, 84)>, <Bool, NoShape>, <Bool, NoShape>) -> (<Tensor[Float32], (1, 10)>)
      # Fullname with scope: (Default/fc3-Dense/MatMul-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:67, 12~23/        x = self.fc3(x)/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:67, 12~20/        x = self.fc3(x)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:744~756, 4~16/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:748, 12~39/        x = self.matmul(x, self.weight)/
  %21(ValueNode_303$x) = Load(%para9_fc3.bias, UMonad[U]) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Ref[Tensor[Float32]], (10), ref_key=fc3.bias>, <UMonad, NoShape>) -> (<Tensor[Float32], (10)>)
      # Fullname with scope: (Default/fc3-Dense/Load-op0)
  %22(x) = PrimFunc_BiasAdd(%20, %21, I64(0)) {instance name: bias_add} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 10)>, <Tensor[Float32], (10)>, <Int64, NoShape>) -> (<Tensor[Float32], (1, 10)>)
      # Fullname with scope: (Default/fc3-Dense/BiasAdd-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:67, 12~23/        x = self.fc3(x)/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:67, 12~20/        x = self.fc3(x)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:744~756, 4~16/    def construct(self, x):/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:749~750, 8~43/        if self.has_bias:/
      # In file /home/jenkins/.local/lib/python3.9/site-packages/mindspore/nn/layer/basic.py:750, 16~43/            x = self.bias_add(x, self.bias)/<~~This line of code can be shared by multiple nodes, and may be duplicated./
  %23(ValueNode_304) = MakeTuple(%19, %14, %9, %0, %4, %11, %16, %21) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (10, 84)>, <Tensor[Float32], (84, 120)>, <Tensor[Float32], (120, 400)>, <Tensor[Float32], (6, 1, 5, 5)>, <Tensor[Float32], (16, 6, 5, 5)>, <Tensor[Float32], (120)>, <Tensor[Float32], (84)>, <Tensor[Float32], (10)>) -> (<Tuple[Tensor[Float32]*8], TupleShape((10, 84), (84, 120), (120, 400), (6, 1, 5, 5), (16, 6, 5, 5), (120), (84), (10)), elements_use_flags={[const vector]{1, 1, 1, 1, 1, 1, 1, 1}}>)
      # Fullname with scope: (Default/fc3-Dense/MakeTuple-op0)
  %24(ValueNode_305) = UpdateState(UMonad[U], %23) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<UMonad, NoShape>, <Tuple[Tensor[Float32]*8], TupleShape((10, 84), (84, 120), (120, 400), (6, 1, 5, 5), (16, 6, 5, 5), (120), (84), (10)), elements_use_flags={[const vector]{1, 1, 1, 1, 1, 1, 1, 1}}>) -> (<UMonad, NoShape>)
      # Fullname with scope: (Default/fc3-Dense/UpdateState-op0)
  %25(CNode_306) = Depend(%22, %24) primitive_attrs: {side_effect_propagate: I64(1)} cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 10)>, <UMonad, NoShape>) -> (<Tensor[Float32], (1, 10)>)
      # Fullname with scope: (Default/Depend-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:62~63, 8~20/        if not self.include_top:/
  Return(%25) cnode_attrs: {checkpoint: Bool(1), is_dynamic_len: Bool(0)}
      : (<Tensor[Float32], (1, 10)>)
      # Fullname with scope: (Default/Return-op0)
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:55~68, 4~16/    def construct(self, x):/
      # In file /home/jenkins/mindspore/mindspore/lite/test/st/python/import_ms_and_mslite/test_predict_backend_lite_lenet.py:62~63, 8~20/        if not self.include_top:/
}
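
For reference, the graph above maps one-to-one onto a plain LeNet5 cell. Below is a minimal reconstruction sketched from the parameter shapes and the source-line annotations in this dump; constructor arguments such as num_class and num_channel, and the weight initialization, are assumptions, while the layer shapes, the include_top branch, and the construct body follow the dump.

import numpy as np
import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor

class LeNet5(nn.Cell):
    """LeNet5 as implied by the IR: two Conv/ReLU/MaxPool stages, then three Dense layers."""
    def __init__(self, num_class=10, num_channel=1, include_top=True):
        super().__init__()
        self.conv1 = nn.Conv2d(num_channel, 6, 5, pad_mode='valid')  # weight (6, 1, 5, 5), no bias
        self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')           # weight (16, 6, 5, 5), no bias
        self.relu = nn.ReLU()
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.include_top = include_top
        self.flatten = nn.Flatten()
        self.fc1 = nn.Dense(16 * 5 * 5, 120)  # MatMul(transpose_b=True) with (120, 400), then BiasAdd
        self.fc2 = nn.Dense(120, 84)          # (84, 120) weight, (84) bias
        self.fc3 = nn.Dense(84, num_class)    # (10, 84) weight, (10) bias

    def construct(self, x):
        x = self.conv1(x)           # (1, 1, 32, 32) -> (1, 6, 28, 28)
        x = self.relu(x)
        x = self.max_pool2d(x)      # -> (1, 6, 14, 14)
        x = self.conv2(x)           # -> (1, 16, 10, 10)
        x = self.relu(x)
        x = self.max_pool2d(x)      # -> (1, 16, 5, 5)
        if not self.include_top:
            return x
        x = self.flatten(x)         # -> (1, 400)
        x = self.relu(self.fc1(x))  # -> (1, 120)
        x = self.relu(self.fc2(x))  # -> (1, 84)
        x = self.fc3(x)             # -> (1, 10)
        return x

# Running the cell on the input shape recorded in %para1_x reproduces the
# abstract of the Return node; compiling this call under the save_graphs
# context shown earlier is what writes dumps like the one above.
ms.set_context(mode=ms.GRAPH_MODE)
net = LeNet5()
out = net(Tensor(np.ones((1, 1, 32, 32)), ms.float32))
print(out.shape)  # (1, 10)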