Skip to content

Commit

Permalink
[PaddleV3] 添加 pytorch aten::list 的算子映射并修复相关模型 (#1075)
Browse files Browse the repository at this point in the history
* [Add] pytorch aten::list and fix models

* [Add] prim_list_ as a python function

* [Update] black.list
  • Loading branch information
megemini authored Oct 31, 2024
1 parent 22a2743 commit c20a276
Show file tree
Hide file tree
Showing 7 changed files with 50 additions and 26 deletions.
1 change: 0 additions & 1 deletion test_benchmark/PyTorch/DeepLabv3_ResNet50/deploy_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.inference import Config
from paddle.inference import create_predictor

Expand Down
15 changes: 4 additions & 11 deletions test_benchmark/PyTorch/DeepLabv3_ResNet50/pd_infer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from __future__ import print_function
import paddle.fluid as fluid
import paddle
import sys
import os
Expand All @@ -22,11 +21,8 @@ def rel_err(x, y):
# trace
paddle.enable_static()
exe = paddle.static.Executor(paddle.CPUPlace())
[prog, inputs, outputs] = fluid.io.load_inference_model(
dirname="pd_model_trace/inference_model/",
executor=exe,
model_filename="model.pdmodel",
params_filename="model.pdiparams")
[prog, inputs, outputs] = paddle.static.load_inference_model(
path_prefix="pd_model_trace/inference_model/model", executor=exe)
result = exe.run(prog, feed={inputs[0]: input_data}, fetch_list=outputs)
df0 = pytorch_output["aux"] - result[0]
df1 = pytorch_output["out"] - result[1]
Expand All @@ -40,11 +36,8 @@ def rel_err(x, y):
# script
paddle.enable_static()
exe = paddle.static.Executor(paddle.CPUPlace())
[prog, inputs, outputs] = fluid.io.load_inference_model(
dirname="pd_model_script/inference_model/",
executor=exe,
model_filename="model.pdmodel",
params_filename="model.pdiparams")
[prog, inputs, outputs] = paddle.static.load_inference_model(
path_prefix="pd_model_script/inference_model/model", executor=exe)
result = exe.run(prog, feed={inputs[0]: input_data}, fetch_list=outputs)
df0 = pytorch_output["aux"] - result[0]
df1 = pytorch_output["out"] - result[1]
Expand Down
1 change: 0 additions & 1 deletion test_benchmark/PyTorch/FCN_ResNet50/deploy_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.inference import Config
from paddle.inference import create_predictor

Expand Down
15 changes: 4 additions & 11 deletions test_benchmark/PyTorch/FCN_ResNet50/pd_infer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from __future__ import print_function
import paddle.fluid as fluid
import paddle
import sys
import os
Expand All @@ -22,11 +21,8 @@ def rel_err(x, y):
# trace
paddle.enable_static()
exe = paddle.static.Executor(paddle.CPUPlace())
[prog, inputs, outputs] = fluid.io.load_inference_model(
dirname="pd_model_trace/inference_model/",
executor=exe,
model_filename="model.pdmodel",
params_filename="model.pdiparams")
[prog, inputs, outputs] = paddle.static.load_inference_model(
path_prefix="pd_model_trace/inference_model/model", executor=exe)

result = exe.run(prog, feed={inputs[0]: input_data}, fetch_list=outputs)
df0 = pytorch_output["aux"] - result[0]
Expand All @@ -41,11 +37,8 @@ def rel_err(x, y):
# script
paddle.enable_static()
exe = paddle.static.Executor(paddle.CPUPlace())
[prog, inputs, outputs] = fluid.io.load_inference_model(
dirname="pd_model_script/inference_model/",
executor=exe,
model_filename="model.pdmodel",
params_filename="model.pdiparams")
[prog, inputs, outputs] = paddle.static.load_inference_model(
path_prefix="pd_model_script/inference_model/model", executor=exe)

result = exe.run(prog, feed={inputs[0]: input_data}, fetch_list=outputs)
df0 = pytorch_output["aux"] - result[0]
Expand Down
2 changes: 0 additions & 2 deletions test_benchmark/PyTorch/black.list
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,8 @@ BertForMaskedLM_dccuchile
BertModel_SpanBert
CamembertForQuestionAnswering
DPRContextEncoder
DeepLabv3_ResNet50
EasyOCR_detector
EasyOCR_recognizer
FCN_ResNet50
GRU
MiniFasNet
MockingBird
Expand Down
30 changes: 30 additions & 0 deletions x2paddle/op_mapper/pytorch2paddle/aten.py
Original file line number Diff line number Diff line change
Expand Up @@ -6711,3 +6711,33 @@ def aten_pad(mapper, graph, node):
**layer_attrs)
current_inputs = list(layer_inputs.values())
return current_inputs, current_outputs


def aten_list(mapper, graph, node):
    """Map python's `list` conversion, e.g. `list((1, 2, 3))`.

    TorchScript example:
        %1926 : int[] = aten::list(%1925)
    Operand meaning:
        %1926 (list): output, the value converted to a list.
        %1925 (-): input, anything convertible to a list.

    Returns:
        tuple: (current_inputs, current_outputs) — the input and output
        variable names of this node, for the mapper's bookkeeping.
    """
    scope_name = mapper.normalize_scope_name(node)
    output_name = mapper._get_outputs_name(node)[0]
    layer_outputs = [output_name]
    layer_inputs = {}
    # NOTE: aten::list takes no attributes, so no layer_attrs are collected.
    inputs_name, inputs_node = mapper._get_inputs_name(node)
    # Outputs produced by the current node.
    current_outputs = [output_name]

    # Process the single input: the value to be converted to a list.
    layer_inputs["x"] = inputs_name[0]

    # Inputs consumed by the current node.
    current_inputs = list(layer_inputs.values())

    graph.add_layer("prim.list_",
                    inputs=layer_inputs,
                    outputs=layer_outputs,
                    scope_name=scope_name)
    return current_inputs, current_outputs
12 changes: 12 additions & 0 deletions x2paddle/op_mapper/pytorch2paddle/prim2code.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,6 +530,18 @@ def prim_list(layer,
forward_func.extend(gen_codes([line], indent=indent))


def prim_list_(layer,
               indent=1,
               init_func=[],
               forward_func=[],
               layer_id=None,
               different_attrs=None):
    """Emit code for a plain python `list(x)` call.

    Generates a single assignment of the form `out = list(x)` and appends
    it (indented) to the forward-function body.
    """
    out_var = layer.outputs[0]
    in_var = layer.inputs["x"]
    code = "{} = list({})".format(out_var, in_var)
    forward_func.extend(gen_codes([code], indent=indent))


def prim_list_unpack(layer,
indent=1,
init_func=[],
Expand Down

0 comments on commit c20a276

Please sign in to comment.