bump PyTorch version to 1.11 (apache#10794)
* bump PyTorch version to 1.11

* disable some caffe2 ci

* Fix sub conversion in PyTorch frontend

* use fuse_modules_qat if available, fall back to fuse_modules for older PyTorch

* Re-Run CI
t-vi authored and pfk-beta committed Apr 11, 2022
1 parent 42d6be6 commit 96387b9
Showing 6 changed files with 17 additions and 156 deletions.
3 changes: 0 additions & 3 deletions docker/Dockerfile.ci_gpu
@@ -77,9 +77,6 @@ RUN bash /install/ubuntu_install_onnx.sh
 COPY install/ubuntu_install_tflite.sh /install/ubuntu_install_tflite.sh
 RUN bash /install/ubuntu_install_tflite.sh
 
-COPY install/ubuntu_install_caffe2.sh /install/ubuntu_install_caffe2.sh
-RUN bash /install/ubuntu_install_caffe2.sh
-
 COPY install/ubuntu_install_dgl.sh /install/ubuntu_install_dgl.sh
 RUN bash /install/ubuntu_install_dgl.sh
 
4 changes: 2 additions & 2 deletions docker/install/ubuntu_install_onnx.sh
@@ -36,5 +36,5 @@ pip3 install \
 pip3 install future
 
 pip3 install \
-    torch==1.10.1 \
-    torchvision==0.11.2
+    torch==1.11.0 \
+    torchvision==0.12.0
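
For reference, a quick sanity check that an environment actually picked up the new pins (a minimal sketch, not part of the CI scripts; the version prefixes follow the pins above):

import torch
import torchvision

# The image built from the script above should report the pinned releases.
assert torch.__version__.startswith("1.11."), torch.__version__
assert torchvision.__version__.startswith("0.12."), torchvision.__version__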
145 changes: 0 additions & 145 deletions gallery/how_to/compile_models/from_caffe2.py

This file was deleted.

15 changes: 13 additions & 2 deletions python/tvm/relay/frontend/pytorch.py
@@ -2039,6 +2039,14 @@ def stack(self, inputs, input_types):
         assert isinstance(ty, tvm.ir.TypeCall) and ty.func == list_ty, msg
         return self.tensor_array_stack(inputs, input_types)
 
+    def sub(self, inputs, input_types):
+        if len(inputs) == 3:
+            data0, data1, alpha = self.pytorch_promote_types(inputs, input_types)
+            return get_relay_op("subtract")(data0, alpha * data1)
+        else:
+            data0, data1 = self.pytorch_promote_types(inputs, input_types)
+            return get_relay_op("subtract")(data0, data1)
+
     def rsub(self, inputs, input_types):
         data0, data1, alpha = self.pytorch_promote_types(inputs, input_types)
 
@@ -2859,7 +2867,10 @@ def all_any_common(self, op, inputs, input_types):
         inp = inputs[0]
         return op(inp, axis=dim, keepdims=keepdim)
 
-    def searchsorted_common(self, sorted_sequence, values, out_int32, right):
+    def searchsorted_common(
+        self, sorted_sequence, values, out_int32, right, side=None, out=None, sorter=None
+    ):
+        assert side is None and out is None and sorter is None, "unsupported parameters"
         dtype = "int32" if out_int32 else "int64"
         values_shape = _infer_shape(values)
 
@@ -2959,7 +2970,7 @@ def create_convert_map(self):
             "aten::pixel_shuffle": self.pixel_shuffle,
             "aten::device": self.none,
             "prim::device": self.none,
-            "aten::sub": self.make_elemwise("subtract"),
+            "aten::sub": self.sub,
             "aten::max": self.max,
             "aten::min": self.min,
             "aten::mul": self.make_elemwise("multiply"),
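
Two notes on the frontend changes above. First, aten::sub carries an optional alpha multiplier on its second operand, which is why the new sub converter emits data0 - alpha * data1 when three inputs are present. Second, newer PyTorch widens the searchsorted schema with side, out, and sorter arguments; the updated signature accepts them but asserts they are unset, since the Relay lowering has no equivalent. A minimal illustration in plain PyTorch (independent of TVM; the values are made up for the example):

import torch

a = torch.tensor([10.0, 20.0])
b = torch.tensor([1.0, 2.0])

# torch.sub(a, b, alpha=k) computes a - k * b, matching the converter above.
assert torch.equal(torch.sub(a, b, alpha=3), a - 3 * b)

# searchsorted without the new side/sorter arguments, the only form the
# frontend converts.
seq = torch.tensor([1.0, 3.0, 5.0, 7.0])
print(torch.searchsorted(seq, torch.tensor([2.0, 6.0])))  # tensor([1, 3])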
3 changes: 2 additions & 1 deletion tests/python/frontend/pytorch/qnn_test.py
@@ -743,9 +743,10 @@ def __init__(self, inputsize=(128, 128)):
         self.backbone = Backbone()
 
     def fuse_model(self):
+        fuse_modules_qat = getattr(torch.ao.quantization, "fuse_modules_qat", fuse_modules)
         for idx, m in enumerate(self.modules()):
             if type(m) == ConvBnRelu:
-                torch.quantization.fuse_modules(m, ["conv", "bn", "relu"], inplace=True)
+                fuse_modules_qat(m, ["conv", "bn", "relu"], inplace=True)
 
     def forward(self, input):
         input = self.quant(input)
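
The test change above tracks a PyTorch 1.11 API split: QAT fusion moved to torch.ao.quantization.fuse_modules_qat, while older releases only ship fuse_modules. A hedged sketch of the same lookup outside the test (no new API; the getattr fallback mirrors the diff):

import torch
from torch.quantization import fuse_modules

# Prefer the QAT-aware fuser where it exists (PyTorch >= 1.11); the plain
# fuser keeps older versions working.
fuse_modules_qat = getattr(torch.ao.quantization, "fuse_modules_qat", fuse_modules)

# Both callables share a signature, e.g.:
#   fuse_modules_qat(module, ["conv", "bn", "relu"], inplace=True)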
3 changes: 0 additions & 3 deletions tests/scripts/task_python_frontend.sh
@@ -56,9 +56,6 @@ function shard2 {
     i=$((i+1))
 done
 
-echo "Running relay caffe2 frontend test..."
-run_pytest cython python-frontend-caffe2 tests/python/frontend/caffe2
-
 echo "Running relay DarkNet frontend test..."
 run_pytest cython python-frontend-darknet tests/python/frontend/darknet
 
