[UTILS] Move target to tvm; rename convolution as conv2d #492

Merged (4 commits) on Sep 26, 2017
python/tvm/__init__.py (1 addition, 0 deletions)
@@ -14,6 +14,7 @@
 from . import module
 from . import node
 from . import ir_builder
+from . import target

 from . import ndarray as nd
 from .ndarray import context, cpu, gpu, opencl, cl, metal, mtl, vpi, rocm
topi/python/topi/target.py → python/tvm/target.py (1 addition, 1 deletion)
@@ -1,4 +1,4 @@
-"""Target management API of topi"""
+"""Target management API of tvm"""

 from __future__ import absolute_import
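Note on usage: with `target` now exposed as a submodule of `tvm`, downstream code only needs to change where it imports the context managers from. The sketch below is an illustration, assuming helpers such as `rasp()` behave the same after the move:

```python
import tvm

# Before this PR the target context managers lived in topi:
#     import topi
#     with topi.target.rasp():
#         ...
# After the move they ship with tvm itself:
with tvm.target.rasp():
    # Declarations and schedules created here see the Raspberry Pi target.
    pass
```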
topi/python/topi/__init__.py (0 additions, 1 deletion)
@@ -16,6 +16,5 @@
 from . import nn
 from . import cuda
 from . import rasp
-from . import target
 from . import testing
 from . import util
topi/python/topi/nn/__init__.py (1 addition, 1 deletion)
@@ -3,7 +3,7 @@
 from __future__ import absolute_import as _abs

 from .batch_norm import *
-from .convolution import *
+from .conv2d import *
 from .depthwise_convolution import *
 from .elemwise import *
 from .dilate import *
topi/python/topi/nn/convolution.py → topi/python/topi/nn/conv2d.py
@@ -1,14 +1,14 @@
 # pylint: disable=invalid-name, unused-variable, too-many-locals
-"""Convolution operators"""
+"""Conv2D operators"""
 from __future__ import absolute_import as _abs
 from collections import namedtuple
 import tvm
+from tvm import target as _target
 from .pad import pad
 from .util import get_pad_tuple
 from ..util import simplify
-from .. import target as _target

-# workload description of convolution
+# workload description of conv2d
 Workload = namedtuple('Workload',
                       ['height', 'width', 'in_filter', 'out_filter',
                        'hkernel', 'wkernel', 'hpad', 'wpad', 'hstride', 'wstride'])
@@ -43,8 +43,8 @@
 # platform specific declaration
 _CONV_DECLARATION = {}

-def convolution(data, kernel, stride, padding, layout='NCHW'):
-    """Convolution operator.
+def conv2d(data, kernel, stride, padding, layout='NCHW'):
+    """Conv2D operator.

     Parameters
     ----------
@@ -75,9 +75,9 @@ def convolution(data, kernel, stride, padding, layout='NCHW'):

     # default declaration
     if layout == 'NCHW':
-        conv2d_nchw(data, kernel, stride, padding)
+        return conv2d_nchw(data, kernel, stride, padding)
     elif layout == 'HWCN':
-        conv2d_hwcn(data, kernel, stride, padding)
+        return conv2d_hwcn(data, kernel, stride, padding)
     else:
         raise ValueError("not support this layout {} yet".format(layout))
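Aside from the rename, the last hunk also fixes a silent bug: the default declaration used to compute the NCHW/HWCN result and then drop it, so `conv2d` fell through to returning `None` when the default path was taken. A minimal sketch of what the fixed default path enables for a generic (non-rasp) caller; the shapes and the plain `llvm` target are illustrative only:

```python
import tvm
import topi

# Illustrative shapes; any valid NCHW workload would do.
A = tvm.placeholder((1, 64, 56, 56), name='A')
W = tvm.placeholder((64, 64, 3, 3), name='W')

# With the added `return`, the default declaration hands back the output
# tensor, so it can be scheduled and built directly.
B = topi.nn.conv2d(A, W, stride=1, padding=1, layout='NCHW')
s = tvm.create_schedule(B.op)
f = tvm.build(s, [A, W, B], "llvm")
```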
topi/python/topi/rasp/__init__.py (1 addition, 1 deletion)
@@ -2,4 +2,4 @@
 """Raspberry pi specific declaration and schedules."""
 from __future__ import absolute_import as _abs

-from .convolution import *
+from .conv2d import *
topi/python/topi/rasp/convolution.py → topi/python/topi/rasp/conv2d.py
@@ -1,13 +1,13 @@
 # pylint: disable=invalid-name,unused-variable,invalid-name
-"""Convolution schedule on raspberry pi"""
+"""Conv2D schedule on raspberry pi"""
 from __future__ import absolute_import as _abs
 import tvm
-from .. import target as _target
+from tvm import target as _target
 from .. import tag
-from ..nn.convolution import SpatialPack, Im2ColPack
-from ..nn.convolution import _CONV_DECLARATION, _CONV_SCHEDULE
-from ..nn.convolution import _WORKLOADS, _SCH_TO_DECL_FUNC
-from ..nn.convolution import _get_workload, _get_schedule
+from ..nn.conv2d import SpatialPack, Im2ColPack
+from ..nn.conv2d import _CONV_DECLARATION, _CONV_SCHEDULE
+from ..nn.conv2d import _WORKLOADS, _SCH_TO_DECL_FUNC
+from ..nn.conv2d import _get_workload, _get_schedule
 from ..nn.util import infer_pad, infer_stride

 _SCHEDULES = [
@@ -264,7 +264,7 @@ def _schedule_im2col_conv2d(s, data, data_pad, data_col, data_vec,

     return s

-def schedule_convolution(outs):
+def schedule_conv2d(outs):
     """Create schedule for tensors"""
     s = tvm.create_schedule([x.op for x in outs])

test_topi_convolution.py → test_topi_conv2d.py
@@ -1,4 +1,4 @@
-"""Example code to do convolution."""
+"""Example code to do conv2d."""
 import os
 import numpy as np
 import tvm
@@ -7,20 +7,20 @@
 from topi.util import get_const_tuple


-def verify_convolution(batch, in_size, in_channel, num_filter, kernel, stride, padding):
+def verify_conv2d(batch, in_size, in_channel, num_filter, kernel, stride, padding):
     in_height = in_width = in_size

-    with topi.target.rasp():
+    with tvm.target.rasp():
         A = tvm.placeholder((batch, in_channel, in_height, in_width), name='A')
         W = tvm.placeholder((num_filter, in_channel, kernel, kernel), name='W')
-        B = topi.nn.convolution(A, W, stride, padding)
+        B = topi.nn.conv2d(A, W, stride, padding)

-        s = topi.rasp.schedule_convolution([B])
+        s = topi.rasp.schedule_conv2d([B])
         a_shape = get_const_tuple(A.shape)
         w_shape = get_const_tuple(W.shape)
         dtype = A.dtype

-        @memoize("topi.tests.test_topi_convolution.verify_convolution")
+        @memoize("topi.tests.test_topi_conv2d.verify_conv2d")
         def get_ref_data():
             a_np = np.random.uniform(size=a_shape).astype(dtype)
             w_np = np.random.uniform(size=w_shape).astype(dtype)
@@ -37,8 +37,8 @@ def get_ref_data():
         func(a, w, b)
         np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)

-def test_convolution():
-    verify_convolution(1, 56, 64, 64, 3, 1, 1)
+def test_conv2d():
+    verify_conv2d(1, 56, 64, 64, 3, 1, 1)

 if __name__ == "__main__":
-    test_convolution()
+    test_conv2d()
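The test compares the TVM result against a NumPy reference with `rtol=1e-5`; the reference computation itself is elided in the collapsed part of the diff. The snippet below is an illustrative (not the test's actual) NumPy reference for an NCHW conv2d with symmetric padding:

```python
import numpy as np

def conv2d_nchw_ref(a_np, w_np, stride, padding):
    """Naive NCHW conv2d reference, for illustration only."""
    batch, in_c, in_h, in_w = a_np.shape
    out_c, _, k_h, k_w = w_np.shape
    out_h = (in_h + 2 * padding - k_h) // stride + 1
    out_w = (in_w + 2 * padding - k_w) // stride + 1
    # Zero-pad the spatial dimensions, then slide the kernel window.
    a_pad = np.pad(a_np, ((0, 0), (0, 0), (padding, padding), (padding, padding)),
                   mode='constant')
    b_np = np.zeros((batch, out_c, out_h, out_w), dtype=a_np.dtype)
    for n in range(batch):
        for f in range(out_c):
            for y in range(out_h):
                for x in range(out_w):
                    patch = a_pad[n, :, y * stride:y * stride + k_h,
                                  x * stride:x * stride + k_w]
                    b_np[n, f, y, x] = np.sum(patch * w_np[f])
    return b_np
```

With the workload exercised by `test_conv2d` (batch 1, 56x56 input, 64 in/out channels, 3x3 kernel, stride 1, padding 1), the output keeps the 56x56 spatial size: (56 + 2 - 3) // 1 + 1 = 56.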