[LINT][PY] Fixes for pylint==2.4.4

tqchen committed Feb 9, 2020
1 parent b46c254 commit 7506fd6
Showing 96 changed files with 238 additions and 278 deletions.
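Most of the hunks below are mechanical responses to checks that pylint 2.4.4 either introduced or tightened: adding "# pylint: disable=import-outside-toplevel" above deliberately deferred imports, flattening else branches that follow a return (no-else-return), dropping redundant pass statements and identity comprehensions, and removing leftover Python 2 fallbacks. The sketch below uses hypothetical names and is not taken from any file in this commit; it only illustrates the two most common patterns.

def load_module(name):
    """Deferred import; pylint 2.4 reports import-outside-toplevel unless it is disabled."""
    # pylint: disable=import-outside-toplevel
    import importlib
    return importlib.import_module(name)

def error_name(line):
    """no-else-return style: return early instead of wrapping the fallback in an else."""
    end_pos = line.find(":")
    if end_pos == -1:
        return None
    return line[:end_pos]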
5 changes: 1 addition & 4 deletions Makefile
@@ -94,10 +94,7 @@ javadoc:

# Cython build
cython:
cd python; python setup.py build_ext --inplace

cython2:
cd python; python2 setup.py build_ext --inplace
cd python; python3 setup.py build_ext --inplace

cython3:
cd python; python3 setup.py build_ext --inplace
16 changes: 8 additions & 8 deletions python/tvm/_ffi/base.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name
# pylint: disable=invalid-name, import-outside-toplevel
"""Base library for TVM FFI."""
import sys
import os
@@ -204,14 +204,14 @@ def _find_error_type(line):
if _valid_error_name(err_name):
return err_name
return None
else:
end_pos = line.find(":")
if end_pos == -1:
return None
err_name = line[:end_pos]
if _valid_error_name(err_name):
return err_name

end_pos = line.find(":")
if end_pos == -1:
return None
err_name = line[:end_pos]
if _valid_error_name(err_name):
return err_name
return None


def c2pyerror(err_msg):
1 change: 1 addition & 0 deletions python/tvm/autotvm/database.py
@@ -104,6 +104,7 @@ class RedisDatabase(Database):
MAGIC_SPLIT = "$"

def __init__(self, db_index=REDIS_PROD):
# pylint: disable=import-outside-toplevel
import redis

if db_index == RedisDatabase.REDIS_TEST:
3 changes: 2 additions & 1 deletion python/tvm/autotvm/feature.py
@@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
# pylint: disable=invalid-name,
"""Extract feature of iter vars
There are two types of feature
@@ -148,6 +148,7 @@ def get_flatten_name(fea):
}

if isinstance(fea, str):
# pylint: disable=import-outside-toplevel
from .record import decode
# flatten line to feature
line = fea
1 change: 0 additions & 1 deletion python/tvm/autotvm/graph_tuner/base_graph_tuner.py
@@ -539,4 +539,3 @@ def write_opt_sch2record_file(self, record_file="graph_opt_schedule.log"):
@abstractmethod
def run(self, **kwargs):
"""Run graph tuning."""
pass
1 change: 1 addition & 0 deletions python/tvm/autotvm/graph_tuner/utils/traverse_graph.py
@@ -65,6 +65,7 @@ def expr2graph(expr, target_ops, node_dict, node_list):
% op_name)
topi_funcs += OP2COMPUTE[op_name]
env.reset(topi_funcs)
# pylint: disable=not-context-manager
with env:
_expr2graph_impl(expr, target_ops, node_dict, node_list)
task_pos = 0
1 change: 1 addition & 0 deletions python/tvm/autotvm/measure/measure.py
@@ -208,6 +208,7 @@ def measure_option(builder, runner):
Using `min_repeat_ms` can dynamically adjusts `number`, so it is recommended.
The typical value for NVIDIA GPU is 150 ms.
"""
# pylint: disable=import-outside-toplevel
from .measure_methods import LocalBuilder, LocalRunner

if isinstance(builder, str):
6 changes: 4 additions & 2 deletions python/tvm/autotvm/measure/measure_methods.py
@@ -324,11 +324,11 @@ def __init__(self,
self.server = None

def set_task(self, task):
self.task = task

# pylint: disable=import-outside-toplevel
from ...rpc.tracker import Tracker
from ...rpc.server import Server

self.task = task
tracker = Tracker('0.0.0.0', port=9000, port_end=10000, silent=True)
device_key = '$local$device$%d' % tracker.port
server = Server('0.0.0.0', port=9000, port_end=10000,
@@ -362,6 +362,7 @@ def _build_func_common(measure_input, check_gpu=None, cuda_arch=None, build_opti
# if target is vta, we need to use vta build
if hasattr(measure_input.target, 'device_name') and \
measure_input.target.device_name == 'vta':
# pylint: disable=import-outside-toplevel
import vta
func = vta.build(s, args, target_host=task.target_host)
else:
@@ -460,6 +461,7 @@ def run_through_rpc(measure_input, build_result,
# Program the FPGA every single time when targeting VTA
if hasattr(measure_input.target, 'device_name') and \
measure_input.target.device_name == 'vta':
# pylint: disable=import-outside-toplevel
from vta import program_fpga, reconfig_runtime
program_fpga(remote, None)
reconfig_runtime(remote)
2 changes: 2 additions & 0 deletions python/tvm/autotvm/task/dispatcher.py
@@ -282,6 +282,7 @@ def load(self, records):
Each row of this file is an encoded record pair.
Otherwise, it is an iterator.
"""
# pylint: disable=import-outside-toplevel
from pathlib import Path
from ..record import load_from_file

@@ -454,6 +455,7 @@ def __init__(self, records):
Each row of this file is an encoded record pair.
Otherwise, it is an iterator.
"""
# pylint: disable=import-outside-toplevel
from ..record import load_from_file

super(ApplyGraphBest, self).__init__()
5 changes: 3 additions & 2 deletions python/tvm/autotvm/task/relay_integration.py
@@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-variable,invalid-name
# pylint: disable=unused-variable,invalid-name, not-context-manager
"""
Decorator and utilities for the integration with TOPI and Relay
99.9% copy-paste of implementation by @MerryMercy
@@ -37,7 +37,7 @@ def _lower(mod,
params):
""" Helper to lower VTA properly.
"""

# pylint: disable=import-outside-toplevel
from tvm import relay
from tvm.relay.backend import graph_runtime_codegen

@@ -114,6 +114,7 @@ def extract_from_multiple_program(mods, params, ops, target, target_host=None,
task: Array of autotvm.task.Task
collected tasks
"""
# pylint: disable=import-outside-toplevel
import tvm.relay.op
from tvm import relay
import topi
2 changes: 2 additions & 0 deletions python/tvm/autotvm/task/topi_integration.py
@@ -76,6 +76,7 @@ class TaskExtractEnv:
registered = None

def __init__(self, allow_duplicate=False):
# pylint: disable=import-outside-toplevel
import topi

# topi compute -> autotvm task name
@@ -168,6 +169,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):

def _register_topi_task(self):
"""register tuning wrapper for topi function"""
# pylint: disable=import-outside-toplevel
import topi

# Avoid double registration for certain targets
1 change: 1 addition & 0 deletions python/tvm/autotvm/tophub.py
@@ -147,6 +147,7 @@ def check_backend(tophub_location, backend):
if os.path.isfile(os.path.join(AUTOTVM_TOPHUB_ROOT_PATH, package_name)):
return True

# pylint: disable=import-outside-toplevel
if sys.version_info >= (3,):
import urllib.request as urllib2
else:
1 change: 1 addition & 0 deletions python/tvm/autotvm/tuner/callback.py
@@ -53,6 +53,7 @@ def _callback(_, inputs, results):
for inp, result in zip(inputs, results):
file_out.write(record.encode(inp, result, protocol) + "\n")

# pylint: disable=import-outside-toplevel
from pathlib import Path
if isinstance(file_out, Path):
file_out = str(file_out)
2 changes: 1 addition & 1 deletion python/tvm/autotvm/tuner/sa_model_optimizer.py
@@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=consider-using-enumerate, invalid-name
# pylint: disable=consider-using-enumerate, invalid-name, invalid-sequence-index
"""
Cost model optimizer based on simulated annealing
"""
1 change: 1 addition & 0 deletions python/tvm/autotvm/tuner/xgboost_cost_model.py
@@ -420,6 +420,7 @@ def _extract_curve_feature_log(arg):
def custom_callback(stopping_rounds, metric, fevals, evals=(), log_file=None,
maximize=False, verbose_eval=True):
"""callback function for xgboost to support multiple custom evaluation functions"""
# pylint: disable=import-outside-toplevel
from xgboost.core import EarlyStopException
from xgboost.callback import _fmt_metric
from xgboost.training import aggcv
2 changes: 1 addition & 1 deletion python/tvm/build_module.py
@@ -467,7 +467,7 @@ def _build_for_device(flist, target, target_host):
func = ir_pass.InferFragment(func)
warp_size = target.thread_warp_size
func = ir_pass.LowerThreadAllreduce(func, warp_size)
fsplits = [s for s in ir_pass.SplitHostDevice(func)]
fsplits = list(ir_pass.SplitHostDevice(func))
fhost.append(fsplits[0])
for x in fsplits[1:]:
fdevice.append(x)
3 changes: 1 addition & 2 deletions python/tvm/contrib/cc.py
@@ -76,8 +76,7 @@ def get_target_triple():
msg += py_str(out)
return None
return py_str(out)
else:
return None
return None

return get_target_triple

1 change: 1 addition & 0 deletions python/tvm/contrib/dlpack.py
@@ -54,6 +54,7 @@ def to_pytorch_func(tvm_func):
wrapped_func: Function
Wrapped tvm function that operates on PyTorch tensors
"""
# pylint: disable=import-outside-toplevel
import torch
import torch.utils.dlpack
return convert_func(tvm_func, torch.Tensor, torch.utils.dlpack.to_dlpack)
14 changes: 4 additions & 10 deletions python/tvm/contrib/download.py
@@ -15,9 +15,6 @@
# specific language governing permissions and limitations
# under the License.
"""Helper utility for downloading"""
from __future__ import print_function
from __future__ import absolute_import as _abs

import os
import sys
import time
@@ -48,10 +45,8 @@ def download(url, path, overwrite=False, size_compare=False, verbose=1, retries=
retries: int, optional
Number of time to retry download, default at 3.
"""
if sys.version_info >= (3,):
import urllib.request as urllib2
else:
import urllib2
# pylint: disable=import-outside-toplevel
import urllib.request as urllib2

if os.path.isfile(path) and not overwrite:
if size_compare:
@@ -114,9 +109,8 @@ def _download_progress(count, block_size, total_size):
if os.path.exists(tempfile):
os.remove(tempfile)
raise err
else:
print("download failed due to {}, retrying, {} attempt{} left"
.format(repr(err), retries, 's' if retries > 1 else ''))
print("download failed due to {}, retrying, {} attempt{} left"
.format(repr(err), retries, 's' if retries > 1 else ''))


if "TEST_DATA_ROOT_PATH" in os.environ:
2 changes: 1 addition & 1 deletion python/tvm/contrib/mxnet.py
@@ -49,7 +49,7 @@ def to_mxnet_func(func, const_loc=None):
Run asynchrously in MXNet's async engine.
"""
# only import mxnet when wrap get called.
# pylint: disable=import-self
# pylint: disable=import-self, import-outside-toplevel
import mxnet
if isinstance(func, Module):
func = func.entry_func
33 changes: 0 additions & 33 deletions python/tvm/contrib/util.py
@@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
"""Common system utilities"""
from __future__ import absolute_import as _abs
import os
import tempfile
import shutil
@@ -167,35 +166,3 @@ def which(exec_name):
if os.path.isfile(full_path) and os.access(full_path, os.X_OK):
return full_path
return None

def get_lower_ir(s):
"""Get lower ir code of a schedule.
This is useful for debug, since you don't have to find all inputs/outputs
for a schedule in a fused subgraph.
Parameters
----------
s: Schedule
Returns
-------
ir: str
The lower ir
"""
from .. import tensor
from ..build_module import lower

outputs = s.outputs

inputs = []
def find_all(op):
if isinstance(op, tensor.PlaceholderOp):
inputs.append(op.output(0))
else:
for x in op.input_tensors:
find_all(x.op)

for out in outputs:
find_all(out)

return lower(s, inputs, simple_mode=True)
3 changes: 2 additions & 1 deletion python/tvm/hybrid/__init__.py
@@ -50,7 +50,8 @@ def script(pyfunc):
hybrid_func : function
A decorated hybrid script function.
"""
def wrapped_func(func, *args, **kwargs): #pylint: disable=missing-docstring
# pylint: disable=import-outside-toplevel, missing-docstring
def wrapped_func(func, *args, **kwargs):
from .util import _is_tvm_arg_types
if _is_tvm_arg_types(args):
src = _pruned_source(func)
1 change: 1 addition & 0 deletions python/tvm/hybrid/calls.py
@@ -69,6 +69,7 @@ def bind(func_id, args):


def _math_intrin(func_id, args):
# pylint: disable=import-outside-toplevel
from .. import intrin
return getattr(intrin, func_id)(*args)

2 changes: 1 addition & 1 deletion python/tvm/hybrid/parser.py
@@ -198,7 +198,7 @@ def wrap_up_realize(self, node, body):
ty, entry = self.symbols[key] #pylint: disable=invalid-name
if ty in [Symbol.Input, Symbol.OutputBuffer]:
continue
elif 'Buffer' in ty.name:
if 'Buffer' in ty.name:
_buf = entry
_scope = 'global' if ty is Symbol.BufferVar else ty.name[:-6].lower()
to_pop.append(key)
1 change: 1 addition & 0 deletions python/tvm/hybrid/util.py
@@ -70,6 +70,7 @@ def _pruned_source(func):

def replace_io(body, rmap):
"""Replacing tensors usage according to the dict given"""
# pylint: disable=import-outside-toplevel
from .. import ir_pass

def replace(op):
6 changes: 3 additions & 3 deletions python/tvm/relay/_parser.py
@@ -78,7 +78,7 @@ def __str__(self):

class OpWrapper:
"""Overload the __call__ for op."""
pass


class ExprOp(OpWrapper):
"""Call an expr. The default, but does not handle attrs well."""
@@ -273,7 +273,7 @@ def _check_existing_typ_expr(self, name, new_expr):
def _type_expr_name(self, e):
if isinstance(e, adt.Constructor):
return "`{0}` ADT constructor".format(e.belong_to.name_hint)
elif isinstance(e, ty.GlobalTypeVar):
if isinstance(e, ty.GlobalTypeVar):
if e.kind == ty.Kind.AdtHandle:
return "ADT definition"
return "function definition"
@@ -623,7 +623,7 @@ def visitCallWithAttr(self, ctx: RelayParser.CallWithAttrContext):
def call(self, func, args, attrs, type_args):
if isinstance(func, OpWrapper):
return func(args, attrs, type_args)
elif isinstance(func, adt.Constructor):
if isinstance(func, adt.Constructor):
return func(*args)
return expr.Call(func, args, attrs, type_args)

2 changes: 1 addition & 1 deletion python/tvm/relay/analysis.py
@@ -384,7 +384,7 @@ def detect_feature(a, b=None):
"""
if isinstance(a, Module):
a, b = b, a
return set([Feature(int(x)) for x in _analysis.detect_feature(a, b)])
return {Feature(int(x)) for x in _analysis.detect_feature(a, b)}


def structural_hash(value):
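The analysis.py hunk above replaces set() wrapped around a list comprehension with a set comprehension, and the build_module.py hunk makes the analogous change by calling list() on the iterator directly; both quiet comprehension-related pylint 2.4.4 messages (likely consider-using-set-comprehension and unnecessary-comprehension). A minimal standalone sketch of the set version, using a stand-in list where the real code calls _analysis.detect_feature(a, b):

codes = [0, 1, 1, 2]  # stand-in for the feature codes returned by _analysis.detect_feature
old_style = set([int(x) for x in codes])  # builds a throwaway list before the set
new_style = {int(x) for x in codes}       # set comprehension, same elements
assert old_style == new_style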