
Commit

jcf94 committed Jun 22, 2020
1 parent 53bd591 commit 227d8f2
Showing 51 changed files with 53 additions and 8,734 deletions.
1 change: 0 additions & 1 deletion docs/conf.py
@@ -198,7 +198,6 @@
'../tutorials/language',
'../tutorials/optimize',
'../tutorials/autotvm',
'../tutorials/ansor',
'../tutorials/dev',
'../tutorials/topi',
'../tutorials/deployment',
13 changes: 0 additions & 13 deletions include/tvm/relay/attrs/transform.h
@@ -296,19 +296,6 @@ struct LayoutTransformAttrs : public tvm::AttrsNode<LayoutTransformAttrs> {
}
};

/*! \brief Attributes for KernelLayoutTransform operator */
struct KernelLayoutTransformAttrs : public tvm::AttrsNode<KernelLayoutTransformAttrs> {
std::string src_layout;
std::string dst_layout;

TVM_DECLARE_ATTRS(KernelLayoutTransformAttrs, "relay.attrs.KernelLayoutTransformAttrs") {
TVM_ATTR_FIELD(src_layout)
.describe("The source layout of the tensor. (e.g. 1N32C112H112W)");
TVM_ATTR_FIELD(dst_layout)
.describe("The destination layout of the tensor. (e.g. 1N2C112H112W16c)");
}
};

/*! \brief Attributes for ShapeOf operator */
struct ShapeOfAttrs : public tvm::AttrsNode<ShapeOfAttrs> {
DataType dtype;
14 changes: 0 additions & 14 deletions include/tvm/relay/transform.h
@@ -277,20 +277,6 @@ TVM_DLL Pass CanonicalizeOps();
*/
TVM_DLL Pass AlterOpLayout();

/*!
* \brief Alternate the layouts of kernels.
*
* \return The pass.
*/
TVM_DLL Pass KernelLayoutTransform();

/*!
* \brief The reverse of FuseOps.
*
* \return The pass.
*/
TVM_DLL Pass DeFuseOps();

/*!
* \brief Given a dest layout, this pass transforms the expr such that most of the ops input data
* layout is changed to the dest layout. In ideal situation, there are only 2 layout transforms, one
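For context, a hedged sketch of how the two removed passes might have been composed from Python, assuming they were exposed under relay.transform with names mirroring the C++ declarations above; those bindings are assumptions and no longer exist after this commit:

# Hypothetical composition of the removed passes with standard Relay passes.
# KernelLayoutTransform and DeFuseOps are assumed Python bindings mirroring
# the C++ declarations above; both are removed by this commit.
import tvm
from tvm import relay

seq = tvm.transform.Sequential([
    relay.transform.CanonicalizeOps(),
    relay.transform.AlterOpLayout(),
    relay.transform.KernelLayoutTransform(),  # removed by this commit
    relay.transform.DeFuseOps(),              # removed by this commit
])
# mod = seq(mod)  # apply to a relay.IRModule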
14 changes: 1 addition & 13 deletions python/tvm/ansor/__init__.py
@@ -21,26 +21,14 @@
from . import measure
from . import serialization
from . import loop_state
from . import auto_schedule
from . import utils
from . import feature
from . import workload_registry
from . import task_scheduler

# Shortcut
from .compute_dag import ComputeDAG, LayoutRewriteLevel
from .auto_schedule import SearchTask, SketchSearchPolicy, TuneOption, HardwareParams, \
PreloadMeasuredStates, PreloadCustomSketchRule, auto_schedule
from .auto_schedule import SearchTask, HardwareParams
from .measure import MeasureInput, LocalBuilder, LocalRunner, RPCRunner, LocalRPCMeasureContext
from .cost_model import RandomModel
from .cost_model.xgb_model import XGBModel
from .serialization import LogToFile, LogReader, best_measure_pair_in_file, \
load_from_file, write_measure_records_to_file
from .workload_registry import register_workload_func, \
workload_key_to_dag, make_workload_key_func
from .task_scheduler import TaskScheduler, SimpleTaskScheduler
from .dispatcher import DispatchContext, ApplyConfig, ApplyHistoryBest as apply_history_best, \
FallbackContext
from .relay_integration import extract_from_program, extract_from_multiple_program, \
finish_layout_rewrite, prepare_layout_rewrite, auto_schedule_topi
from .env import GLOBAL_SCOPE
204 changes: 0 additions & 204 deletions python/tvm/ansor/auto_schedule.py
@@ -22,7 +22,6 @@
import tvm._ffi
from tvm.runtime import Object
from .measure import LocalBuilder, LocalRunner
from .cost_model import RandomModel, XGBModel
from . import _ffi_api


@@ -64,206 +63,3 @@ def __init__(self, dag, workload_key, target, target_host=None,
self.__init_handle_by_constructor__(_ffi_api.SearchTask, dag,
workload_key, target, target_host,
hardware_params)


@tvm._ffi.register_object("ansor.SearchPolicy")
class SearchPolicy(Object):
""" The base class for search policy """
def continue_search(self, task, num_measure, verbose, measurer):
return _ffi_api.SearchPolicyContinueSearchOneRound(self, task,
num_measure, verbose, measurer)

def set_task(self, task):
_ffi_api.SearchPolicySetTask(self, task)

def set_verbose(self, verbose):
_ffi_api.SearchPolicySetVerbose(self, verbose)

def run_callbacks(self, callbacks):
_ffi_api.SearchPolicyRunCallbacks(self, callbacks)


@tvm._ffi.register_object("ansor.SketchSearchPolicy")
class SketchSearchPolicy(SearchPolicy):
""" The search policy that searches in a hierarchical search space defined by sketches.
The policy randomly samples programs from the space defined by sketches
and uses evolutionary search to fine-tune them.
Parameters
----------
program_cost_model: CostModel
Cost model for programs
params: dict
Parameters of the search policy. See `src/ansor/search_policy/sketch_search_policy.h`
to find the definitions. See code below to find the default values
seed: int
Random seed
"""
def __init__(self,
program_cost_model,
params=None,
seed=None):
# set default parameters
default_params = {
"eps_greedy": 0.05,

'evolutionary_search_population': 2048,
'evolutionary_search_num_iters': 15,
"evolutionary_search_mutation_prob": 0.85,
"evolutionary_search_use_measured_ratio": 0.2,

'cpu_multi_level_tiling_structure': 'SSRSRS',
'gpu_multi_level_tiling_structure': 'SSSRRSRS',

'disable_change_compute_location': 0,
}

if params is None:
params = default_params
else:
for key, value in default_params.items():
if key not in params:
params[key] = value

self.__init_handle_by_constructor__(
_ffi_api.SketchSearchPolicy, program_cost_model, params,
seed or random.randint(1, 1 << 30))

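For reference, a minimal sketch of constructing this (now-removed) policy, based only on the signature above; it assumes the pre-commit tvm.ansor exports (SketchSearchPolicy, RandomModel) that this commit deletes from the package __init__:

# Sketch of the pre-commit API; these names are removed by this commit.
from tvm import ansor

cost_model = ansor.RandomModel()  # RandomModel() takes no arguments, as in
                                  # the default branch of auto_schedule() below
policy = ansor.SketchSearchPolicy(
    cost_model,
    params={'evolutionary_search_num_iters': 10},  # merged with the defaults above
    seed=42)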

@tvm._ffi.register_object("ansor.SearchCallback")
class SearchCallback(Object):
"""Callback function before or after search process"""
pass


@tvm._ffi.register_object("ansor.PreloadMeasuredStates")
class PreloadMeasuredStates(SearchCallback):
""" A SearchCallback to load measured states from the log file for a search policy.
This can be used to resume the state of the search policy.
Parameters
----------
filename: str
"""
def __init__(self, filename: str):
self.__init_handle_by_constructor__(
_ffi_api.PreloadMeasuredStates, filename)


@tvm._ffi.register_object("ansor.PreloadCustomSketchRule")
class PreloadCustomSketchRule(SearchCallback):
"""
A SearchCallback for SketchSearchPolicy that allows users to add
a custom sketch rule.
Notes
-----
This is an advanced feature. Make sure you understand how it
works; it should only be used with SketchSearchPolicy.
Parameters
----------
meet_condition_func: Function
A function with `(policy, state, stage_id) -> int`
apply_func: Function
A function with `(policy, state, stage_id) -> [[State, int], ...]`
"""
def __init__(self, meet_condition_func, apply_func):
self.__init_handle_by_constructor__(
_ffi_api.PreloadCustomSketchRule, meet_condition_func, apply_func)

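A hedged sketch of the two callbacks this (now-removed) class expects, following only the signatures documented above; the rule logic is purely illustrative:

def meet_condition_func(policy, state, stage_id):
    # Return non-zero when the custom sketch rule should fire on this stage.
    # (The condition here is a placeholder.)
    return 1

def apply_func(policy, state, stage_id):
    # Return a list of [State, int] pairs as documented above; returning the
    # state unchanged and stepping to the previous stage is a trivial rule.
    return [[state, stage_id - 1]]

rule = PreloadCustomSketchRule(meet_condition_func, apply_func)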

@tvm._ffi.register_object("ansor.TuneOption")
class TuneOption(Object):
""" The options for tuning
Parameters
----------
n_trials: int
Number of total measurement trials
early_stopping: int
Stop the tuning early if there is no improvement after n measurements
num_measure_per_iter: int
The number of programs to be measured at each iteration
verbose: int
Verbosity level. 0 means silent.
builder: Builder
Builder which builds the program
runner: Runner
Runner which runs the program and measures time costs
measure_callbacks: List[MeasureCallback]
Callback functions called after each measure
Candidates:
- ansor.LogToFile
pre_search_callbacks: List[SearchCallback]
Callback functions called before the search process
Candidates:
- ansor.PreloadMeasuredStates
- ansor.PreloadCustomSketchRule
"""
def __init__(self, n_trials=0, early_stopping=-1, num_measure_per_iter=64,
verbose=1, builder='local', runner='local', measure_callbacks=None,
pre_search_callbacks=None):
if isinstance(builder, str):
if builder == 'local':
builder = LocalBuilder()
else:
raise ValueError("Invalid builder: " + builder)

if isinstance(runner, str):
if runner == 'local':
runner = LocalRunner()
else:
raise ValueError("Invalid builder: " + runner)

if measure_callbacks is None:
measure_callbacks = []

if pre_search_callbacks is None:
pre_search_callbacks = []

self.__init_handle_by_constructor__(
_ffi_api.TuneOption, n_trials, early_stopping, num_measure_per_iter,
verbose, builder, runner, measure_callbacks, pre_search_callbacks)

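For reference, a sketch of constructing the removed TuneOption using only names visible in this diff; the file name is illustrative, and LogToFile / PreloadMeasuredStates taking a file name is based on the docstrings above rather than signatures shown here:

# Illustrative construction of the removed TuneOption.
from tvm import ansor

tune_option = ansor.TuneOption(
    n_trials=200,                      # total measurement trials
    early_stopping=50,                 # stop after 50 trials without improvement
    num_measure_per_iter=64,
    builder='local',                   # resolved to LocalBuilder() above
    runner='local',                    # resolved to LocalRunner() above
    measure_callbacks=[ansor.LogToFile('ansor_tuning.json')],
    pre_search_callbacks=[ansor.PreloadMeasuredStates('ansor_tuning.json')])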

def auto_schedule(workload, target=None,
target_host=None, search_policy='default',
hardware_params=None, tune_option=None):
""" Do auto scheduling for a computation declaration.
The workload parameter can be a `string` (a workload key) or a `SearchTask`.
Parameters
----------
workload : Union[SearchTask, str]
target : Target
target_host : Target = None
search_policy : Union[SearchPolicy, str]
hardware_params : HardwareParams
tune_option : TuneOption
Returns
-------
sch : tvm.Schedule
tensors : List[Tensor]
"""
if isinstance(search_policy, str):
if search_policy == 'default':
search_policy = SketchSearchPolicy(RandomModel())
else:
raise ValueError("Invalid search policy: " + search_policy)

if tune_option is None:
tune_option = TuneOption(n_trials=0)

if isinstance(workload, str):
sch, tensors = _ffi_api.AutoScheduleByWorkloadKey(
workload, target, target_host, search_policy, hardware_params, tune_option)
return sch, tensors
elif isinstance(workload, SearchTask):
sch, tensors = _ffi_api.AutoScheduleBySearchTask(workload, search_policy, tune_option)
return sch, tensors
else:
raise ValueError("Invalid workload: " + workload + ". Expect a string or SearchTask")
21 changes: 0 additions & 21 deletions python/tvm/ansor/cost_model/__init__.py

This file was deleted.

78 changes: 0 additions & 78 deletions python/tvm/ansor/cost_model/cost_model.py

This file was deleted.
