Skip to content

Commit

Permalink
add tutorial
Browse files Browse the repository at this point in the history
  • Loading branch information
merrymercy committed Sep 16, 2020
1 parent d9eb9e7 commit 48aa0e9
Show file tree
Hide file tree
Showing 7 changed files with 114 additions and 43 deletions.
35 changes: 35 additions & 0 deletions docs/api/python/auto_scheduler.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
.. Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
.. http://www.apache.org/licenses/LICENSE-2.0
.. Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
tvm.auto_scheduler
------------------
.. automodule:: tvm.auto_scheduler

tvm.auto_scheduler.auto_schedule
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: tvm.auto_scheduler.auto_schedule

.. autoclass:: tvm.auto_scheduler.auto_schedule.SearchTask

.. autoclass:: tvm.auto_scheduler.auto_schedule.TuningOptions

.. autofunction:: tvm.auto_scheduler.auto_schedule.create_task

.. autofunction:: tvm.auto_scheduler.auto_schedule.auto_schedule



2 changes: 1 addition & 1 deletion docs/api/python/autotvm.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
tvm.autotvm
-----------
.. automodule:: tvm.autotvm
.. automodule:: tvm.autotvm.apply_history_best
.. autofunction:: tvm.autotvm.apply_history_best

tvm.autotvm.measure
~~~~~~~~~~~~~~~~~~~
Expand Down
1 change: 1 addition & 0 deletions docs/api/python/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ Python API
relay/dataflow_pattern
relay/testing
autotvm
auto_scheduler
rpc
micro
contrib
Expand Down
19 changes: 12 additions & 7 deletions python/tvm/auto_scheduler/auto_schedule.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,10 +31,10 @@
import tvm._ffi
from tvm.runtime import Object
from .measure import LocalBuilder, LocalRunner
from .workload_registry import make_workload_key, workload_key_to_tensors
from .workload_registry import make_workload_key
from .compute_dag import ComputeDAG
from .search_policy import EmptyPolicy
from .utils import get_func_name
from .cost_model import XGBModel
from .search_policy import SketchPolicy
from . import _ffi_api


Expand Down Expand Up @@ -158,6 +158,7 @@ def __init__(
measure_callbacks,
)


def create_task(func, args, target, target_host=None, hardware_params=None):
"""Create a search task
Expand All @@ -183,16 +184,16 @@ def create_task(func, args, target, target_host=None, hardware_params=None):
dag = ComputeDAG(workload_key)
return SearchTask(dag, workload_key, target, target_host, hardware_params)


def auto_schedule(task, search_policy=None, tuning_options=TuningOptions()):
"""Do auto scheduling for a computation declaration.
"""Run auto scheduling search for a task
Parameters
----------
task : SearchTask
The SearchTask for the computation declaration.
search_policy : Optional[SearchPolicy]
The search policy to be used for schedule search. Use EmptyPolicy as default, which always
returns an empty schedule.
The search policy to be used for schedule search.
tuning_options : Optional[TuningOptions]
Tuning and measurement options.
Expand All @@ -205,5 +206,9 @@ def auto_schedule(task, search_policy=None, tuning_options=TuningOptions()):
"Invalid task: " + task + " . `auto_scheduler.auto_schedule` expects a SearchTask."
)

sch, tensors = _ffi_api.AutoSchedule(search_policy or EmptyPolicy(task), tuning_options)
if search_policy is None:
cost_model = XGBModel()
search_policy = SketchPolicy(task, cost_model)

sch, tensors = _ffi_api.AutoSchedule(search_policy, tuning_options)
return sch, tensors
2 changes: 1 addition & 1 deletion tutorials/auto_scheduler/README.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
AutoScheduler : Template-free Auto Scheduling
-----------
---------------------------------------------
96 changes: 63 additions & 33 deletions tutorials/auto_scheduler/tune_matmul_x86.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,12 @@
**Author**: `Lianmin Zheng <https://github.com/merrymercy>`_, \
`Chengfan Jia <https://github.com/jcf94/>`_
This is a tutorial on how to use the auto-scheduler in TVM.
Different from the existing autotvm which relies on manual templates to
define the search space, the auto-scheduler does not require any templates.
The user only needs to write the computation declaration,
the auto-scheduler then automatically generate a large
search space and begins the search (or auto-tuning).
Different from the existing :ref:`autotvm <tutorials-autotvm-sec>` which relies on
manual templates to define the search space, the auto-scheduler does not require any templates.
The auto-scheduler is template-free, so users only need to write the computation declaration without
any schedule commands or templates.
The auto-scheduler can automatically generate a large
search space and find a good schedule in the space.
We use matrix multiplication as an example in this tutorial.
"""
Expand All @@ -36,62 +35,72 @@
from tvm import te, testing, auto_scheduler

######################################################################
# Define the computation
# ^^^^^^^^^^^^^^^^^^^^^^
# To begin with, we define the computation of a matmul with bias add.
# The function should return the list of input/output tensors.
# From these tensors, the auto-scheduler can get the whole computational graph.


@auto_scheduler.register_workload
def matmul_add(N, L, M, dtype):
A = te.placeholder((N, L), name='A', dtype=dtype)
B = te.placeholder((L, M), name='B', dtype=dtype)
C = te.placeholder((N, M), name='C', dtype=dtype)
A = te.placeholder((N, L), name="A", dtype=dtype)
B = te.placeholder((L, M), name="B", dtype=dtype)
C = te.placeholder((N, M), name="C", dtype=dtype)

k = te.reduce_axis((0, L), name='k')
matmul = te.compute((N, M), lambda i, j: te.sum(A[i, k] * B[k, j], axis=k),
name='matmul')
D = te.compute((N, M), lambda i, j: matmul[i, j] + C[i, j], name='D')
k = te.reduce_axis((0, L), name="k")
matmul = te.compute((N, M), lambda i, j: te.sum(A[i, k] * B[k, j], axis=k), name="matmul")
out = te.compute((N, M), lambda i, j: matmul[i, j] + C[i, j], name="D")

return [A, B, C, out]

return [A, B, C, D]

######################################################################
# We then create a search task with N=L=M=128 and dtype='float32'
# Create the search task
# ^^^^^^^^^^^^^^^^^^^^^^
# We then create a search task with N=L=M=128 and dtype="float32"

target = tvm.target.Target("llvm")
task = auto_scheduler.create_task(matmul_add, (128, 128, 128, 'float32'), target)
task = auto_scheduler.create_task(matmul_add, (128, 128, 128, "float32"), target)

# inspect the computational graph
print(task.compute_dag)

######################################################################
# Next, we set parameters for the auto-scheduler.
# `num_measure_trials` is the number of measurement trials we can use during the search.
# We only make 10 trials in this tutorial for fast demonstration. In practice, 1000 is a good value for
# the search to converge. You can do more trials according to your time budget.
# In addition, we use `RecordToFile` to log measurement records into a file `test.json`.
# The measurement records can be used to query the history best, resume the search,
# or train the cost model later.

tune_option = auto_scheduler.TuningOptions(num_measure_trials=2,
measure_callbacks=[auto_scheduler.RecordToFile('test.json')])
#
# * `num_measure_trials` is the number of measurement trials we can use during the search.
# We only make 10 trials in this tutorial for a fast demonstration. In practice, 1000 is a
# good value for the search to converge. You can do more trials according to your time budget.
# * In addition, we use `RecordToFile` to dump measurement records into a file `matmul.json`.
# The measurement records can be used to query the history best, resume the search,
# or do more analysis later.
# * See :any:`auto_schedule.TuningOptions` for more parameters

tune_option = auto_scheduler.TuningOptions(
num_measure_trials=10, measure_callbacks=[auto_scheduler.RecordToFile("matmul.json")]
)

######################################################################
# Run the search
# ^^^^^^^^^^^^^^
# Now we get all inputs ready. Pretty simple, isn't it?
# We can kick off the search and let the auto-scheduler do its magic.
# After some measurement trials, it will return the best schedule it finds.

sch, args = auto_scheduler.auto_schedule(task,
tuning_options=tune_option)
sch, args = auto_scheduler.auto_schedule(task, tuning_options=tune_option)

######################################################################
# We can lower schedule to see the IR after auto-scheduling.
# We can also build the binary function as usual.

print(tvm.lower(sch, args, simple_mode=True))
func = tvm.build(sch, args)

######################################################################
# Finally, let us do a correctness check
# Check correctness
# ^^^^^^^^^^^^^^^^^
# We build the binary and check its correctness

# check correctness
func = tvm.build(sch, args)
a_np = np.random.uniform(size=(128, 128)).astype(np.float32)
b_np = np.random.uniform(size=(128, 128)).astype(np.float32)
c_np = np.random.uniform(size=(128, 128)).astype(np.float32)
Expand All @@ -100,4 +109,25 @@ def matmul_add(N, L, M, dtype):
d_tvm = tvm.nd.empty(d_np.shape)
func(tvm.nd.array(a_np), tvm.nd.array(b_np), tvm.nd.array(c_np), d_tvm)

tvm.testing.assert_allclose(d_np, d_tvm.asnumpy(), rtol=1e-2)
tvm.testing.assert_allclose(d_np, d_tvm.asnumpy(), rtol=1e-3)

######################################################################
# Using the record file
# ^^^^^^^^^^^^^^^^^^^^^
# During the search, all measurement records are dumped into the record
# file "matmul.json". The measurement records can be used to resume the
# search, re-apply search results, or do other analysis.
#
# Here we show an example where we load the best schedule from a file,
# print the equivalent python schedule API, and build the binary again.

inp, res = auto_scheduler.load_best("matmul.json", task.workload_key)

# Print equivalent python schedule API. This can be used for debugging and
# learning the behavior of auto-scheduler.
print(task.compute_dag.print_python_code_from_state(inp.state))

# Rebuild the binary. This shows how you can apply the best schedule from a
# log file without rerunning the search.
sch, args = task.compute_dag.apply_steps_from_state(inp.state)
func = tvm.build(sch, args)
2 changes: 1 addition & 1 deletion tutorials/autotvm/README.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
.. _tutorials-autotvm-sec:

AutoTVM : Template-based Auto Tuning
-----------
------------------------------------

0 comments on commit 48aa0e9

Please sign in to comment.