Skip to content

Commit

Permalink
[TVMC][TRANSFORMS] ToMixedPrecision transform support with custom opt…
Browse files Browse the repository at this point in the history
…ions enabled

Adds new command line options
 --mixed-precision
 --mixed-precision-ops
 --mixed-precision-input
 --mixed-precision-output

and --desired-layout-ops

This PR also enhances the Python interface by replacing alter_layout with transform_args.
transform_args is a dict with all transform-related options, including the existing desired_layout or alter_layout option.
  • Loading branch information
srkreddy1238 committed Feb 23, 2023
1 parent e7ad4bc commit 4b62c12
Show file tree
Hide file tree
Showing 5 changed files with 258 additions and 58 deletions.
47 changes: 18 additions & 29 deletions python/tvm/driver/tvmc/autotuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
from .model import TVMCModel
from .target import target_from_cli, generate_target_args, reconstruct_target_args
from .shape_parser import parse_shape_string
from .transform import convert_graph_layout
from .transform import generate_transform_args, parse_graph_transform_args, apply_graph_transforms


# pylint: disable=invalid-name
Expand Down Expand Up @@ -127,12 +127,7 @@ def add_tune_parser(subparsers, _, json_params):
metavar="PATH",
help="path to an auto-tuning log file by AutoTVM.",
)
parser.add_argument(
"--desired-layout",
choices=["NCHW", "NHWC"],
default=None,
help="change the data layout of the whole graph",
)
generate_transform_args(parser)
parser.add_argument(
"--enable-autoscheduler",
help="enable tuning the graph through the AutoScheduler tuner",
Expand Down Expand Up @@ -269,6 +264,8 @@ def drive_tune(args):
rpc_hostname = None
rpc_port = None

transform_args = parse_graph_transform_args(args)

tune_model(
tvmc_model,
args.target,
Expand All @@ -283,7 +280,7 @@ def drive_tune(args):
tuner=args.tuner,
min_repeat_ms=args.min_repeat_ms,
early_stopping=args.early_stopping,
desired_layout=args.desired_layout,
transform_args=transform_args,
timeout=args.timeout,
repeat=args.repeat,
number=args.number,
Expand All @@ -309,7 +306,7 @@ def tune_model(
tuner: str = "xgb",
min_repeat_ms: Optional[int] = None,
early_stopping: Optional[int] = None,
desired_layout: Optional[str] = None,
transform_args: Optional[Dict[str, Any]] = None,
timeout: int = 10,
repeat: int = 1,
number: int = 10,
Expand Down Expand Up @@ -354,10 +351,8 @@ def tune_model(
Minimum time to run each trial. Defaults to 0 on x86 and 1000 on other targets.
early_stopping : int, optional
When specified, stop tuning after this number of trials if results aren't improving.
desired_layout : str, optional
Can be one of "NCHW" or "NHWC". When specified, compatible operations in the graph
will have their layout set to this format. Tasks will then be tuned using this
specified layout.
transform_args: dict, optional
Graph transformation arguments that are applied to the relay module.
timeout : int, optional,
If a kernel trial lasts longer than this duration in seconds, it will be
considered a failure.
Expand Down Expand Up @@ -453,7 +448,7 @@ def tune_model(
mod=mod,
params=params,
target=target,
alter_layout=desired_layout,
transform_args=transform_args,
hardware_params=hardware_params,
include_simple_tasks=include_simple_tasks,
)
Expand All @@ -475,7 +470,7 @@ def tune_model(
mod=mod,
params=params,
target=target,
alter_layout=desired_layout,
transform_args=transform_args,
)

# In autotvm, trials is specified per task. We can convert the per-model input
Expand Down Expand Up @@ -504,7 +499,7 @@ def autotvm_get_tuning_tasks(
params: Dict[str, tvm.nd.NDArray],
target: str,
target_host: Optional[str] = None,
alter_layout: Optional[str] = None,
transform_args: Optional[Dict[str, Any]] = None,
):
"""Get the autotvm tuning tasks for a given relay module.
Expand All @@ -518,10 +513,8 @@ def autotvm_get_tuning_tasks(
The compilation target.
target_host : str, optional
The compilation target for the host.
alter_layout : str, optional
The layout to convert the graph to. Note, the convert layout
pass doesn't currently guarantee the whole of the graph will
be converted to the chosen layout.
transform_args: dict, optional
Graph transformation arguments that are applied to the relay module.
Returns
-------
Expand All @@ -530,8 +523,7 @@ def autotvm_get_tuning_tasks(
"""
target, target_host = Target.canon_target_and_host(target, target_host)

if alter_layout:
mod = convert_graph_layout(mod, alter_layout)
mod = apply_graph_transforms(mod, transform_args)

tasks = autotvm.task.extract_from_program(
mod["main"],
Expand All @@ -547,7 +539,7 @@ def autoscheduler_get_tuning_tasks(
params: Dict[str, tvm.nd.NDArray],
target: str,
target_host: Optional[str] = None,
alter_layout: Optional[str] = None,
transform_args: Optional[Dict[str, Any]] = None,
hardware_params: Optional[HardwareParams] = None,
include_simple_tasks: bool = False,
):
Expand All @@ -563,10 +555,8 @@ def autoscheduler_get_tuning_tasks(
The compilation target.
target_host : str, optional
The compilation target for the host.
alter_layout : str, optional
The layout to convert the graph to. Note, the convert layout
pass doesn't currently guarantee the whole of the graph will
be converted to the chosen layout.
transform_args: dict, optional
Graph transformation arguments that are applied to the relay module.
hardware_params : Optional[HardwareParams]
Hardware parameters used for the search tasks
Expand All @@ -579,8 +569,7 @@ def autoscheduler_get_tuning_tasks(
"""
target, target_host = Target.canon_target_and_host(target, target_host)

if alter_layout:
mod = convert_graph_layout(mod, alter_layout)
mod = apply_graph_transforms(mod, transform_args)

# Extract the tasks
tasks, task_weights = auto_scheduler.extract_tasks(
Expand Down
23 changes: 8 additions & 15 deletions python/tvm/driver/tvmc/compiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
from .target import target_from_cli, generate_target_args, reconstruct_target_args
from .pass_config import parse_configs
from .pass_list import parse_pass_list_str
from .transform import convert_graph_layout
from .transform import generate_transform_args, parse_graph_transform_args, apply_graph_transforms
from .shape_parser import parse_shape_string
from .workspace_pools import generate_workspace_pools_args, workspace_pools_recombobulate

Expand All @@ -61,12 +61,7 @@ def add_compile_parser(subparsers, _, json_params):
default="",
help="the cross compiler options to generate target libraries, e.g. '-mfpu=neon-vfpv4'.",
)
parser.add_argument(
"--desired-layout",
choices=["NCHW", "NHWC"],
default=None,
help="change the data layout of the whole graph.",
)
generate_transform_args(parser)
parser.add_argument(
"--dump-code",
metavar="FORMAT",
Expand Down Expand Up @@ -177,6 +172,7 @@ def drive_compile(args):

additional_targets = reconstruct_target_args(args)
workspace_pools_target, extra_targets = target_from_cli(args.target, additional_targets)
transform_args = parse_graph_transform_args(args)

compile_model(
tvmc_model,
Expand All @@ -191,7 +187,7 @@ def drive_compile(args):
output_format=args.output_format,
dump_code=dump_code,
target_host=None,
desired_layout=args.desired_layout,
transform_args=transform_args,
disabled_pass=args.disabled_pass,
pass_context_configs=args.pass_config,
mod_name=args.module_name,
Expand All @@ -217,7 +213,7 @@ def compile_model(
output_format: str = "so",
dump_code: Optional[List[str]] = None,
target_host: Optional[str] = None,
desired_layout: Optional[str] = None,
transform_args: Optional[Dict[str, Any]] = None,
disabled_pass: Optional[str] = None,
pass_context_configs: Optional[List[str]] = None,
additional_target_options: Optional[Dict[str, Dict[str, Any]]] = None,
Expand Down Expand Up @@ -260,10 +256,8 @@ def compile_model(
target_host : str, optional
The target of the host machine if host-side code
needs to be generated.
desired_layout: str, optional
The layout to convert the graph to. Note, the convert layout
pass doesn't currently guarantee the whole of the graph will
be converted to the chosen layout.
transform_args: dict, optional
Graph transformation arguments that are applied to the relay module.
disabled_pass: str, optional
Comma-separated list of passes which needs to be disabled
during compilation
Expand Down Expand Up @@ -310,8 +304,7 @@ def compile_model(
disabled_pass=disabled_pass,
instruments=instruments,
):
if desired_layout:
mod = convert_graph_layout(mod, desired_layout)
mod = apply_graph_transforms(mod, transform_args)

for partition_function, opts in zip(partition_functions, partition_opts):
mod = partition_function(mod, params, mod_name=mod_name, **opts)
Expand Down
Loading

0 comments on commit 4b62c12

Please sign in to comment.