[AutoTVM] Minor bug fixes in AutoTVM for QNN graphs.
anijain2305 committed Feb 1, 2020
1 parent 6798ba8 commit 5e0c672
Showing 4 changed files with 11 additions and 7 deletions.
7 changes: 4 additions & 3 deletions python/tvm/autotvm/graph_tuner/utils/traverse_graph.py
@@ -126,10 +126,10 @@ def _traverse_expr(node):
             for i, input_idx in enumerate(node_entry["inputs"]):
                 input_node_entry = node_list[input_idx[0]]
                 input_type = input_node_entry["types"][input_idx[1]]
-                if not isinstance(input_node_entry["node"], (Var, Call)):
+                if not isinstance(input_node_entry["node"], (Var, Constant, Call)):
                     raise RuntimeError("Graph tuner can only tune target "
                                        "operators with input node of type "
-                                       "relay.expr.Var or relay.expr.Call. Now "
+                                       "relay.expr.Var/Constant/Call. Now "
                                        "find a target op %s with input type %s"
                                        % (op_name, str(type(input_node_entry["node"]))))
                 free_var = relay.Var("var_%d" % i, input_type)
@@ -167,7 +167,8 @@ def _traverse_expr(node):
             else:
                 node_entry["inputs"].append([in_node_idx, 0, 0])
         elif isinstance(node, Constant):
-            pass
+            node_entry["name"] = "Constant_" + str(node_index)
+            node_entry["types"] = [node.checked_type]
         elif isinstance(node, relay.op.op.Op):
             return
         else:
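Context for the change above: the Constant branch used to be a bare pass, so a Constant node produced an entry without "name" or "types", and QNN graphs, whose operators routinely take Constant scales and zero points as inputs, then tripped the isinstance check in the first hunk. A minimal, self-contained sketch of the new behavior; the stand-in Constant class and fill_constant_entry helper are illustrative, not TVM API:

# Illustrative stand-in for relay.expr.Constant, not the real class.
class Constant:
    def __init__(self, checked_type):
        self.checked_type = checked_type

# Hypothetical helper mirroring the Constant branch added above.
def fill_constant_entry(node_entry, node, node_index):
    # Record a synthetic name and the checked type instead of skipping,
    # so downstream lookups of node_entry["name"]/["types"] succeed.
    node_entry["name"] = "Constant_" + str(node_index)
    node_entry["types"] = [node.checked_type]
    return node_entry

print(fill_constant_entry({}, Constant("float32"), 7))
# {'name': 'Constant_7', 'types': ['float32']}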
2 changes: 1 addition & 1 deletion python/tvm/autotvm/graph_tuner/utils/utils.py
@@ -73,7 +73,7 @@ def is_boundary_node(node_entry, input_names):
     # Operators dependent on original layouts.
     _LAYOUT_FIXED_OP = ["batch_flatten", "transpose", "reshape",
                         "multibox_prior", "multibox_transform_loc", "where",
-                        "non_max_suppression", "strided_slice"]
+                        "non_max_suppression"]

     out = node_entry["op"] in _LAYOUT_FIXED_OP or \
           ("name" in node_entry and node_entry["name"] in input_names)
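The boundary check that consumes _LAYOUT_FIXED_OP appears directly below the edited list in the diff context. A runnable sketch of that logic, reproduced from the context with toy node entries, shows the effect of dropping "strided_slice": the graph tuner no longer treats it as a layout boundary, so layout search can cross it:

_LAYOUT_FIXED_OP = ["batch_flatten", "transpose", "reshape",
                    "multibox_prior", "multibox_transform_loc", "where",
                    "non_max_suppression"]

def is_boundary_node(node_entry, input_names):
    # A node bounds a tunable subgraph if its operator must keep the
    # original layout, or if it is one of the graph's inputs.
    return node_entry["op"] in _LAYOUT_FIXED_OP or \
           ("name" in node_entry and node_entry["name"] in input_names)

print(is_boundary_node({"op": "strided_slice"}, []))  # now False
print(is_boundary_node({"op": "reshape"}, []))        # still True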
7 changes: 5 additions & 2 deletions python/tvm/autotvm/task/relay_integration.py
@@ -50,7 +50,8 @@ def _lower(mod,
         grc.codegen(mod["main"])
     # default case
     compiler = relay.vm.VMCompiler()
-    compiler.set_params(params)
+    if params:
+        compiler.set_params(params)
     compiler.lower(mod, target=target)


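The guard matters because VMCompiler.set_params iterates params.items() (see the vm.py hunk below), so passing params=None, as task extraction does for a bare Relay module, would raise. A small sketch of the pattern with a stand-in compiler; FakeCompiler is hypothetical, only the method names of relay.vm.VMCompiler are real:

class FakeCompiler:
    # Stand-in for relay.vm.VMCompiler, just to exercise the guard.
    def set_params(self, params):
        for name, param in params.items():   # would raise on None
            pass
    def lower(self, mod, target=None):
        pass

def lower_with_optional_params(compiler, mod, target, params=None):
    if params:                # skip both None and an empty dict
        compiler.set_params(params)
    compiler.lower(mod, target=target)

lower_with_optional_params(FakeCompiler(), mod=None, target="llvm")  # no crash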
@@ -123,7 +124,9 @@ def extract_from_multiple_program(mods, params, ops, target, target_host=None,
     # relay op -> topi compute
     OP2TOPI = {
         tvm.relay.op.nn.conv2d: [topi.nn.conv2d, topi.nn.depthwise_conv2d_nchw,
-                                 topi.nn.group_conv2d_nchw, topi.nn.conv2d_NCHWc],
+                                 topi.nn.group_conv2d_nchw,
+                                 topi.nn.conv2d_NCHWc,
+                                 topi.nn.conv2d_NCHWc_int8],
         tvm.relay.op.nn.conv2d_transpose: [topi.nn.conv2d_transpose_nchw],
         tvm.relay.op.nn.dense: [topi.nn.dense],
         tvm.relay.op.nn.batch_matmul: [topi.nn.batch_matmul],
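OP2TOPI maps each tunable Relay op to the TOPI compute functions whose invocations task extraction should record; a compute missing from the list is skipped, which is why int8 NCHWc convolutions from QNN graphs produced no tasks before this change. A toy sketch of how such a table is consumed; the string keys and the topi_candidates helper are illustrative, the real table keys are Relay op objects and the values TOPI functions:

# Illustrative table with string stand-ins for the Relay/TOPI objects.
OP2TOPI = {
    "nn.conv2d": ["conv2d", "depthwise_conv2d_nchw", "group_conv2d_nchw",
                  "conv2d_NCHWc", "conv2d_NCHWc_int8"],
    "nn.dense": ["dense"],
}

def topi_candidates(relay_op):
    # Hypothetical helper: every listed compute gets wrapped so that calls
    # to it during compilation are recorded as tunable AutoTVM tasks.
    return OP2TOPI.get(relay_op, [])

print(topi_candidates("nn.conv2d"))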
2 changes: 1 addition & 1 deletion python/tvm/relay/backend/vm.py
@@ -422,7 +422,7 @@ def set_params(self, params):
         inputs = {}
         for name, param in params.items():
             if isinstance(param, np.ndarray):
-                param = _nd.array(param)
+                param = tvm.nd.array(param)
             inputs[name] = _expr.const(param)
         self._set_params_func(inputs)

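The one-line fix replaces _nd.array, presumably an unimported alias in this module, with the public tvm.nd.array, which converts a numpy array to a TVM NDArray before it is wrapped as a Relay constant. A runnable sketch of that conversion step in isolation, assuming tvm and numpy are installed; to_tvm_params is a hypothetical helper, tvm.nd.array is the real API:

import numpy as np
import tvm

def to_tvm_params(params):
    # Mirror set_params: numpy arrays become tvm.nd.NDArray, everything
    # else passes through untouched.
    out = {}
    for name, param in params.items():
        if isinstance(param, np.ndarray):
            param = tvm.nd.array(param)
        out[name] = param
    return out

print(to_tvm_params({"weight": np.zeros((2, 2), "float32")}))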
