[BYOC][FIX] Infer types in MergeComposite (#5766)
If InferType isn't run between partitioning passes, the
freshly inserted composite function calls have no checked
type. This can cause failures for patterns whose check
functions need to query types.

This works around the problem by running InferType after
every partitioning.

Change-Id: Ie0887f0564a41eb0913bfe42a362e8effe9681b9
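
For context, the new C++ helper in the diff below mirrors an idiom that is also available from the Python API: wrap a bare function in a module, run the InferType pass, and extract the function again. A minimal sketch (the helper name `infer_type` is ours, not part of the commit):

```python
import tvm
from tvm import relay

def infer_type(func):
    # Wrap the bare function in an IRModule, run the InferType pass,
    # and pull the (now fully typed) function back out.
    mod = tvm.IRModule.from_expr(func)
    mod = relay.transform.InferType()(mod)
    return mod["main"]
```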
mbaret authored Jun 12, 2020
1 parent f672639 commit 04496d3
Showing 2 changed files with 59 additions and 18 deletions.
src/relay/transforms/merge_composite.cc (10 additions, 3 deletions)
```diff
@@ -36,17 +36,24 @@ namespace tvm {
 namespace relay {
 namespace merge_composite {
 
+Function InferType(const Function& expr) {
+  auto mod = IRModule::FromExpr(expr);
+  mod = transform::InferType()(mod);
+  return Downcast<Function>(mod->Lookup("main"));
+}
+
 Expr MergeComposite(const Function& func, const Array<runtime::String>& pattern_names,
                     const Array<DFPattern>& patterns, const std::vector<PackedFunc>& checks) {
   CHECK_EQ(pattern_names.size(), patterns.size());
-  Expr merged_expr = func->body;
+  Function merged_func = func;
   // merge the patterns one-by-one in order
   for (size_t i = 0; i < patterns.size(); i++) {
     Map<String, ObjectRef> attrs;
     attrs.Set("Composite", pattern_names[i]);
-    merged_expr = PartitionPattern(patterns[i], merged_expr, attrs, checks[i]);
+    merged_func = Downcast<Function>(PartitionPattern(patterns[i], merged_func, attrs, checks[i]));
+    merged_func = InferType(merged_func);
   }
-  return Function(func->params, merged_expr, func->ret_type, func->type_params, func->attrs);
+  return std::move(merged_func);
 }
 
 }  // namespace merge_composite
```
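
Why the extra inference step matters: `PartitionPattern` wraps each match in a call to a fresh composite function, and that new call node carries no `checked_type` until type inference runs again. A check function that walks the matched expression and queries types, in the style of the tests below, might look like this (an illustrative sketch; the helper name and the batch-size test are ours):

```python
def check_conv_input_batch(extract):
    # 'extract' is the matched subgraph: nn.relu(nn.bias_add(nn.conv2d(...))).
    conv = extract.args[0].args[0]    # walk relu -> bias_add -> conv2d
    typ = conv.args[0].checked_type   # fails if InferType hasn't been re-run
    return bool(typ.shape[0] == 1)    # accept only batch-size-1 inputs
```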
tests/python/relay/test_pass_merge_composite.py (49 additions, 15 deletions)
```diff
@@ -916,31 +916,63 @@ def before():
         x = relay.var('x', shape=(1, 10, 10, 10))
         w = relay.var('w', shape=(10, 10, 3, 3))
         b = relay.var('b', shape=(8,))
-        conv = relay.nn.conv2d(x,
+        add = relay.op.add(x, x)
+        relu = relay.nn.relu(add)
+        conv = relay.nn.conv2d(relu,
                                w,
                                kernel_size=(3, 3),
                                kernel_layout="OIHW",
                                data_layout="NHWC")
         bias = relay.nn.bias_add(conv, b)
-        relu = relay.nn.relu(bias)
-        return relay.Function([x, w, b], relu)
+        relu2 = relay.nn.relu(bias)
+        return run_opt_pass(relay.Function([x, w, b], relu2), relay.transform.InferType())
 
-    def expected():
-        x = relay.var('x')
-        w = relay.var('w')
-        b = relay.var('b')
-        conv = relay.nn.conv2d(x, w, kernel_size=(3, 3), kernel_layout="OIHW", data_layout="NHWC")
+    def expected_false():
+        x = relay.var('x', shape=(1, 10, 10, 10))
+        w = relay.var('w', shape=(10, 10, 3, 3))
+        b = relay.var('b', shape=(8, ))
+
+        x0 = relay.var('x')
+        y0 = relay.var('y')
+
+        add = relay.op.add(y0, y0)
+        relu = relay.nn.relu(add)
+        func = relay.Function([x0, y0], relu)
+        func = func.with_attr("PartitionedFromPattern", "add_nn.relu_")
+        func = func.with_attr("Composite", "add_relu")
+        call = relay.Call(func, [x, x])
+
+        conv = relay.nn.conv2d(call, w, kernel_size=(3, 3), kernel_layout="OIHW", data_layout="NHWC")
         bias = relay.nn.bias_add(conv, b)
-        relu = relay.nn.relu(bias)
-        func = relay.Function([x, w, b], relu)
-        func = func.with_attr("Composite", "conv_bias_relu")
-        func = func.with_attr("PartitionedFromPattern", "nn.conv2d_nn.bias_add_nn.relu_")
+        relu2 = relay.nn.relu(bias)
+        return relay.Function([x, w, b], relu2)
 
+    def expected_true():
         x = relay.var('x', shape=(1, 10, 10, 10))
         w = relay.var('w', shape=(10, 10, 3, 3))
         b = relay.var('b', shape=(8, ))
-        return relay.Function([x, w, b], func(x, w, b))
+
+        x0 = relay.var('x')
+        y0 = relay.var('y')
+
+        add = relay.op.add(y0, y0)
+        relu = relay.nn.relu(add)
+        func = relay.Function([x0, y0], relu)
+        func = func.with_attr("PartitionedFromPattern", "add_nn.relu_")
+        func = func.with_attr("Composite", "add_relu")
+        call = relay.Call(func, [x, x])
+
+        x2 = relay.var('x')
+        w1 = relay.var('w')
+        b1 = relay.var('b')
+        conv = relay.nn.conv2d(x2, w1, kernel_size=(3, 3), kernel_layout="OIHW", data_layout="NHWC")
+        bias = relay.nn.bias_add(conv, b1)
+        relu2 = relay.nn.relu(bias)
+        func = relay.Function([x2, w1, b1], relu2)
+        func = func.with_attr("Composite", "conv_bias_relu")
+        func = func.with_attr("PartitionedFromPattern", "nn.conv2d_nn.bias_add_nn.relu_")
+        call = relay.Call(func, [call, w, b])
+        return relay.Function([x, w, b], call)
 
     def _check_type_true(extract):
         conv = extract.args[0].args[0]
@@ -953,14 +985,16 @@ def _check_type_false(extract):
         return bool(typ.shape[0] != 1)
 
     pattern_table_false = [
+        ("add_relu", make_add_relu_pattern()),
         ("conv_bias_relu", make_conv_bias_relu_pattern(), _check_type_false)
     ]
-    check_result(pattern_table_false, before(), before())
+    check_result(pattern_table_false, before(), expected_false())
 
     pattern_table_true = [
+        ("add_relu", make_add_relu_pattern()),
         ("conv_bias_relu", make_conv_bias_relu_pattern(), _check_type_true)
     ]
-    check_result(pattern_table_true, before(), expected())
+    check_result(pattern_table_true, before(), expected_true())
 
 
 if __name__ == "__main__":
```
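
For reference, the `make_add_relu_pattern` and `make_conv_bias_relu_pattern` helpers used in the pattern tables are defined earlier in this test file with the Relay dataflow pattern language; they look roughly like this (paraphrased, not the verbatim definitions):

```python
from tvm.relay.dataflow_pattern import is_op, wildcard

def make_add_relu_pattern():
    # Matches relu(add(_, _)).
    add_node = wildcard() + wildcard()
    return is_op('nn.relu')(add_node)

def make_conv_bias_relu_pattern():
    # Matches relu(bias_add(conv2d(_, _), _)).
    conv_node = is_op('nn.conv2d')(wildcard(), wildcard())
    bias_node = is_op('nn.bias_add')(conv_node, wildcard())
    return is_op('nn.relu')(bias_node)
```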
