[Relay][AlterOp] Improving support for broadcast layout alteration.
anijain2305 committed Oct 1, 2019
1 parent 0cd8047 commit 6185291
Showing 7 changed files with 92 additions and 33 deletions.
14 changes: 14 additions & 0 deletions include/tvm/data_layout.h
@@ -210,6 +210,20 @@ class Layout : public NodeRef {
    return ct;
  }

  /*! \return Concatenation of all primal axes */
  inline std::string get_primal_axes() const {
    std::string primal_axis = "";
    if (!defined()) {
      return primal_axis;
    }
    for (auto x : operator->()->axes) {
      if (LayoutAxis::Get(x).IsPrimal()) {
        primal_axis += LayoutAxis::Get(x).name();
      }
    }
    return primal_axis;
  }

  /*!
   * \brief return the index of the input axis.
   *        If it is not found in the layout or the layout is undefined,
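To see what the new helper extracts: in TVM layout strings, uppercase letters are primal axes and lowercase letters (with their split factors, e.g. 16c) are subordinate axes. A minimal Python sketch of the same extraction, for illustration only:

    def get_primal_axes(layout):
        # Keep only the uppercase (primal) axes; split factors like "16c" drop out.
        return "".join(c for c in layout if c.isupper())

    assert get_primal_axes("NCHW16c") == "NCHW"
    assert get_primal_axes("C") == "C"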
1 change: 1 addition & 0 deletions src/relay/op/tensor/transform.cc
@@ -37,6 +37,7 @@
#include "../op_common.h"
#include "../../../arithmetic/compute_expr.h"
#include "../../pass/alter_op_layout.h"
#include "../../pass/pattern_util.h"
#include "transform.h"

namespace tvm {
47 changes: 35 additions & 12 deletions src/relay/pass/alter_op_layout.cc
@@ -38,26 +38,49 @@
#include <unordered_map>

#include "alter_op_layout.h"
#include "pattern_util.h"

namespace tvm {
namespace relay {

namespace alter_op_layout {

// Make a transform CallNode
/* Performs two operations:
 * 1) If src_layout has fewer dims than dst_layout, expand_dims is inserted to match the
 *    dim size. For example, with src_layout = C and dst_layout = NCHW16c, src is expanded
 *    to NHWC.
 * 2) Call layout transform with the new src layout.
 */
Expr TransformLayout(Expr raw, Layout src_layout, Layout dst_layout) {
  if (src_layout.Equals(dst_layout)) { return raw; }
  CHECK(src_layout.defined() && dst_layout.defined())
      << "Cannot insert layout transform because there are undefined layouts";
  CHECK(BijectiveLayoutNode::make(src_layout, dst_layout).defined())
      << "Cannot insert layout transform because there are inconvertible layouts: "
      << src_layout << " v.s. " << dst_layout;
  static auto &transform_op = Op::Get("layout_transform");
  NodePtr<LayoutTransformAttrs> attrs = make_node<LayoutTransformAttrs>();
  attrs->src_layout = src_layout.name();
  attrs->dst_layout = dst_layout.name();
  Call transform = CallNode::make(transform_op, {raw}, Attrs{attrs});
  return std::move(transform);
  if (src_layout.Equals(dst_layout)) {
    return raw;
  }

  // 1) Check if the shape lengths are different. If yes, expand dims.
  Expr input_expr = raw;
  Layout new_src_layout = src_layout;
  if (src_layout.ndim_primal() < dst_layout.ndim_primal()) {
    int num_new_axis = dst_layout.ndim_primal() - src_layout.ndim_primal();
    std::string src_primal_layout = src_layout.get_primal_axes();
    std::string dst_primal_layout = dst_layout.get_primal_axes();
    std::string new_src_layout_str = "";
    for (auto s : dst_primal_layout) {
      if (src_primal_layout.find(s) == std::string::npos) {
        new_src_layout_str += s;
      }
    }
    new_src_layout_str += src_primal_layout;
    new_src_layout = Layout(new_src_layout_str);
    input_expr = MakeExpandDims(input_expr, 0, num_new_axis);
  }

  // 2) Insert layout transform on the transformed src.
  CHECK(new_src_layout.defined() && dst_layout.defined())
      << "Cannot insert layout transform because there are undefined layouts";
  CHECK(BijectiveLayoutNode::make(new_src_layout, dst_layout).defined())
      << "Cannot insert layout transform because there are inconvertible layouts: "
      << new_src_layout << " v.s. " << dst_layout;
  return MakeLayoutTransform(input_expr, new_src_layout.name(), dst_layout.name());
}

// Memorize layout transform so we can reuse internal transformed nodes
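As a worked example of step 1: with src_layout = C and dst_layout = NCHW16c, the primal axes of dst that src lacks (NHW) are prepended, giving NHWC, and expand_dims adds three leading axes. A standalone Python sketch of that derivation (the helper name expand_src_layout is hypothetical, not part of this commit):

    def expand_src_layout(src, dst):
        # Primal axes are the uppercase letters of a layout string.
        src_primal = "".join(c for c in src if c.isupper())
        dst_primal = "".join(c for c in dst if c.isupper())
        # Prepend the primal axes of dst that src is missing, in dst order.
        missing = "".join(c for c in dst_primal if c not in src_primal)
        return missing + src_primal

    assert expand_src_layout("C", "NCHW16c") == "NHWC"
    assert expand_src_layout("HW", "NCHW16c") == "NCHW"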
21 changes: 3 additions & 18 deletions src/relay/pass/alter_op_layout.h
@@ -111,27 +111,12 @@ inline Array<Array<Layout> > BinaryBroadcastLayout(const Attrs& attrs,
    int scalar = layouts[0].ndim() == 0 ? 0 : 1;
    return Array<Array<Layout> >{layouts, {layouts[1-scalar]}};
  } else {
    // try to broadcast the tensors to the larger dimension
    // Set the layout of the tensor with the larger rank. If one tensor has a smaller
    // rank, expand_dims is inserted while transforming the layout.
    int large_idx = layouts[0].ndim_primal() >= layouts[1].ndim_primal() ? 0 : 1;
    int small_idx = 1 - large_idx;
    Layout ret = layouts[large_idx];

    // extract common part
    size_t i = layouts[large_idx].ndim();
    for (; i != 0; --i) {
      const auto& axis = layouts[large_idx][i-1];
      if (!layouts[small_idx].Contains(axis.ToPrimal())) {
        break;
      }
    }

    Layout common_part = layouts[large_idx].SubLayout(i, layouts[large_idx].ndim() - i);
    if (!BijectiveLayoutNode::make(layouts[small_idx], common_part).defined()) {
      // not convertible
      return Array<Array<Layout> > {{Layout::Undef()}, {Layout::Undef()}};
    }

    layouts.Set(small_idx, common_part);
    layouts.Set(small_idx, ret);
    return Array<Array<Layout> > {layouts, {ret}};
  }
}
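The net effect: instead of extracting a common sublayout, both inputs now adopt the larger-rank input's layout, and TransformLayout handles the rank mismatch via expand_dims. A rough Python sketch of the new branch, assuming string layouts and neither input being a scalar:

    def binary_broadcast_layout(lhs, rhs):
        def ndim_primal(layout):
            return sum(c.isupper() for c in layout)
        # The higher-rank input keeps its layout; the lower-rank input adopts it.
        ret = lhs if ndim_primal(lhs) >= ndim_primal(rhs) else rhs
        return [ret, ret], [ret]

    assert binary_broadcast_layout("NCHW16c", "C") == (["NCHW16c", "NCHW16c"], ["NCHW16c"])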
2 changes: 2 additions & 0 deletions src/relay/pass/pattern_util.h
@@ -505,6 +505,8 @@ Expr MakeSqueeze(Expr data, Array<Integer> axis);

Expr MakeExpandDims(Expr data, int axis, int num_newaxis);

Expr MakeLayoutTransform(Expr data, std::string src_layout, std::string dst_layout);

Expr StopFusion(Expr data);

Expr CastHint(Expr data, DataType dtype);
31 changes: 31 additions & 0 deletions tests/python/relay/test_op_qnn_conv2d.py
@@ -608,6 +608,36 @@ def tflite_anistropic_strides():
    golden_output = np.array((124, -92, 164, -132)).reshape(1, 1, 2, 2)
    np.testing.assert_equal(qnn_output, golden_output)

def broadcast_layout_test():
    # Test broadcast support for NHWC layout.
    data_shape = (1, 229, 229, 3)  # NHWC
    data_dtype = 'uint8'
    kernel_shape = (7, 7, 3, 64)  # HWIO
    kernel_dtype = 'int8'
    _, qnn_func = get_funcs(data_shape=data_shape,
                            data_dtype=data_dtype,
                            kernel_shape=kernel_shape,
                            kernel_dtype=kernel_dtype,
                            input_zero_point=8,
                            kernel_zero_point=3,
                            kernel_size=(7, 7),
                            padding=(1, 1),
                            strides=(1, 1),
                            dilation=(1, 1),
                            data_layout="NHWC",
                            kernel_layout="HWIO",
                            out_dtype="int32")
    func = qnn_func['main'].body
    bias = relay.var("bias", shape=(64,), dtype="int32")

    # Check broadcast support on both lhs and rhs
    func = relay.add(func, bias)
    func = relay.add(bias, func)
    func = relay.Function(relay.analysis.free_vars(func), func)
    mod = relay.Module.from_expr(func)
    with relay.build_config(opt_level=3):
        graph, lib, params = relay.build(mod, "llvm -mcpu=skylake-avx512")
if __name__ == "__main__":
    no_zero_point_test()
    input_zero_point_test()
@@ -621,3 +651,4 @@ def tflite_anistropic_strides():
    tflite_large_irregular_test()
    tflite_output_multiplier_greater_than_one()
    tflite_anistropic_strides()
    broadcast_layout_test()
9 changes: 6 additions & 3 deletions tests/python/relay/test_pass_alter_op_layout.py
@@ -134,7 +134,8 @@ def expected():
                        kernel_layout="OIHW16i",
                        data_layout="NCHW16c")
    b = relay.expand_dims(bias, axis=1, num_newaxis=2)
    b = relay.layout_transform(b, "CHW", "CHW16c")
    b = relay.expand_dims(b, axis=0, num_newaxis=1)
    b = relay.layout_transform(b, "NCHW", "NCHW16c")
    y = relay.add(y, b)

    y = relay.nn.relu(y)
@@ -304,8 +305,10 @@ def expected():
    weight = relay.var("weight")
    x = relay.layout_transform(x, "NCHW", "NCHW16c")
    bias = relay.expand_dims(bias, 1, 2)
    bias = relay.layout_transform(bias, "CHW", "CHW16c")
    scale = relay.layout_transform(scale, "CHW", "CHW16c")
    bias = relay.expand_dims(bias, 0, 1)
    bias = relay.layout_transform(bias, "NCHW", "NCHW16c")
    scale = relay.expand_dims(scale, 0, 1)
    scale = relay.layout_transform(scale, "NCHW", "NCHW16c")
    y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1),
                        data_layout="NCHW16c")
    y = relay.add(y, bias)  # test broadcasting to lhs
