Skip to content

Commit

Permalink
[Relay][QNN] QNNtoRelay & QNNLegalize Pass utility using Relay Legali…
Browse files Browse the repository at this point in the history
…ze API. (#3838)
  • Loading branch information
anijain2305 authored and zhiics committed Aug 30, 2019
1 parent a5def36 commit 671421a
Show file tree
Hide file tree
Showing 16 changed files with 365 additions and 55 deletions.
7 changes: 6 additions & 1 deletion include/tvm/relay/transform.h
Original file line number Diff line number Diff line change
Expand Up @@ -522,10 +522,15 @@ TVM_DLL Pass AlterOpLayout();

/*!
* \brief Legalizes an expr with another expression.
* \param legalize_map_attr_name The Op's attr name which corresponds to the legalize rule function.
* One can collect and isolate similar type of legalize transformations using this param. For
* example, transformations that only apply to Dialects can be isolated into a FTVMDialectLegalize
* string. This pass calls only those transformations that have been registered using the supplied
* legalize_map_attr_name.
*
* \return The pass.
*/
TVM_DLL Pass Legalize();
TVM_DLL Pass Legalize(const std::string& legalize_map_attr_name = "FTVMLegalize");

/*!
* \brief Canonicalize cast expressions to make operator fusion more efficient.
Expand Down
1 change: 1 addition & 0 deletions python/tvm/relay/qnn/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,4 @@
"""QNN dialect operators and IR passes."""
from __future__ import absolute_import as _abs
from . import op
from . import transform
3 changes: 2 additions & 1 deletion python/tvm/relay/qnn/op/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=wildcard-import
"""Neural network related operators."""
"""QNN dialect related operators."""
from __future__ import absolute_import as _abs
from .qnn import *
from .op import register_qnn_legalize
35 changes: 35 additions & 0 deletions python/tvm/relay/qnn/op/op.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#pylint: disable=unused-argument
"""The register functions for the QNN dialect."""
from tvm.relay.op.op import register as register

def register_qnn_legalize(op_name, legal_op=None, level=10):
    """Register a legalization transformation function for a QNN op.

    The function is stored under the "FTVMQnnLegalize" op attribute, so it is
    only invoked by the QNN Legalize pass (not by the generic Relay Legalize
    pass, which looks at "FTVMLegalize").

    Parameters
    ----------
    op_name : str
        The name of the operator.

    legal_op : function (attrs: Attrs, inputs: List[Expr]) -> new_expr: Expr
        The function for transforming an expr to another expr.

    level : int
        The priority level.
    """
    return register(op_name, "FTVMQnnLegalize", legal_op, level)
115 changes: 115 additions & 0 deletions python/tvm/relay/qnn/transform.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,arguments-differ,no-else-return,unused-argument,missing-docstring
"""
QNN pass transformation infrastructure.
"""
from tvm import relay

def CanonicalizeOps():
    """Converts/Lowers an expression containing QNN ops to an expression containing
    only core (non-Dialect) Relay ops. Each QNN op is lowered to a sequence of
    existing Relay ops. This is a target-independent pass. One can register the
    lowering/transformation function for a QNN op using the FTVMQnnCanonicalize
    attr_name for the FTVMLegalize op attribute. An example of this transformation
    is below.

    Examples
    --------
    .. code-block:: python

        # Original expression
        qnn_expr = relay.qnn.op.requantize(y,
                                           input_scale=1,
                                           input_zero_point=0,
                                           output_scale=1,
                                           output_zero_point=0,
                                           out_dtype='int8')

        # We want to utilize all the existing Relay infrastructure. So, instead
        # of supporting this QNN requantize op, we convert it into a sequence of
        # existing Relay operators.
        mod = relay.Module.from_expr(qnn_expr)
        mod = relay.qnn.transform.CanonicalizeOps()(mod)
        relay_expr = mod['main']
        print(relay_expr)

        def @main(%quantized_data: Tensor[(200), int32]) -> Tensor[(200), int8] {
          %0 = cast(%quantized_data, dtype="int64") /* ty=Tensor[(200), int64] */;
          %1 = multiply(%0, 2 /* ty=int64 */) /* ty=Tensor[(200), int64] */;
          %2 = multiply(%1, 1073741824 /* ty=int64 */) /* ty=Tensor[(200), int64] */;
          %3 = add(%2, 1073741824 /* ty=int64 */) /* ty=Tensor[(200), int64] */;
          %4 = right_shift(%3, 31 /* ty=int64 */) /* ty=Tensor[(200), int64] */;
          %5 = add(0 /* ty=int64 */, %4) /* ty=Tensor[(200), int64] */;
          %6 = clip(%5, a_min=-128f, a_max=127f) /* ty=Tensor[(200), int64] */;
          cast(%6, dtype="int8") /* ty=Tensor[(200), int8] */
        }

    Returns
    -------
    ret : tvm.relay.Pass
        The registered pass that canonicalizes QNN ops to Relay ops.
    """
    # Reuse the generic Relay Legalize pass, but only fire the transformations
    # registered under the QNN-specific attribute name.
    return relay.transform.Legalize("FTVMQnnCanonicalize")


def Legalize():
    """Legalizes QNN ops. As opposed to Relay Legalize, this one legalizes only QNN
    ops. One can register a transformation/legalization function for an op by using
    the FTVMQnnLegalize attr_name for the FTVMLegalize op attribute. The isolation
    of QNN and Relay Legalize gives us separation of concerns, leading to a better
    software practice. The legalization can be configured to happen per target. An
    example of this type of legalization is shown below.

    Examples
    --------
    Suppose the original graph is as follows

    .. code-block:: none

        data(u8)  weight(u8)
            |        |
            |        |
           qnn.conv2d (int32)
                |
                |
           nn.relu (int32)

    Now, we know that Intel Cascade Lake has VNNI instructions to speed up
    convolution. However, it only works on u8 x i8 inputs. So, here, we can use
    QNN Legalize to transform the above graph as follows

    .. code-block:: none

        data(u8)  weight(u8)
            |        |
            |    requantize(i8)
            |        |
           qnn.conv2d (int32)
                |
                |
           nn.relu (int32)

    In this legalization, since we have isolated legalization for QNN ops, it will
    only trigger the transformation for qnn.conv2d (and not nn.relu). This pass can
    be followed by CanonicalizeOps to further lower the qnn.requantize and
    qnn.conv2d into an expr containing only Relay ops.

    Returns
    -------
    ret : tvm.relay.Pass
        The registered pass that legalizes QNN ops.
    """
    # Reuse the generic Relay Legalize pass, restricted to rules registered
    # under the QNN-specific attribute name.
    return relay.transform.Legalize("FTVMQnnLegalize")
9 changes: 7 additions & 2 deletions python/tvm/relay/transform.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,19 +414,24 @@ def AlterOpLayout():
return _transform.AlterOpLayout()


def Legalize():
def Legalize(legalize_map_attr_name="FTVMLegalize"):
"""Legalizes an expression with another expression.
This pass can be used to replace an expr with another expr for target
dependent optimizations. For example, one expr, though semnatically
equivalent to the other, can have better performance on a target. This pass
can be used to legalize the expr in a target-dependent manner.
Parameters
----------
legalize_map_attr_name : str
The Op's attr name which corresponds to the legalize rule function.
Returns
-------
ret : tvm.relay.Pass
The registered pass that rewrites an expr.
"""
return _transform.Legalize()
return _transform.Legalize(legalize_map_attr_name)


def RewriteAnnotatedOps(fallback_device):
Expand Down
77 changes: 47 additions & 30 deletions src/relay/pass/legalize.cc
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
*/

#include <tvm/operation.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/op_attr_types.h>
#include <tvm/relay/transform.h>

Expand All @@ -35,48 +36,64 @@ namespace legalize {

// Call registered FTVMLegalize of an op
// Returns the legalized expression
Expr Legalizer(const Call& ref_call, const Array<Expr>& new_args, const NodeRef& ctx) {
static auto fop_legalize = Op::GetAttr<FTVMLegalize>("FTVMLegalize");
Op op = Downcast<Op>(ref_call->op);

Expr new_e;
bool modified = false;
if (fop_legalize.count(op)) {
// Collect input and output dtypes to pass on to Legalize API.
tvm::Array<tvm::relay::Type> types;
for (auto& expr : ref_call->args) {
types.push_back(expr->checked_type());
class Legalizer : public ExprMutator {
public:
explicit Legalizer(const std::string& legalize_map_attr_name)
: legalize_map_attr_name_{legalize_map_attr_name} {}

Expr VisitExpr_(const CallNode* call_node) {
// Get the new_call node without any changes to current call node.
Expr new_e = ExprMutator::VisitExpr_(call_node);
Call new_call = Downcast<Call>(new_e);

// Collect the registered legalize function.
auto fop_legalize = Op::GetAttr<FTVMLegalize>(legalize_map_attr_name_);
Op op = Downcast<Op>(call_node->op);

if (fop_legalize.count(op)) {
// Collect the new_args.
tvm::Array<Expr> call_args = new_call->args;

// Collect input and output dtypes to pass on to Legalize API.
tvm::Array<tvm::relay::Type> types;
for (auto arg : call_node->args) {
types.push_back(arg->checked_type());
}
types.push_back(call_node->checked_type());

// Transform the op by calling the registered legalize function.
Expr legalized_value = fop_legalize[op](call_node->attrs, call_args, types);

// Reassign new_e if the transformation succeeded.
if (legalized_value.defined()) {
// Check that the returned Expr from legalize is CallNode.
const CallNode* legalized_call_node = legalized_value.as<CallNode>();
CHECK(legalized_call_node)
<< "Can only replace the original operator with another call node";

new_e = legalized_value;
}
}
types.push_back(ref_call->checked_type());

// Transform the op by calling the registered legalize function.
Expr legalized_value = fop_legalize[op](ref_call->attrs, new_args, types);

// Check if the transformation succeeded. If not, revert back to the original ref_call->op.
if (legalized_value.defined()) {
new_e = legalized_value;
modified = true;
}
}
if (!modified) {
new_e = CallNode::make(ref_call->op, new_args, ref_call->attrs);
return new_e;
}

const CallNode* new_call = new_e.as<CallNode>();
CHECK(new_call) << "Can only replace the original operator with another call node";
return GetRef<Call>(new_call);
}
private:
std::string legalize_map_attr_name_;
};

Expr Legalize(const Expr& expr) { return ForwardRewrite(expr, Legalizer, nullptr); }
Expr Legalize(const Expr& expr, const std::string& legalize_map_attr_name) {
return Legalizer(legalize_map_attr_name).Mutate(expr);
}

} // namespace legalize

namespace transform {

Pass Legalize() {
Pass Legalize(const std::string& legalize_map_attr_name) {
runtime::TypedPackedFunc<Function(Function, Module, PassContext)> pass_func =
[=](Function f, Module m, PassContext pc) {
return Downcast<Function>(relay::legalize::Legalize(f));
return Downcast<Function>(relay::legalize::Legalize(f, legalize_map_attr_name));
};
return CreateFunctionPass(pass_func, 3, "Legalize", {ir::StringImm::make("InferType")});
}
Expand Down
8 changes: 4 additions & 4 deletions src/relay/qnn/op/dequantize.cc
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,9 @@ Expr DequantizeLower(const Expr& input_tensor,
return scaled_output;
}

Expr DequantizeLegalize(const Attrs& attrs,
const Array<Expr>& new_args,
const Array<tvm::relay::Type>& types) {
Expr DequantizeQnnCanonicalize(const Attrs& attrs,
const Array<Expr>& new_args,
const Array<tvm::relay::Type>& types) {
CHECK_EQ(new_args.size(), 1);
auto& data = new_args[0];
const auto* dequantize_attrs = attrs.as<DequantizeAttrs>();
Expand All @@ -93,7 +93,7 @@ The input is always quantized (int8, uint8) and will be converted to float32 giv
.add_argument("data", "Tensor", "The tensor to dequantize.")
.set_support_level(11)
.add_type_rel("Dequantize", DequantizeRel)
.set_attr<FTVMLegalize>("FTVMLegalize", DequantizeLegalize);
.set_attr<FTVMLegalize>("FTVMQnnCanonicalize", DequantizeQnnCanonicalize);

TVM_REGISTER_API("relay.qnn.op._make.dequantize")
.set_body_typed(MakeDequantize);
Expand Down
8 changes: 4 additions & 4 deletions src/relay/qnn/op/quantize.cc
Original file line number Diff line number Diff line change
Expand Up @@ -83,9 +83,9 @@ Expr QuantizeLower(const Expr& input_tensor,
return clamp_out_dtype;
}

Expr QuantizeLegalize(const Attrs& attrs,
const Array<Expr>& new_args,
const Array<tvm::relay::Type>& types) {
Expr QuantizeQnnCanonicalize(const Attrs& attrs,
const Array<Expr>& new_args,
const Array<tvm::relay::Type>& types) {
CHECK_EQ(new_args.size(), 1);
auto& data = new_args[0];
const auto* quantize_attrs = attrs.as<QuantizeAttrs>();
Expand All @@ -111,7 +111,7 @@ scale and zero point.
.add_argument("data", "Tensor", "The tensor to quantize.")
.set_support_level(11)
.add_type_rel("Quantize", QuantizeRel)
.set_attr<FTVMLegalize>("FTVMLegalize", QuantizeLegalize);
.set_attr<FTVMLegalize>("FTVMQnnCanonicalize", QuantizeQnnCanonicalize);

TVM_REGISTER_API("relay.qnn.op._make.quantize")
.set_body_typed(MakeQuantize);
Expand Down
6 changes: 3 additions & 3 deletions src/relay/qnn/op/requantize.cc
Original file line number Diff line number Diff line change
Expand Up @@ -192,8 +192,8 @@ Expr RequantizeLower(const Expr& input_tensor, const RequantizeAttrs* param,
*
* Q_output = zp_output + (scale_input)/(scale_ouptut) * (Q_input - zp_input)
*/
Expr RequantizeLegalize(const Attrs& attrs, const Array<Expr>& new_args,
const Array<tvm::relay::Type>& types) {
Expr RequantizeQnnCanonicalize(const Attrs& attrs, const Array<Expr>& new_args,
const Array<tvm::relay::Type>& types) {
CHECK_EQ(new_args.size(), 1);
auto& quantized_data = new_args[0];
const auto* param = attrs.as<RequantizeAttrs>();
Expand Down Expand Up @@ -276,7 +276,7 @@ Q_output = zp_output + (scale_input)/(scale_output) * (Q_input - zp_input)
.add_argument("data", "Tensor", "The quantized input tensor.")
.set_support_level(11)
.add_type_rel("Requantize", RequantizeRel)
.set_attr<FTVMLegalize>("FTVMLegalize", RequantizeLegalize);
.set_attr<FTVMLegalize>("FTVMQnnCanonicalize", RequantizeQnnCanonicalize);

TVM_REGISTER_API("relay.qnn.op._make.requantize")
.set_body_typed(MakeRequantize);
Expand Down
Loading

0 comments on commit 671421a

Please sign in to comment.