From 68d8173cd2b38dc4250589e884868b5357984de4 Mon Sep 17 00:00:00 2001
From: MORITA Kazutaka
Date: Sun, 24 May 2020 03:50:58 +0900
Subject: [PATCH 1/2] [CODEGEN][CONTRIB] Various update for CoreML codegen

---
 apps/ios_rpc/tvmrpc/TVMRuntime.mm            |   1 +
 python/tvm/contrib/target/coreml.py          |  42 +++++--
 src/runtime/contrib/coreml/coreml_runtime.mm |  12 +-
 tests/python/contrib/test_coreml_codegen.py  | 115 ++++++++++++++++---
 4 files changed, 144 insertions(+), 26 deletions(-)

diff --git a/apps/ios_rpc/tvmrpc/TVMRuntime.mm b/apps/ios_rpc/tvmrpc/TVMRuntime.mm
index d9b834ba13a6..8d7fdb2861db 100644
--- a/apps/ios_rpc/tvmrpc/TVMRuntime.mm
+++ b/apps/ios_rpc/tvmrpc/TVMRuntime.mm
@@ -35,6 +35,7 @@
 #include "../../../src/runtime/thread_pool.cc"
 #include "../../../src/runtime/threading_backend.cc"
 #include "../../../src/runtime/workspace_pool.cc"
+#include "../../../src/runtime/metadata_module.cc"
 
 // RPC server
 #include "../../../src/runtime/rpc/rpc_channel.cc"
diff --git a/python/tvm/contrib/target/coreml.py b/python/tvm/contrib/target/coreml.py
index e74457ee5378..e789a03db300 100644
--- a/python/tvm/contrib/target/coreml.py
+++ b/python/tvm/contrib/target/coreml.py
@@ -56,6 +56,27 @@ def _convert_batch_flatten(builder, name, inputs, outputs, args, attrs):
         output_name=outputs[0]
     )
 
+def _convert_expand_dims(builder, name, inputs, outputs, args, attrs):
+    if attrs.axis >= 0:
+        axes = list(range(attrs.axis, attrs.axis+attrs.num_newaxis))
+    else:
+        axes = list(range(attrs.axis-attrs.num_newaxis+1, attrs.axis+1))
+
+    builder.add_expand_dims(
+        name=name,
+        input_name=inputs[0],
+        output_name=outputs[0],
+        axes=axes
+    )
+
+def _convert_relu(builder, name, inputs, outputs, args, attrs):
+    builder.add_activation(
+        name=name,
+        non_linearity='RELU',
+        input_name=inputs[0],
+        output_name=outputs[0]
+    )
+
 def _convert_softmax(builder, name, inputs, outputs, args, attrs):
     builder.add_softmax_nd(
         name=name,
@@ -111,6 +132,8 @@ def _convert_global_avg_pool2d(builder, name, inputs, outputs, args, attrs):
     'add' : _convert_add,
     'multiply' : _convert_multiply,
     'clip' : _convert_clip,
+    'expand_dims' : _convert_expand_dims,
+    'nn.relu' : _convert_relu,
     'nn.batch_flatten' : _convert_batch_flatten,
     'nn.softmax' : _convert_softmax,
     'nn.conv2d' : _convert_conv2d,
@@ -207,20 +230,19 @@ def compile(self, out_dir):
 
 
 @tvm._ffi.register_func("relay.ext.coremlcompiler")
-def coreml_compiler(ref):
+def coreml_compiler(func):
     """
     Create a CoreML runtime from a Relay module.
     """
+    assert isinstance(func, tvm.relay.function.Function)
     model_dir = os.getcwd()
-    if isinstance(ref, tvm.ir.module.IRModule):
-        for var, func in ref.functions.items():
-            name = var.name_hint
-            builder = CodegenCoreML(name, func)
-            builder.visit(func.body)
-            mlmodelc_path = "{}/{}.mlmodelc".format(model_dir, name)
-            if os.path.exists(mlmodelc_path):
-                shutil.rmtree(mlmodelc_path)
-            builder.compile(model_dir)
+    name = str(func.attrs.global_symbol)
+    builder = CodegenCoreML(name, func)
+    builder.visit(func.body)
+    mlmodelc_path = "{}/{}.mlmodelc".format(model_dir, name)
+    if os.path.exists(mlmodelc_path):
+        shutil.rmtree(mlmodelc_path)
+    builder.compile(model_dir)
 
     ctx = tvm.cpu(0)
     return coreml_runtime.create(model_dir, ctx).module
diff --git a/src/runtime/contrib/coreml/coreml_runtime.mm b/src/runtime/contrib/coreml/coreml_runtime.mm
index e6d22517d20f..2d98489b73f6 100644
--- a/src/runtime/contrib/coreml/coreml_runtime.mm
+++ b/src/runtime/contrib/coreml/coreml_runtime.mm
@@ -49,6 +49,9 @@
   } else if (dtype == DataType::Float(32)) {
     dataType = MLMultiArrayDataTypeFloat32;
     size *= sizeof(float);
+  } else if (dtype == DataType::Int(32)) {
+    dataType = MLMultiArrayDataTypeInt32;
+    size *= sizeof(int);
   } else {
     LOG(FATAL) << "unsupported data type " << dtype;
     return;
@@ -88,6 +91,9 @@
   } else if (data_desc.dataType == MLMultiArrayDataTypeFloat32) {
     dtype = DataType::Float(32);
     size *= sizeof(float);
+  } else if (data_desc.dataType == MLMultiArrayDataTypeInt32) {
+    dtype = DataType::Int(32);
+    size *= sizeof(int);
   } else {
     LOG(FATAL) << "unexpected data type " << data_desc.dataType;
   }
@@ -135,7 +141,7 @@
 PackedFunc CoreMLRuntime::GetFunction(const std::string& name,
                                       const ObjectPtr<Object>& sptr_to_self) {
   // Return member functions during query.
-  if (name == "invoke") {
+  if (name == "invoke" || name == "run") {
     return PackedFunc(
         [sptr_to_self, this](TVMArgs args, TVMRetValue* rv) { GetModel("main").Invoke(); });
   } else if (name == "set_input") {
@@ -151,7 +157,7 @@
     return PackedFunc([sptr_to_self, this](TVMArgs args, TVMRetValue* rv) {
       *rv = GetModel("main").GetNumOutputs();
     });
-  } else {
+  } else if (model_map_.count(name) != 0) {
     // Return the packedfunc which executes the subgraph.
     return PackedFunc([sptr_to_self, name, this](TVMArgs args, TVMRetValue* rv) {
       CoreMLModel& model = GetModel(name);
@@ -188,6 +194,8 @@
       }
      *rv = out;
    });
+  } else {
+    return PackedFunc();
   }
 }
 
diff --git a/tests/python/contrib/test_coreml_codegen.py b/tests/python/contrib/test_coreml_codegen.py
index be47b3e4fc2b..300239c118b3 100644
--- a/tests/python/contrib/test_coreml_codegen.py
+++ b/tests/python/contrib/test_coreml_codegen.py
@@ -95,19 +95,6 @@ def test_annotate():
     assert tvm.ir.structural_equal(mod, expected, map_free_vars=True)
 
 
-@mock.patch('tvm.contrib.coreml_runtime.create')
-@mock.patch('tvm.contrib.xcode.compile_coreml')
-def test_construct_model(m1, m2):
-    mod = _create_graph_annotated()
-
-    fcompile = tvm._ffi.get_global_func("relay.ext.coremlcompiler")
-
-    for var, func in mod.functions.items():
-        if func.attrs and 'Compiler' in func.attrs and \
-           func.attrs['Compiler'] == 'coremlcompiler':
-            fcompile(tvm.IRModule.from_expr(func.body))
-
-
 @pytest.mark.skipif(not _has_xcode(), reason="Xcode is not available")
 def test_compile_and_run():
     ctx=tvm.cpu()
@@ -133,7 +120,107 @@
     tvm.testing.assert_allclose(out.asnumpy(), expected, rtol=tol, atol=tol)
 
 
+@mock.patch('tvm.contrib.coreml_runtime.create')
+@mock.patch('tvm.contrib.xcode.compile_coreml')
+def _construct_model(func, m1, m2):
+    mod = tvm.IRModule()
+    mod["main"] = func
+    mod = transform.AnnotateTarget("coremlcompiler")(mod)
+    mod = transform.PartitionGraph()(mod)
+
+    fcompile = tvm._ffi.get_global_func("relay.ext.coremlcompiler")
+
+    for var, func in mod.functions.items():
+        if func.attrs and 'Compiler' in func.attrs and \
+           func.attrs['Compiler'] == 'coremlcompiler':
+            fcompile(func)
+
+
+def test_add():
+    shape = (10, 10)
+    x = relay.var('x', shape=shape)
+    y = x + x
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_multiply():
+    shape = (10, 10)
+    x = relay.var('x', shape=shape)
+    y = x * x
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_clip():
+    shape = (10, 10)
+    x = relay.var('x', shape=shape)
+    y = relay.clip(x, a_min=0.0, a_max=1.0)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_batch_flatten():
+    shape = (10, 10, 10)
+    x = relay.var('x', shape=shape)
+    y = relay.nn.batch_flatten(x)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_expand_dims():
+    shape = (10, 10)
+    x = relay.var('x', shape=shape)
+    y = relay.expand_dims(x, axis=0)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+    y = relay.expand_dims(x, axis=-1)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_relu():
+    shape = (10, 10)
+    x = relay.var('x', shape=shape)
+    y = relay.nn.relu(x)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_softmax():
+    shape = (10, 10)
+    x = relay.var('x', shape=shape)
+    y = relay.nn.softmax(x, axis=1)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_conv2d():
+    x = relay.var('x', shape=(1,3,224,224))
+    w = relay.const(np.zeros((16,3,3,3), dtype='float32'))
+    y = relay.nn.conv2d(x, w, strides=[2, 2], padding=[1, 1, 1, 1], kernel_size=[3, 3])
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
+def test_global_avg_pool2d():
+    shape = (10, 10, 10, 10)
+    x = relay.var('x', shape=shape)
+    y = relay.nn.global_avg_pool2d(x)
+    func = relay.Function([x], y)
+    _construct_model(func)
+
+
 if __name__ == "__main__":
     test_annotate()
-    test_construct_model()
     test_compile_and_run()
+    test_add()
+    test_multiply()
+    test_clip()
+    test_expand_dims()
+    test_relu()
+    test_batch_flatten()
+    test_softmax()
+    test_conv2d()
+    test_global_avg_pool2d()

From 8e4e922552c0ff0212945568f41ba912081cf847 Mon Sep 17 00:00:00 2001
From: MORITA Kazutaka
Date: Fri, 26 Jun 2020 20:48:01 +0900
Subject: [PATCH 2/2] fix lint error

---
 apps/ios_rpc/tvmrpc/TVMRuntime.mm | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/apps/ios_rpc/tvmrpc/TVMRuntime.mm b/apps/ios_rpc/tvmrpc/TVMRuntime.mm
index 8d7fdb2861db..9e2899bf6e5e 100644
--- a/apps/ios_rpc/tvmrpc/TVMRuntime.mm
+++ b/apps/ios_rpc/tvmrpc/TVMRuntime.mm
@@ -27,6 +27,7 @@
 #include "../../../src/runtime/dso_library.cc"
 #include "../../../src/runtime/file_util.cc"
 #include "../../../src/runtime/library_module.cc"
+#include "../../../src/runtime/metadata_module.cc"
 #include "../../../src/runtime/module.cc"
 #include "../../../src/runtime/ndarray.cc"
 #include "../../../src/runtime/object.cc"
@@ -35,7 +36,6 @@
 #include "../../../src/runtime/thread_pool.cc"
 #include "../../../src/runtime/threading_backend.cc"
 #include "../../../src/runtime/workspace_pool.cc"
-#include "../../../src/runtime/metadata_module.cc"
 
 // RPC server
 #include "../../../src/runtime/rpc/rpc_channel.cc"