forked from apache/tvm
Commit
- Fix merge
- Fix merge and clean up logs
- Add BiasAdd, Concat, padding ceil mode, and clean up code
- Fix formatting and remove unused headers
- Uncomment models
- Fix bug with variable input, clean up
- Don't split batch norm
- Move TRT execution to TrtExecutor
- Clean up
- Clean up
- Add partitioning
- Implement graph_runtime execution for Relay/TRT
- Fix bug in extern op
- Fix compilation
- Add EnableTrt pass to perform same modification as previous wholegraphannotator
- Re-enable NNVM TRT
- Remove SimplifyBatchnorm, add rules for converting ops
- Fix format, remove unused tests
- Enable multiple outputs
- Fix multiple outputs
- Fix activation lookup
- Fix no newline at EOF
- Add license header
- Add consistency test to models
- Add method to check whether TRT was used. Improve comments
- Fix lint
- Add util to check TRT version
- Add if guards around TRT 5.1 APIs
- Add env var for workspace size, fix logger
- Fix build
- Add TRT versioning to EnableTrt pass
- Fix build error in DLR
- Fix compile for DLR
- Update dmlc-core, fix copyright header, undo change to includes
- Remove unused headers
- Fix IsTrtCompatible visitor and move op list to constructor
- Add dropout to compatible ops for CheckTrtCompatible only
- Add not-compatible test
- Add squeeze, transpose, reshape, pad, and reduce ops
- Add transpose-on-weights workaround
- Fix formatting. Add unit tests
- Support transpose on weights for conv2d and dense. Support asymmetric padding. Temporary fix for 1D inputs. Add unit tests for all ops
- Support StridedSlice, AdaptivePooling approximation, PyTorch addmm fixer pass
- Support (2,3,0,1) transpose on weights
- Allow stride to be incomplete
- Support ConstantNode -> kWeight
- Fix: pass serialized graph by value in runtime
- Allow inclusive count for strided pool
- Comments, disable failing test
- Fix CI lint
- Remove unused variables from TrtBuilder. Add more comments
- Fix build for TRT4
- Add GetTrtVersion(), move convert map to function, remove unneeded include, make batch_size_ and logger_ TrtBuilder members, check output existence
- Use shared_ptr for converters. Add check for number of outputs and inputs
- Support image.resize
- Make GetOpConverters return a shared_ptr (a sketch of this converter-table design follows the commit details below)
- Clarify count-inclusive padding weirdness
- Use external codegen/runtime
- Move to src/runtime/contrib/tensorrt. Add Save and Load methods for the tensorrt module. Rename some classes
- Require format to be "tensorrt" so that the loader knows how to load
- FoldConstants
- Destroy engine and context after use. Store TRT weights from op converters. Formatting
- Always apply ConvertLayout to NCHW
- Clean up
- Add ASF header
- Change ObjectRef -> NodeRef
- Fix lint
- Fix pylint
- Fix bug with scalar weights
- Make TRT cmake more informative
- Make tensorrt tests dependent on whether TRT codegen is enabled
- Add serialization test
Trevor Morris committed on Jan 10, 2020
1 parent 303a471 · commit aa98061
Showing 16 changed files with 3,129 additions and 3 deletions.
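Several entries in the log above (moving the convert map into a function, using shared_ptr for converters, making GetOpConverters return a shared_ptr) describe one design: a lazily built table that maps Relay op names to per-op converter objects shared by every TrtBuilder. The sketch below only illustrates that pattern; TrtOpConverter, Conv2DConverter, and GetOpConverters as written here are hypothetical stand-ins, not the classes added in this commit.

// Illustrative sketch of the shared converter table described in the commit
// log; the names below are hypothetical stand-ins, not this commit's classes.
#include <memory>
#include <string>
#include <unordered_map>

struct TrtOpConverter {
  virtual ~TrtOpConverter() = default;
  // Convert one Relay op into TensorRT layers (details omitted in this sketch).
  virtual void Convert() = 0;
};

using ConverterMap =
    std::unordered_map<std::string, std::shared_ptr<TrtOpConverter>>;

// Built once on first use; returning a shared_ptr lets every builder reuse
// the same converter objects instead of re-creating the table.
std::shared_ptr<ConverterMap> GetOpConverters() {
  static auto table = std::make_shared<ConverterMap>();
  // Converters would be registered here, e.g.
  // (*table)["nn.conv2d"] = std::make_shared<Conv2DConverter>();
  return table;
}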
@@ -0,0 +1,77 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/*!
 * \file src/relay/backend/contrib/tensorrt/codegen.cc
 * \brief Implementation of the TensorRT codegen APIs.
 */

#include <tvm/node/serialization.h>
#include <tvm/relay/attrs/nn.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/transform.h>
#include <tvm/relay/type.h>
#include <tvm/runtime/module.h>
#include <tvm/runtime/registry.h>

#include <fstream>
#include <sstream>

#include "../../../../runtime/contrib/tensorrt/tensorrt_module.h"
#include "../codegen_c/codegen_c.h"

namespace tvm {
namespace relay {
namespace contrib {

class TrtModuleCodegen : public CSourceModuleCodegenBase {
 public:
  runtime::Module CreateCSourceModule(const NodeRef& ref) override {
    std::string serialized_subgraph;
    if (ref->IsInstance<FunctionNode>()) {
      serialized_subgraph = SaveJSON(Downcast<Function>(ref)->body);
    } else if (ref->IsInstance<relay::ModuleNode>()) {
      relay::Module mod = Downcast<relay::Module>(ref);
      // TODO(trevmorr): Support multiple functions. It is currently not
      // possible for there to be more than one TRT func, so not a problem yet.
      for (const auto& it : mod->functions) {
        serialized_subgraph = SaveJSON(Downcast<Function>(it.second)->body);
      }
    } else {
      LOG(FATAL) << "The input ref is expected to be a Relay function or module";
    }
    return runtime::TensorRTModuleCreate(serialized_subgraph);
  }
};

/*!
 * \brief The external compiler/codegen tool. It takes a Relay expression/module
 * and compiles it into a runtime module.
 */
runtime::Module TrtCompiler(const NodeRef& ref) {
  TrtModuleCodegen tensorrt;
  return tensorrt.CreateCSourceModule(ref);
}

TVM_REGISTER_API("relay.ext.tensorrt").set_body_typed(TrtCompiler);

}  // namespace contrib
}  // namespace relay
}  // namespace tvm
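
For context, an external codegen registered this way is looked up through TVM's packed-function registry and invoked on a partitioned Relay function during the build. The following is a minimal, hypothetical usage sketch, not part of this commit: the helper name BuildTrtSubgraph and the variable func are illustrative; only the registry name "relay.ext.tensorrt" comes from the code above.

// Minimal usage sketch (illustrative, not part of this commit): call the
// external codegen registered above as "relay.ext.tensorrt". The helper name
// BuildTrtSubgraph is hypothetical.
#include <tvm/relay/expr.h>
#include <tvm/runtime/module.h>
#include <tvm/runtime/registry.h>

tvm::runtime::Module BuildTrtSubgraph(const tvm::relay::Function& func) {
  // Look up the packed function registered by TVM_REGISTER_API above.
  const auto* compiler = tvm::runtime::Registry::Get("relay.ext.tensorrt");
  CHECK(compiler != nullptr) << "relay.ext.tensorrt is not registered";
  // TrtCompiler returns a runtime::Module wrapping the serialized subgraph
  // (see TensorRTModuleCreate above).
  tvm::runtime::Module mod = (*compiler)(func);
  return mod;
}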