Skip to content

Commit

Permalink
Eliminate dropout by custom onnx optimizer
Browse files — browse the repository at this point in the history
  • Loading branch information
daquexian committed Nov 10, 2019
1 parent c766981 commit 7460af8
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 6 deletions.
2 changes: 1 addition & 1 deletion third_party/onnx
Submodule onnx updated 833 files
6 changes: 1 addition & 5 deletions tools/onnx2bnn/OnnxConverter.cpp
Original file line number · Diff line number · Diff line change
Expand Up @@ -193,6 +193,7 @@ std::vector<std::string> OnnxConverter::Convert(
// for details.
vector<string> optimizers{
"eliminate_nop_pad", "extract_constant_to_initializer",
"dabnn_eliminate_dropout",
"dabnn_convert_gemm_with_reshape_or_flatten_to_conv_and_reshape",
"dabnn_bconv_strict"};
if (level == Level::kModerate || level == Level::kAggressive) {
Expand Down Expand Up @@ -510,11 +511,6 @@ std::vector<std::string> OnnxConverter::Convert(
0, 0, 0, 0, 0, 0, param);
layers_.push_back(layer);
VLOG(5) << "Converting Concat completed";
} else if (op == "Dropout") {
VLOG(5) << "Start converting Dropout";
// Dropout does nothing, so the output is the same as the input
name_map_[node.output(0)] = m(node.input(0));
VLOG(5) << "Converting Dropout completed";
} else if (op == "Reshape") {
VLOG(5) << "Start converting Reshape";
has_reshape = true;
Expand Down

0 comments on commit 7460af8

Please sign in to comment.