Add quick benchmark
daquexian committed Oct 21, 2019
1 parent ddd7280 commit fbbe650
Showing 5 changed files with 145 additions and 8 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -13,3 +13,4 @@ build_*/
tools/onnx2bnn/python/dist/
tools/onnx2bnn/python/onnx2bnn.egg-info/
.clangd/
.build*/
5 changes: 5 additions & 0 deletions benchmark/CMakeLists.txt
@@ -4,3 +4,8 @@ add_executable(dabnn_benchmark benchmark.cpp)
target_link_libraries(dabnn_benchmark
dabnn
benchmark_main)

add_executable(benchmark_single_model benchmark_single_model.cpp)
target_link_libraries(benchmark_single_model
dabnn
benchmark_main)
25 changes: 25 additions & 0 deletions benchmark/benchmark_single_model.cpp
@@ -0,0 +1,25 @@
#include <string>

#include <benchmark/benchmark.h>

#include <dabnn/net.h>

static void BM_single_model(benchmark::State &state, std::string model_path) {
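// Scratch input buffer; 999999 floats is assumed to be large enough for the model's input.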
float input[999999];

auto net = bnn::Net::create();
net->optimize = true;
net->run_fconv = true;
net->strict = true;
net->read(model_path);

for (auto _ : state) {
net->run(input);
}
}

int main(int argc, char **argv) {
benchmark::RegisterBenchmark("single_model", BM_single_model, argv[1]);
benchmark::Initialize(&argc, argv);
benchmark::RunSpecifiedBenchmarks();
}
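For reference, a minimal sketch of running the new benchmark on a device over adb; the binary and model paths are placeholders (the quick_benchmark.py script added later in this commit automates the same steps).

import subprocess

# Placeholder paths: adjust to wherever the arm64 binary and the .dab model were built.
binary = '.build_dabnn_release/benchmark/benchmark_single_model'
model = 'model.dab'

subprocess.check_call('adb push {} /data/local/tmp/'.format(binary), shell=True)
subprocess.check_call('adb push {} /data/local/tmp/'.format(model), shell=True)
# benchmark_single_model takes the on-device model path as its only argument.
subprocess.check_call('adb shell /data/local/tmp/benchmark_single_model /data/local/tmp/model.dab', shell=True)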
82 changes: 74 additions & 8 deletions tools/onnx2bnn/onnx2bnn.cpp
@@ -19,33 +19,47 @@ void usage(const std::string &filename) {
std::cout << "Usage:" << std::endl;
std::cout << " " << filename
<< " onnx_model output_filename [ --strict | --moderate | "
"--aggressive ] [--binary-list] [--verbose]"
"--aggressive ] [--binary-convs list] [--binary-convs-file "
"filename] [--exclude-first-last] [--verbose]"
<< std::endl;
std::cout << std::endl;
std::cout << "Options:" << std::endl;
std::cout
<< " --aggressive The default optimization level. In this level, "
<< " --aggressive The default optimization level. In this "
"level, "
"onnx2bnn will mark all convolutions with binary (+1/-1) weights as "
"binary convolutions. It is for the existing BNN models, which may "
"not use the correct padding value. Note: The output of the "
"generated dabnn model is different from that of the ONNX model "
"since the padding value is 0 instead of -1."
<< std::endl;
std::cout << " --moderate This level is for our \"standard\" "
std::cout << " --moderate This level is for our \"standard\" "
"implementation -- A Conv operator with binary weight and "
"following a -1 Pad operator."
<< std::endl;
std::cout
<< " --strict In this level, onnx2bnn only recognizes the "
<< " --strict In this level, onnx2bnn only recognizes "
"the "
"following natural and correct \"pattern\" of binary convolutions: "
"A Conv operator, whose input is got from a Sign op and a Pad op "
"(the order doesn't matter), and weight is got from a Sign op."
<< std::endl;
std::cout
<< " --binary-list A text file containing the **output "
<< " --binary-convs-file A text file containing the **output "
"names** of some convolutions, which will be treated as binary "
"convlutions unconditionally. It is mainly for benchmark purpose."
<< std::endl;
std::cout
<< " --binary-convs A ','-sperated list (for example, "
"\"4,5,10\") containing the **output "
"names** of some convolutions, which will be treated as binary "
"convlutions unconditionally. It is mainly for benchmark purpose."
<< std::endl;
std::cout
<< " --exclude-first-last Set all convolutions except the first and "
"last convolution as binary convoslutions regardless of what they "
"actually are. It is mainly for benchmark purpose."
<< std::endl;
std::cout << std::endl;
std::cout << "Example:" << std::endl;
std::cout << " " << filename
@@ -58,9 +72,22 @@ void usage(const std::string &filename) {
<< std::endl;
}

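// Split s on delimiter, e.g. split("4,5,10", ",") yields {"4", "5", "10"}.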
vector<string> split(string s, string delimiter) {
vector<string> parts;
size_t pos = 0;
std::string token;
while ((pos = s.find(delimiter)) != std::string::npos) {
token = s.substr(0, pos);
std::cout << token << std::endl;
s.erase(0, pos + delimiter.length());
}
parts.push_back(s);
return parts;
}

int main(int argc, char **argv) {
argh::parser cmdl;
cmdl.add_param("--binary-list");
cmdl.add_params({"--binary-convs", "--binary-convs-file"});
cmdl.parse(argc, argv);
google::InitGoogleLogging(cmdl[0].c_str());
FLAGS_alsologtostderr = true;
@@ -70,12 +97,28 @@ int main(int argc, char **argv) {
}
for (const auto flag : cmdl.flags()) {
if (flag != "strict" && flag != "moderate" && flag != "aggressive" &&
flag != "verbose") {
flag != "verbose" && flag != "exclude-first-last") {
std::cout << "Invalid flag: " << flag << std::endl;
usage(cmdl[0]);
return -2;
}
}
int manual_binary_list = 0;
if (cmdl["binary-convs"]) {
manual_binary_list++;
}
if (cmdl["binary-convs-file"]) {
manual_binary_list++;
}
if (cmdl["exclude-first-last"]) {
manual_binary_list++;
}
if (manual_binary_list > 1) {
std::cerr << "--binary--convs, --binary-convs-list and "
"--exclude-first-last are mutually exclusive"
<< std::endl;
return -2;
}

bnn::OnnxConverter::Level opt_level =
bnn::OnnxConverter::Level::kAggressive;
@@ -91,8 +134,8 @@ int main(int argc, char **argv) {
FLAGS_v = 5;
}

const auto binary_list_filepath = cmdl("binary-list").str();
vector<string> expected_binary_conv_outputs;
const auto binary_list_filepath = cmdl("binary-convs-file").str();
if (!binary_list_filepath.empty()) {
std::ifstream ifs(binary_list_filepath);
if (ifs.is_open()) {
@@ -102,13 +145,36 @@
}
}
}
const auto binary_convs_str = cmdl("binary-convs").str();
if (!binary_convs_str.empty()) {
expected_binary_conv_outputs = split(binary_convs_str, ",");
}
bool exclude_first_last = false;
if (cmdl["exclude-first-last"]) {
exclude_first_last = true;
}

ONNX_NAMESPACE::ModelProto model_proto;
{
std::ifstream ifs(cmdl[1], std::ios::in | std::ios::binary);
model_proto.ParseFromIstream(&ifs);
ifs.close();
}
if (exclude_first_last) {
vector<ONNX_NAMESPACE::NodeProto> binary_node_candidates;
for (const auto &node : model_proto.graph().node()) {
if (node.op_type() == "Conv" || node.op_type() == "Gemm") {
binary_node_candidates.push_back(node);
}
}
for (size_t i = 0; i < binary_node_candidates.size(); i++) {
if (i == 0 || i == binary_node_candidates.size() - 1) {
continue;
}
expected_binary_conv_outputs.push_back(
binary_node_candidates[i].output(0));
}
}

bnn::OnnxConverter converter;
const auto binary_conv_outputs = converter.Convert(
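To illustrate the new options, a hedged sketch of possible onnx2bnn invocations; the executable path, model file names, and output names ("4", "5", "10") are placeholders, not values from this commit.

import subprocess

ONNX2BNN = './tools/onnx2bnn/onnx2bnn'  # placeholder path inside a build directory

# Treat the convolutions whose output names are "4", "5" and "10" as binary.
subprocess.check_call('{} model.onnx model.dab --binary-convs 4,5,10'.format(ONNX2BNN), shell=True)

# Read the output names from a text file instead of the command line.
subprocess.check_call('{} model.onnx model.dab --binary-convs-file convs.txt'.format(ONNX2BNN), shell=True)

# Mark every convolution except the first and the last as binary (benchmarking only).
subprocess.check_call('{} model.onnx model.dab --exclude-first-last'.format(ONNX2BNN), shell=True)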
40 changes: 40 additions & 0 deletions tools/quick_benchmark.py
@@ -0,0 +1,40 @@
#!/usr/bin/env python3

import argparse
import inspect
import subprocess
import os
from pathlib import Path

parser = argparse.ArgumentParser()
parser.add_argument('--onnx', type=str, required=True)
args, others = parser.parse_known_args()

filename = inspect.getframeinfo(inspect.currentframe()).filename
base_dir = Path(filename).resolve().parent.parent
android_ndk = Path(os.getenv('ANDROID_NDK', default='')).resolve()
onnx_model = Path(args.onnx).resolve()
dabnn_build_dir = base_dir/'.build_dabnn_release'
onnx2bnn_build_dir = base_dir/'.build_onnx2bnn_release'
temp_dab_model = onnx2bnn_build_dir/'dabnn_quick_benchmark.dab'
quick_benchmark_bin = dabnn_build_dir/'benchmark'/'benchmark_single_model'
os.makedirs(dabnn_build_dir, exist_ok=True)
os.makedirs(onnx2bnn_build_dir, exist_ok=True)
print("Build dabnn..")
subprocess.check_call('cmake -DCMAKE_TOOLCHAIN_FILE={} -DANDROID_ABI=arm64-v8a ..'.format(android_ndk/'build'/'cmake'/'android.toolchain.cmake'), cwd=dabnn_build_dir, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

subprocess.check_call('cmake --build .', cwd=dabnn_build_dir, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

print("Build onnx2bnn..")
subprocess.check_call('cmake ..', cwd=onnx2bnn_build_dir, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

subprocess.check_call('cmake --build .', cwd=onnx2bnn_build_dir, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

print("Generating daBNN model..")
subprocess.check_call('./tools/onnx2bnn/onnx2bnn {} {} {}'.format(' '.join(others), onnx_model, temp_dab_model), cwd=onnx2bnn_build_dir, shell=True)
print("Pushing daBNN model..")
subprocess.check_call('adb push {} /data/local/tmp/'.format(temp_dab_model), cwd=onnx2bnn_build_dir, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.check_call('adb push {} /data/local/tmp/'.format(quick_benchmark_bin), cwd=dabnn_build_dir, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
print("Benchmarking..")
subprocess.check_call('adb shell /data/local/tmp/{} /data/local/tmp/{}'.format(quick_benchmark_bin.name, temp_dab_model.name), shell=True)
subprocess.check_call('adb shell rm /data/local/tmp/{} /data/local/tmp/{}'.format(quick_benchmark_bin.name, temp_dab_model.name), shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
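A usage sketch for the script: --onnx is the only required argument, and any unrecognized arguments are forwarded verbatim to onnx2bnn; the model path, NDK path, and the extra flag below are placeholders.

import subprocess

# ANDROID_NDK must point at an NDK and a device must be reachable via adb.
# --exclude-first-last is passed through to onnx2bnn unchanged.
subprocess.check_call('ANDROID_NDK=/path/to/ndk python3 tools/quick_benchmark.py --onnx model.onnx --exclude-first-last', shell=True)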
