From 6b7ca608cd76699e515d7d973d01d81495c5a533 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Tue, 4 Jan 2022 18:23:53 -0800
Subject: [PATCH] Edge TPU compiler comment

---
 export.py        | 4 ++--
 models/common.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/export.py b/export.py
index 3447fc6ed1ab..17b994df82a4 100644
--- a/export.py
+++ b/export.py
@@ -17,7 +17,7 @@
 TensorFlow.js               | `tfjs`          | yolov5s_web_model/
 
 Usage:
-    $ python path/to/export.py --weights yolov5s.pt --include torchscript onnx coreml openvino saved_model tflite tfjs
+    $ python path/to/export.py --weights yolov5s.pt --include torchscript onnx openvino engine coreml tflite ...
 
 Inference:
     $ python path/to/detect.py --weights yolov5s.pt  # PyTorch
@@ -308,7 +308,7 @@ def export_tflite(keras_model, im, file, int8, data, ncalib, prefix=colorstr('Te
 def export_edgetpu(keras_model, im, file, prefix=colorstr('Edge TPU:')):
     # YOLOv5 Edge TPU export https://coral.ai/docs/edgetpu/models-intro/
     try:
-        cmd = 'edgetpu_compiler --version'
+        cmd = 'edgetpu_compiler --version'  # install https://coral.ai/docs/edgetpu/compiler/
         out = subprocess.run(cmd, shell=True, capture_output=True, check=True)
         ver = out.stdout.decode().split()[-1]
         LOGGER.info(f'\n{prefix} starting export with Edge TPU compiler {ver}...')
diff --git a/models/common.py b/models/common.py
index d8d5423a16e0..b055cb68a439 100644
--- a/models/common.py
+++ b/models/common.py
@@ -376,8 +376,8 @@ def wrap_frozen_graph(gd, inputs, outputs):
         elif tflite:  # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python
             if 'edgetpu' in w.lower():  # Edge TPU
                 LOGGER.info(f'Loading {w} for TensorFlow Lite Edge TPU inference...')
-                import tflite_runtime.interpreter as tfli
-                delegate = {'Linux': 'libedgetpu.so.1',  # install https://coral.ai/software/#edgetpu-runtime
+                import tflite_runtime.interpreter as tfli  # install https://coral.ai/software/#edgetpu-runtime
+                delegate = {'Linux': 'libedgetpu.so.1',
                             'Darwin': 'libedgetpu.1.dylib',
                             'Windows': 'edgetpu.dll'}[platform.system()]
                 interpreter = tfli.Interpreter(model_path=w, experimental_delegates=[tfli.load_delegate(delegate)])
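
For context, a minimal standalone sketch of the two patterns the patched comments point at: checking for the Edge TPU compiler with subprocess, and loading a compiled TFLite model through the libedgetpu delegate from tflite_runtime. The model path 'yolov5s-int8_edgetpu.tflite' is a placeholder, not taken from the patch, and the sketch assumes the Coral compiler and Edge TPU runtime are installed per the linked docs.

# Minimal sketch; assumes edgetpu_compiler and the Edge TPU runtime are installed
# (install https://coral.ai/docs/edgetpu/compiler/ and https://coral.ai/software/#edgetpu-runtime).
import platform
import subprocess

import tflite_runtime.interpreter as tfli

# 1. Verify the Edge TPU compiler is on PATH and report its version
out = subprocess.run('edgetpu_compiler --version', shell=True, capture_output=True, check=True)
print('Edge TPU compiler', out.stdout.decode().split()[-1])

# 2. Load an Edge TPU-compiled model with the per-OS libedgetpu delegate
delegate = {'Linux': 'libedgetpu.so.1',
            'Darwin': 'libedgetpu.1.dylib',
            'Windows': 'edgetpu.dll'}[platform.system()]
interpreter = tfli.Interpreter(model_path='yolov5s-int8_edgetpu.tflite',  # placeholder path
                               experimental_delegates=[tfli.load_delegate(delegate)])
interpreter.allocate_tensors()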