@@ -140,7 +140,13 @@ def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorst
         # Checks
         model_onnx = onnx.load(f)  # load onnx model
         onnx.checker.check_model(model_onnx)  # check onnx model
         # LOGGER.info(onnx.helper.printable_graph(model_onnx.graph))  # print
+        # Metadata
+        d = {'stride': int(max(model.stride)), 'names': model.names}
+        for k, v in d.items():
+            meta = model_onnx.metadata_props.add()
+            meta.key, meta.value = k, str(v)
+        onnx.save(model_onnx, f)

         # Simplify
         if simplify:
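The added metadata block embeds the model's max stride and class-name map as string key/value pairs in the ONNX file itself, so a consumer of the exported model no longer needs the original PyTorch checkpoint to recover them. A minimal read-back sketch using plain `onnx`, assuming a placeholder path `yolov5s.onnx` (not a file this diff produces):

```python
# Minimal sketch: read the metadata written by the export hunk above.
# 'yolov5s.onnx' is a placeholder path, used here only for illustration.
import onnx

model_onnx = onnx.load('yolov5s.onnx')
meta = {p.key: p.value for p in model_onnx.metadata_props}  # repeated key/value entries
print(meta.get('stride'))  # e.g. '32' -- stored as a string via str(v)
print(meta.get('names'))   # e.g. "{0: 'person', 1: 'bicycle', ...}" -- a stringified dict
```

Everything is stringified on write (`str(v)`), so readers must parse the values back to their native types, which is exactly what the loading hunk below does.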
@@ -328,6 +328,9 @@ class DetectMultiBackend(nn.Module):
             import onnxruntime
             providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else ['CPUExecutionProvider']
             session = onnxruntime.InferenceSession(w, providers=providers)
+            meta = session.get_modelmeta().custom_metadata_map  # metadata
+            if 'stride' in meta:
+                stride, names = int(meta['stride']), eval(meta['names'])
         elif xml:  # OpenVINO
             LOGGER.info(f'Loading {w} for OpenVINO inference...')
             check_requirements(('openvino-dev',))  # requires openvino-dev: https://pypi.org/project/openvino-dev/
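On the load side, ONNX Runtime surfaces the same key/value pairs through the session's model metadata, which is what the `DetectMultiBackend` hunk above relies on. A standalone sketch of that read path, again with a placeholder model path:

```python
# Minimal sketch: recover stride/names from an ONNX Runtime session.
# 'yolov5s.onnx' is a placeholder path.
import onnxruntime

session = onnxruntime.InferenceSession('yolov5s.onnx', providers=['CPUExecutionProvider'])
meta = session.get_modelmeta().custom_metadata_map  # dict of str -> str
if 'stride' in meta:
    stride = int(meta['stride'])  # e.g. 32
    names = eval(meta['names'])   # stringified dict back to a dict
```

`eval` round-trips the stringified `names` dict exactly as written by the exporter; since the metadata comes from the export step itself this is trusted input, though `ast.literal_eval` would be a stricter drop-in if models can arrive from untrusted sources.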