|
|
|
|
|
|
|
|
# --- ONNX Runtime backend ---------------------------------------------------
# Build an inference session for the ONNX model `w`, preferring CUDA when the
# caller requested it (`cuda`), then pull stride/class-names out of the model's
# embedded metadata.  (Original chunk had every statement duplicated, which
# would have constructed the InferenceSession twice; deduplicated here.)
import ast  # local import, matching the file's style of deferred backend imports
import onnxruntime
# ONNX Runtime falls through the provider list in order, so listing CUDA first
# with a CPU fallback is safe even if the CUDA provider is unavailable.
providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else ['CPUExecutionProvider']
session = onnxruntime.InferenceSession(w, providers=providers)
meta = session.get_modelmeta().custom_metadata_map  # metadata
if 'stride' in meta:
    # SECURITY: metadata travels inside the (possibly downloaded) model file,
    # so parse it as a literal instead of eval()-ing arbitrary code.
    # Assumes 'names' is a serialized dict/list literal — TODO confirm against
    # the exporter; literal_eval raises ValueError for anything else.
    stride, names = int(meta['stride']), ast.literal_eval(meta['names'])
|
|
elif xml: # OpenVINO |
|
|
elif xml: # OpenVINO |
|
|
LOGGER.info(f'Loading {w} for OpenVINO inference...') |
|
|
LOGGER.info(f'Loading {w} for OpenVINO inference...') |
|
|
check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ |
|
|
check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ |