Make `select_device()` robust to `batch_size=-1` (#5940)

* Found a bug that occurs when batch_size is set to -1 to use autobatch.

reproduce:

* Fix type conflict

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
This commit is contained in:
Felix You 2021-12-10 22:27:20 +08:00 committed by GitHub
parent 4fb6dd4b26
commit c45f9f678d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 2 additions and 2 deletions

View File

@ -53,7 +53,7 @@ def git_describe(path=Path(__file__).parent): # path must be a directory
return '' # not a git repository return '' # not a git repository
def select_device(device='', batch_size=None, newline=True): def select_device(device='', batch_size=0, newline=True):
# device = 'cpu' or '0' or '0,1,2,3' # device = 'cpu' or '0' or '0,1,2,3'
s = f'YOLOv5 🚀 {git_describe() or date_modified()} torch {torch.__version__} ' # string s = f'YOLOv5 🚀 {git_describe() or date_modified()} torch {torch.__version__} ' # string
device = str(device).strip().lower().replace('cuda:', '') # to string, 'cuda:0' to '0' device = str(device).strip().lower().replace('cuda:', '') # to string, 'cuda:0' to '0'
@ -68,7 +68,7 @@ def select_device(device='', batch_size=None, newline=True):
if cuda: if cuda:
devices = device.split(',') if device else '0' # range(torch.cuda.device_count()) # i.e. 0,1,6,7 devices = device.split(',') if device else '0' # range(torch.cuda.device_count()) # i.e. 0,1,6,7
n = len(devices) # device count n = len(devices) # device count
if n > 1 and batch_size: # check batch_size is divisible by device_count if n > 1 and batch_size > 0: # check batch_size is divisible by device_count
assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}' assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}'
space = ' ' * (len(s) + 1) space = ' ' * (len(s) + 1)
for i, d in enumerate(devices): for i, d in enumerate(devices):