
`pretrained=False` fix (#5966)

* `pretrained=False` fix

Fix for https://github.com/ultralytics/yolov5/issues/5964 (usage sketch below)

* CI speed improvement
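For context, the fixed branch in hubconf.py runs whenever pretrained weights are not requested (or non-default channels/classes are asked for), e.g. via the standard PyTorch Hub entry point. A minimal sketch, assuming the public ultralytics/yolov5 hub repo and the yolov5s entry point (both taken from the project, not from this commit):

    import torch

    # With pretrained=False, hubconf builds the model from its models/yolov5s.yaml
    # config via the rglob() lookup patched in this commit, instead of downloading
    # and loading a yolov5s.pt checkpoint.
    model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=False)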
Glenn Jocher (GitHub) committed 2 years ago · commit e8ef8fb1ca
1 changed file with 2 additions and 2 deletions:
  1. hubconf.py (+2 -2)

hubconf.py (+2 -2)

@@ ... @@
             model = DetectMultiBackend(path, device=device)  # download/load FP32 model
             # model = models.experimental.attempt_load(path, map_location=device)  # download/load FP32 model
         else:
-            cfg = list((Path(__file__).parent / 'models').rglob(f'{path.name}.yaml'))[0]  # model.yaml path
+            cfg = list((Path(__file__).parent / 'models').rglob(f'{path.stem}.yaml'))[0]  # model.yaml path
             model = Model(cfg, channels, classes)  # create model
             if pretrained:
                 ckpt = torch.load(attempt_download(path), map_location=device)  # load
@@ ... @@
             Image.open('data/images/bus.jpg'),  # PIL
             np.zeros((320, 640, 3))]  # numpy

-    results = model(imgs)  # batched inference
+    results = model(imgs, size=320)  # batched inference
     results.print()
     results.save()
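Why the first hunk matters: at this point path is the checkpoint path (e.g. yolov5s.pt), so Path.name keeps the .pt suffix and the old glob pattern can never match a model config, leaving rglob() empty and the [0] index failing. A standalone sketch, using an example filename only:

    from pathlib import Path

    path = Path('yolov5s.pt')     # checkpoint-style path
    print(f'{path.name}.yaml')    # 'yolov5s.pt.yaml' -> matches nothing, rglob() returns [] and [0] raises IndexError
    print(f'{path.stem}.yaml')    # 'yolov5s.yaml'    -> matches models/yolov5s.yaml

The second hunk shrinks the inference size in the demo block at the bottom of hubconf.py to 320, which accounts for the CI speed improvement noted in the commit message.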
