Browse Source

Namespace `VERBOSE` env variable to `YOLOv5_VERBOSE` (#6428)

* Verbose updates

* Verbose updates
modify Dataloader
Glenn Jocher GitHub 2 years ago
parent
commit
d5966c93f1
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 36 additions and 36 deletions
  1. +6
    -6
      hubconf.py
  2. +27
    -27
      utils/general.py
  3. +3
    -3
      utils/plots.py

+ 6
- 6
hubconf.py View File





def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None):
"""Creates a specified YOLOv5 model
"""Creates or loads a YOLOv5 model


Arguments: Arguments:
name (str): name of model, i.e. 'yolov5s'
name (str): model name 'yolov5s' or path 'path/to/best.pt'
pretrained (bool): load pretrained weights into the model pretrained (bool): load pretrained weights into the model
channels (int): number of input channels channels (int): number of input channels
classes (int): number of model classes classes (int): number of model classes
device (str, torch.device, None): device to use for model parameters device (str, torch.device, None): device to use for model parameters


Returns: Returns:
YOLOv5 pytorch model
YOLOv5 model
""" """
from pathlib import Path from pathlib import Path


from models.common import AutoShape, DetectMultiBackend from models.common import AutoShape, DetectMultiBackend
from models.yolo import Model from models.yolo import Model
from utils.downloads import attempt_download from utils.downloads import attempt_download
from utils.general import check_requirements, intersect_dicts, set_logging
from utils.general import LOGGER, check_requirements, intersect_dicts, logging
from utils.torch_utils import select_device from utils.torch_utils import select_device


if not verbose:
LOGGER.setLevel(logging.WARNING)
check_requirements(exclude=('tensorboard', 'thop', 'opencv-python')) check_requirements(exclude=('tensorboard', 'thop', 'opencv-python'))
set_logging(verbose=verbose)

name = Path(name) name = Path(name)
path = name.with_suffix('.pt') if name.suffix == '' else name # checkpoint path path = name.with_suffix('.pt') if name.suffix == '' else name # checkpoint path
try: try:

+ 27
- 27
utils/general.py View File

FILE = Path(__file__).resolve() FILE = Path(__file__).resolve()
ROOT = FILE.parents[1] # YOLOv5 root directory ROOT = FILE.parents[1] # YOLOv5 root directory
NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLOv5 multiprocessing threads NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLOv5 multiprocessing threads
VERBOSE = str(os.getenv('VERBOSE', True)).lower() == 'true' # global verbose mode
VERBOSE = str(os.getenv('YOLOv5_VERBOSE', True)).lower() == 'true' # global verbose mode


torch.set_printoptions(linewidth=320, precision=5, profile='long') torch.set_printoptions(linewidth=320, precision=5, profile='long')
np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5 np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5
def check_git_status(): def check_git_status():
# Recommend 'git pull' if code is out of date # Recommend 'git pull' if code is out of date
msg = ', for updates see https://github.com/ultralytics/yolov5' msg = ', for updates see https://github.com/ultralytics/yolov5'
print(colorstr('github: '), end='')
assert Path('.git').exists(), 'skipping check (not a git repository)' + msg
assert not is_docker(), 'skipping check (Docker image)' + msg
assert check_online(), 'skipping check (offline)' + msg
s = colorstr('github: ') # string
assert Path('.git').exists(), s + 'skipping check (not a git repository)' + msg
assert not is_docker(), s + 'skipping check (Docker image)' + msg
assert check_online(), s + 'skipping check (offline)' + msg


cmd = 'git fetch && git config --get remote.origin.url' cmd = 'git fetch && git config --get remote.origin.url'
url = check_output(cmd, shell=True, timeout=5).decode().strip().rstrip('.git') # git fetch url = check_output(cmd, shell=True, timeout=5).decode().strip().rstrip('.git') # git fetch
branch = check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip() # checked out branch = check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip() # checked out
n = int(check_output(f'git rev-list {branch}..origin/master --count', shell=True)) # commits behind n = int(check_output(f'git rev-list {branch}..origin/master --count', shell=True)) # commits behind
if n > 0: if n > 0:
s = f"⚠️ YOLOv5 is out of date by {n} commit{'s' * (n > 1)}. Use `git pull` or `git clone {url}` to update."
s += f"⚠️ YOLOv5 is out of date by {n} commit{'s' * (n > 1)}. Use `git pull` or `git clone {url}` to update."
else: else:
s = f'up to date with {url} ✅'
print(emojis(s)) # emoji-safe
s += f'up to date with {url} ✅'
LOGGER.info(emojis(s)) # emoji-safe




def check_python(minimum='3.6.2'): def check_python(minimum='3.6.2'):
except Exception as e: # DistributionNotFound or VersionConflict if requirements not met except Exception as e: # DistributionNotFound or VersionConflict if requirements not met
s = f"{prefix} {r} not found and is required by YOLOv5" s = f"{prefix} {r} not found and is required by YOLOv5"
if install: if install:
print(f"{s}, attempting auto-update...")
LOGGER.info(f"{s}, attempting auto-update...")
try: try:
assert check_online(), f"'pip install {r}' skipped (offline)" assert check_online(), f"'pip install {r}' skipped (offline)"
print(check_output(f"pip install '{r}'", shell=True).decode())
LOGGER.info(check_output(f"pip install '{r}'", shell=True).decode())
n += 1 n += 1
except Exception as e: except Exception as e:
print(f'{prefix} {e}')
LOGGER.warning(f'{prefix} {e}')
else: else:
print(f'{s}. Please install and rerun your command.')
LOGGER.info(f'{s}. Please install and rerun your command.')


if n: # if packages updated if n: # if packages updated
source = file.resolve() if 'file' in locals() else requirements source = file.resolve() if 'file' in locals() else requirements
s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \ s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \
f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n" f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n"
print(emojis(s))
LOGGER.info(emojis(s))




def check_img_size(imgsz, s=32, floor=0): def check_img_size(imgsz, s=32, floor=0):
else: # list i.e. img_size=[640, 480] else: # list i.e. img_size=[640, 480]
new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz] new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz]
if new_size != imgsz: if new_size != imgsz:
print(f'WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}')
LOGGER.warning(f'WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}')
return new_size return new_size




cv2.waitKey(1) cv2.waitKey(1)
return True return True
except Exception as e: except Exception as e:
print(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}')
LOGGER.warning(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}')
return False return False




url = str(Path(file)).replace(':/', '://') # Pathlib turns :// -> :/ url = str(Path(file)).replace(':/', '://') # Pathlib turns :// -> :/
file = Path(urllib.parse.unquote(file).split('?')[0]).name # '%2F' to '/', split https://url.com/file.txt?auth file = Path(urllib.parse.unquote(file).split('?')[0]).name # '%2F' to '/', split https://url.com/file.txt?auth
if Path(file).is_file(): if Path(file).is_file():
print(f'Found {url} locally at {file}') # file already exists
LOGGER.info(f'Found {url} locally at {file}') # file already exists
else: else:
print(f'Downloading {url} to {file}...')
LOGGER.info(f'Downloading {url} to {file}...')
torch.hub.download_url_to_file(url, file) torch.hub.download_url_to_file(url, file)
assert Path(file).exists() and Path(file).stat().st_size > 0, f'File download failed: {url}' # check assert Path(file).exists() and Path(file).stat().st_size > 0, f'File download failed: {url}' # check
return file return file
if val: if val:
val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])] # val path val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])] # val path
if not all(x.exists() for x in val): if not all(x.exists() for x in val):
print('\nWARNING: Dataset not found, nonexistent paths: %s' % [str(x) for x in val if not x.exists()])
LOGGER.info('\nDataset not found, missing paths: %s' % [str(x) for x in val if not x.exists()])
if s and autodownload: # download script if s and autodownload: # download script
root = path.parent if 'path' in data else '..' # unzip directory i.e. '../' root = path.parent if 'path' in data else '..' # unzip directory i.e. '../'
if s.startswith('http') and s.endswith('.zip'): # URL if s.startswith('http') and s.endswith('.zip'): # URL
f = Path(s).name # filename f = Path(s).name # filename
print(f'Downloading {s} to {f}...')
LOGGER.info(f'Downloading {s} to {f}...')
torch.hub.download_url_to_file(s, f) torch.hub.download_url_to_file(s, f)
Path(root).mkdir(parents=True, exist_ok=True) # create root Path(root).mkdir(parents=True, exist_ok=True) # create root
ZipFile(f).extractall(path=root) # unzip ZipFile(f).extractall(path=root) # unzip
Path(f).unlink() # remove zip Path(f).unlink() # remove zip
r = None # success r = None # success
elif s.startswith('bash '): # bash script elif s.startswith('bash '): # bash script
print(f'Running {s} ...')
LOGGER.info(f'Running {s} ...')
r = os.system(s) r = os.system(s)
else: # python script else: # python script
r = exec(s, {'yaml': data}) # return None r = exec(s, {'yaml': data}) # return None
print(f"Dataset autodownload {f'success, saved to {root}' if r in (0, None) else 'failure'}\n")
LOGGER.info(f"Dataset autodownload {f'success, saved to {root}' if r in (0, None) else 'failure'}\n")
else: else:
raise Exception('Dataset not found.') raise Exception('Dataset not found.')


if Path(url).is_file(): # exists in current path if Path(url).is_file(): # exists in current path
Path(url).rename(f) # move to dir Path(url).rename(f) # move to dir
elif not f.exists(): elif not f.exists():
print(f'Downloading {url} to {f}...')
LOGGER.info(f'Downloading {url} to {f}...')
if curl: if curl:
os.system(f"curl -L '{url}' -o '{f}' --retry 9 -C -") # curl download, retry and resume on fail os.system(f"curl -L '{url}' -o '{f}' --retry 9 -C -") # curl download, retry and resume on fail
else: else:
torch.hub.download_url_to_file(url, f, progress=True) # torch download torch.hub.download_url_to_file(url, f, progress=True) # torch download
if unzip and f.suffix in ('.zip', '.gz'): if unzip and f.suffix in ('.zip', '.gz'):
print(f'Unzipping {f}...')
LOGGER.info(f'Unzipping {f}...')
if f.suffix == '.zip': if f.suffix == '.zip':
ZipFile(f).extractall(path=dir) # unzip ZipFile(f).extractall(path=dir) # unzip
elif f.suffix == '.gz': elif f.suffix == '.gz':


output[xi] = x[i] output[xi] = x[i]
if (time.time() - t) > time_limit: if (time.time() - t) > time_limit:
print(f'WARNING: NMS time limit {time_limit}s exceeded')
LOGGER.warning(f'WARNING: NMS time limit {time_limit}s exceeded')
break # time limit exceeded break # time limit exceeded


return output return output
p.requires_grad = False p.requires_grad = False
torch.save(x, s or f) torch.save(x, s or f)
mb = os.path.getsize(s or f) / 1E6 # filesize mb = os.path.getsize(s or f) / 1E6 # filesize
print(f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB")
LOGGER.info(f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB")




def print_mutation(results, hyp, save_dir, bucket): def print_mutation(results, hyp, save_dir, bucket):
f.write(s + ('%20.5g,' * n % vals).rstrip(',') + '\n') f.write(s + ('%20.5g,' * n % vals).rstrip(',') + '\n')


# Print to screen # Print to screen
print(colorstr('evolve: ') + ', '.join(f'{x.strip():>20s}' for x in keys))
print(colorstr('evolve: ') + ', '.join(f'{x:20.5g}' for x in vals), end='\n\n\n')
LOGGER.info(colorstr('evolve: ') + ', '.join(f'{x.strip():>20s}' for x in keys))
LOGGER.info(colorstr('evolve: ') + ', '.join(f'{x:20.5g}' for x in vals) + '\n\n')


# Save yaml # Save yaml
with open(evolve_yaml, 'w') as f: with open(evolve_yaml, 'w') as f:

+ 3
- 3
utils/plots.py View File

return ImageFont.truetype(str(font) if font.exists() else font.name, size) return ImageFont.truetype(str(font) if font.exists() else font.name, size)
except Exception as e: # download if missing except Exception as e: # download if missing
url = "https://ultralytics.com/assets/" + font.name url = "https://ultralytics.com/assets/" + font.name
print(f'Downloading {url} to {font}...')
LOGGER.info(f'Downloading {url} to {font}...')
torch.hub.download_url_to_file(url, str(font), progress=False) torch.hub.download_url_to_file(url, str(font), progress=False)
try: try:
return ImageFont.truetype(str(font), size) return ImageFont.truetype(str(font), size)
ax[i].imshow(blocks[i].squeeze()) # cmap='gray' ax[i].imshow(blocks[i].squeeze()) # cmap='gray'
ax[i].axis('off') ax[i].axis('off')


print(f'Saving {f}... ({n}/{channels})')
LOGGER.info(f'Saving {f}... ({n}/{channels})')
plt.savefig(f, dpi=300, bbox_inches='tight') plt.savefig(f, dpi=300, bbox_inches='tight')
plt.close() plt.close()
np.save(str(f.with_suffix('.npy')), x[0].cpu().numpy()) # npy save np.save(str(f.with_suffix('.npy')), x[0].cpu().numpy()) # npy save
# if j in [8, 9, 10]: # share train and val loss y axes # if j in [8, 9, 10]: # share train and val loss y axes
# ax[i].get_shared_y_axes().join(ax[i], ax[i - 5]) # ax[i].get_shared_y_axes().join(ax[i], ax[i - 5])
except Exception as e: except Exception as e:
print(f'Warning: Plotting error for {f}: {e}')
LOGGER.info(f'Warning: Plotting error for {f}: {e}')
ax[1].legend() ax[1].legend()
fig.savefig(save_dir / 'results.png', dpi=200) fig.savefig(save_dir / 'results.png', dpi=200)
plt.close() plt.close()

Loading…
Cancel
Save