* Remove `encoding='ascii'` from YAML `open()` calls
* Reinstate `encoding='ascii'` (as explicit keyword arguments) in `emojis()`
* Modify dataloader
@@ -104,7 +104,7 @@ def kmean_anchors(dataset='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen
         return k

     if isinstance(dataset, str):  # *.yaml file
-        with open(dataset, encoding='ascii', errors='ignore') as f:
+        with open(dataset, errors='ignore') as f:
             data_dict = yaml.safe_load(f)  # model dict
         from utils.datasets import LoadImagesAndLabels
         dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True)
@@ -931,7 +931,7 @@ def dataset_stats(path='coco128.yaml', autodownload=False, verbose=False, profil
                 im.save(im_dir / Path(f).name, quality=75)  # save

     zipped, data_dir, yaml_path = unzip(Path(path))
-    with open(check_file(yaml_path), encoding='ascii', errors='ignore') as f:
+    with open(check_file(yaml_path), errors='ignore') as f:
         data = yaml.safe_load(f)  # data dict
         if zipped:
             data['path'] = data_dir  # TODO: should this be dir.resolve()?
@@ -112,7 +112,7 @@ def is_pip():

 def emojis(str=''):
     # Return platform-dependent emoji-safe version of string
-    return str.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else str
+    return str.encode().decode(encoding='ascii', errors='ignore') if platform.system() == 'Windows' else str


 def file_size(file):
@@ -250,7 +250,7 @@ def check_dataset(data, autodownload=True):

     # Read yaml (optional)
     if isinstance(data, (str, Path)):
-        with open(data, encoding='ascii', errors='ignore') as f:
+        with open(data, errors='ignore') as f:
             data = yaml.safe_load(f)  # dictionary

     # Parse yaml
@@ -62,7 +62,7 @@ def check_wandb_resume(opt):

 def process_wandb_config_ddp_mode(opt):
-    with open(check_file(opt.data), encoding='ascii', errors='ignore') as f:
+    with open(check_file(opt.data), errors='ignore') as f:
         data_dict = yaml.safe_load(f)  # data dict
     train_dir, val_dir = None, None
     if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX):
@@ -152,7 +152,7 @@ class WandbLogger():
                     self.wandb_artifact_data_dict = self.check_and_upload_dataset(opt)
             elif opt.data.endswith('_wandb.yaml'):  # When dataset is W&B artifact
-                with open(opt.data, encoding='ascii', errors='ignore') as f:
+                with open(opt.data, errors='ignore') as f:
                     data_dict = yaml.safe_load(f)
                 self.data_dict = data_dict
             else:  # Local .yaml dataset file or .zip file
@@ -186,7 +186,7 @@ class WandbLogger():
                                               opt.single_cls,
                                               'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem)
         print("Created dataset config file ", config_path)
-        with open(config_path, encoding='ascii', errors='ignore') as f:
+        with open(config_path, errors='ignore') as f:
             wandb_data_dict = yaml.safe_load(f)
         return wandb_data_dict