* Implement yaml.safe_load() and yaml.safe_dump()
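The core of the change: `yaml.safe_load()` is shorthand for `yaml.load()` with `yaml.SafeLoader`, which only constructs plain Python types (dict, list, str, int, float, bool, None). A minimal sketch of the equivalence (the `doc` string below is illustrative, not from the repo):

```python
import yaml

doc = "nc: 80\nnames: [person, bicycle, car]"  # toy YAML standing in for data/coco.yaml

# safe_load() is exactly load() with the SafeLoader; both refuse
# !!python/object tags that the old unsafe default Loader would construct.
assert yaml.safe_load(doc) == yaml.load(doc, Loader=yaml.SafeLoader)
assert yaml.safe_load(doc) == {'nc': 80, 'names': ['person', 'bicycle', 'car']}
```

`yaml.safe_dump()` is the mirror image: it only emits those basic types, which is why the hunks below also convert `opt.save_dir` between `str` and `Path` explicitly.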
@@ -30,6 +30,6 @@ names: [ 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', '
 
 # Print classes
 # with open('data/coco.yaml') as f:
-#   d = yaml.load(f, Loader=yaml.FullLoader)  # dict
+#   d = yaml.safe_load(f)  # dict
 #   for i, x in enumerate(d['names']):
 #     print(i, x)
@@ -72,7 +72,7 @@ class Model(nn.Module):
             import yaml  # for torch hub
             self.yaml_file = Path(cfg).name
             with open(cfg) as f:
-                self.yaml = yaml.load(f, Loader=yaml.SafeLoader)  # model dict
+                self.yaml = yaml.safe_load(f)  # model dict
 
         # Define model
         ch = self.yaml['ch'] = self.yaml.get('ch', ch)  # input channels
@@ -71,7 +71,7 @@ def test(data,
     if isinstance(data, str):
         is_coco = data.endswith('coco.yaml')
         with open(data) as f:
-            data = yaml.load(f, Loader=yaml.SafeLoader)
+            data = yaml.safe_load(f)
     check_dataset(data)  # check
     nc = 1 if single_cls else int(data['nc'])  # number of classes
     iouv = torch.linspace(0.5, 0.95, 10).to(device)  # iou vector for mAP@0.5:0.95
@@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)
 def train(hyp, opt, device, tb_writer=None):
     logger.info(colorstr('hyperparameters: ') + ', '.join(f'{k}={v}' for k, v in hyp.items()))
     save_dir, epochs, batch_size, total_batch_size, weights, rank = \
-        opt.save_dir, opt.epochs, opt.batch_size, opt.total_batch_size, opt.weights, opt.global_rank
+        Path(opt.save_dir), opt.epochs, opt.batch_size, opt.total_batch_size, opt.weights, opt.global_rank
 
     # Directories
     wdir = save_dir / 'weights'
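The new `Path(opt.save_dir)` wrap pairs with the `str(increment_path(...))` change further down: the namespace keeps `save_dir` as a plain `str` so that `vars(opt)` stays `safe_dump`-able, and `train()` converts it back to a `Path` once on entry so the `/` joins below keep working. A small sketch of the pattern (illustrative path):

```python
from pathlib import Path

save_dir = Path('runs/train/exp')  # plain str on opt, wrapped once inside train()
wdir = save_dir / 'weights'        # pathlib overloads '/' to join path components
print(wdir / 'last.pt')            # -> runs/train/exp/weights/last.pt
```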
@@ -52,16 +52,16 @@ def train(hyp, opt, device, tb_writer=None):
 
     # Save run settings
     with open(save_dir / 'hyp.yaml', 'w') as f:
-        yaml.dump(hyp, f, sort_keys=False)
+        yaml.safe_dump(hyp, f, sort_keys=False)
     with open(save_dir / 'opt.yaml', 'w') as f:
-        yaml.dump(vars(opt), f, sort_keys=False)
+        yaml.safe_dump(vars(opt), f, sort_keys=False)
 
     # Configure
     plots = not opt.evolve  # create plots
     cuda = device.type != 'cpu'
     init_seeds(2 + rank)
     with open(opt.data) as f:
-        data_dict = yaml.load(f, Loader=yaml.SafeLoader)  # data dict
+        data_dict = yaml.safe_load(f)  # data dict
     is_coco = opt.data.endswith('coco.yaml')
 
     # Logging- Doing this before checking the dataset. Might update data_dict
@@ -506,8 +506,9 @@ if __name__ == '__main__':
         assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist'
         apriori = opt.global_rank, opt.local_rank
         with open(Path(ckpt).parent.parent / 'opt.yaml') as f:
-            opt = argparse.Namespace(**yaml.load(f, Loader=yaml.SafeLoader))  # replace
-        opt.cfg, opt.weights, opt.resume, opt.batch_size, opt.global_rank, opt.local_rank = '', ckpt, True, opt.total_batch_size, *apriori  # reinstate
+            opt = argparse.Namespace(**yaml.safe_load(f))  # replace
+        opt.cfg, opt.weights, opt.resume, opt.batch_size, opt.global_rank, opt.local_rank = \
+            '', ckpt, True, opt.total_batch_size, *apriori  # reinstate
         logger.info('Resuming training from %s' % ckpt)
     else:
         # opt.hyp = opt.hyp or ('hyp.finetune.yaml' if opt.weights else 'hyp.scratch.yaml')
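On `--resume`, the `opt.yaml` written earlier with `yaml.safe_dump(vars(opt), ...)` is read back with `yaml.safe_load()` and re-wrapped in a namespace, so options survive a full round trip as basic types. A minimal sketch with toy option values:

```python
import argparse
import io

import yaml

opt = argparse.Namespace(weights='yolov5s.pt', epochs=300, save_dir='runs/train/exp')

buf = io.StringIO()                              # stands in for opt.yaml on disk
yaml.safe_dump(vars(opt), buf, sort_keys=False)  # what train.py writes
buf.seek(0)

restored = argparse.Namespace(**yaml.safe_load(buf))  # what --resume reads back
assert restored == opt  # Namespace equality compares the attribute dicts
```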
@@ -515,7 +516,7 @@ if __name__ == '__main__':
         assert len(opt.cfg) or len(opt.weights), 'either --cfg or --weights must be specified'
         opt.img_size.extend([opt.img_size[-1]] * (2 - len(opt.img_size)))  # extend to 2 sizes (train, test)
         opt.name = 'evolve' if opt.evolve else opt.name
-        opt.save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok | opt.evolve)  # increment run
+        opt.save_dir = str(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok | opt.evolve))  # increment run
 
     # DDP mode
     opt.total_batch_size = opt.batch_size
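The `str()` wrap here is what makes the `safe_dump` of `vars(opt)` above possible: `safe_dump()` refuses anything beyond basic YAML types, so a `Path` on `opt.save_dir` raises `RepresenterError` (the legacy `yaml.dump()` would instead serialize it as a `!!python/object` tag). A sketch of the failure mode (illustrative path):

```python
import yaml
from pathlib import Path

run_dir = Path('runs/train/exp')  # e.g. a run directory as a Path object

try:
    yaml.safe_dump({'save_dir': run_dir})  # Path is not a basic YAML type
except yaml.representer.RepresenterError as e:
    print(e)  # ('cannot represent an object', PosixPath('runs/train/exp'))

print(yaml.safe_dump({'save_dir': str(run_dir)}), end='')  # save_dir: runs/train/exp
```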
@@ -530,7 +531,7 @@ if __name__ == '__main__':
 
         # Hyperparameters
        with open(opt.hyp) as f:
-            hyp = yaml.load(f, Loader=yaml.SafeLoader)  # load hyps
+            hyp = yaml.safe_load(f)  # load hyps
 
         # Train
         logger.info(opt)
@@ -577,7 +578,7 @@ if __name__ == '__main__':
         assert opt.local_rank == -1, 'DDP mode not implemented for --evolve'
         opt.notest, opt.nosave = True, True  # only test/save final epoch
         # ei = [isinstance(x, (int, float)) for x in hyp.values()]  # evolvable indices
-        yaml_file = opt.save_dir / 'hyp_evolved.yaml'  # save best result here
+        yaml_file = Path(opt.save_dir) / 'hyp_evolved.yaml'  # save best result here
         if opt.bucket:
             os.system('gsutil cp gs://%s/evolve.txt .' % opt.bucket)  # download evolve.txt if exists
 
@@ -102,7 +102,7 @@ def kmean_anchors(path='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=10
 
     if isinstance(path, str):  # *.yaml file
         with open(path) as f:
-            data_dict = yaml.load(f, Loader=yaml.SafeLoader)  # model dict
+            data_dict = yaml.safe_load(f)  # model dict
         from utils.datasets import LoadImagesAndLabels
         dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True)
     else:
@@ -19,7 +19,7 @@ for last in path.rglob('*/**/last.pt'):
 
     # Load opt.yaml
     with open(last.parent.parent / 'opt.yaml') as f:
-        opt = yaml.load(f, Loader=yaml.SafeLoader)
+        opt = yaml.safe_load(f)
 
     # Get device count
     d = opt['device'].split(',')  # devices
@@ -550,7 +550,7 @@ def print_mutation(hyp, results, yaml_file='hyp_evolved.yaml', bucket=''):
         results = tuple(x[0, :7])
         c = '%10.4g' * len(results) % results  # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3)
         f.write('# Hyperparameter Evolution Results\n# Generations: %g\n# Metrics: ' % len(x) + c + '\n\n')
-        yaml.dump(hyp, f, sort_keys=False)
+        yaml.safe_dump(hyp, f, sort_keys=False)
 
     if bucket:
         os.system('gsutil cp evolve.txt %s gs://%s' % (yaml_file, bucket))  # upload
@@ -323,7 +323,7 @@ def plot_labels(labels, names=(), save_dir=Path(''), loggers=None):
 def plot_evolution(yaml_file='data/hyp.finetune.yaml'):  # from utils.plots import *; plot_evolution()
     # Plot hyperparameter evolution results in evolve.txt
     with open(yaml_file) as f:
-        hyp = yaml.load(f, Loader=yaml.SafeLoader)
+        hyp = yaml.safe_load(f)
     x = np.loadtxt('evolve.txt', ndmin=2)
     f = fitness(x)
     # weights = (f - f.min()) ** 2  # for weighted results
@@ -9,7 +9,7 @@ WANDB_ARTIFACT_PREFIX = 'wandb-artifact://'
 
 
 def create_dataset_artifact(opt):
     with open(opt.data) as f:
-        data = yaml.load(f, Loader=yaml.SafeLoader)  # data dict
+        data = yaml.safe_load(f)  # data dict
     logger = WandbLogger(opt, '', None, data, job_type='Dataset Creation')
@@ -55,7 +55,7 @@ def check_wandb_resume(opt):
 
 def process_wandb_config_ddp_mode(opt):
     with open(opt.data) as f:
-        data_dict = yaml.load(f, Loader=yaml.SafeLoader)  # data dict
+        data_dict = yaml.safe_load(f)  # data dict
     train_dir, val_dir = None, None
     if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX):
         api = wandb.Api()
@@ -73,7 +73,7 @@ def process_wandb_config_ddp_mode(opt):
     if train_dir or val_dir:
         ddp_data_path = str(Path(val_dir) / 'wandb_local_data.yaml')
         with open(ddp_data_path, 'w') as f:
-            yaml.dump(data_dict, f)
+            yaml.safe_dump(data_dict, f)
         opt.data = ddp_data_path
 
@@ -120,7 +120,7 @@ class WandbLogger():
                                      'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem)
             print("Created dataset config file ", config_path)
             with open(config_path) as f:
-                wandb_data_dict = yaml.load(f, Loader=yaml.SafeLoader)
+                wandb_data_dict = yaml.safe_load(f)
             return wandb_data_dict
 
     def setup_training(self, opt, data_dict):
@@ -192,7 +192,7 @@ class WandbLogger():
 
     def log_dataset_artifact(self, data_file, single_cls, project, overwrite_config=False):
         with open(data_file) as f:
-            data = yaml.load(f, Loader=yaml.SafeLoader)  # data dict
+            data = yaml.safe_load(f)  # data dict
         nc, names = (1, ['item']) if single_cls else (int(data['nc']), data['names'])
         names = {k: v for k, v in enumerate(names)}  # to index dictionary
         self.train_artifact = self.create_dataset_table(LoadImagesAndLabels(
@@ -206,7 +206,7 @@ class WandbLogger():
         path = data_file if overwrite_config else '_wandb.'.join(data_file.rsplit('.', 1))  # updated data.yaml path
         data.pop('download', None)
         with open(path, 'w') as f:
-            yaml.dump(data, f)
+            yaml.safe_dump(data, f)
 
         if self.job_type == 'Training':  # builds correct artifact pipeline graph
             self.wandb_run.use_artifact(self.val_artifact)