
Dynamic normalization layer selection (#7392)

* Dynamic normalization layer selection

Based on the normalization layers actually available in torch.nn. Torch 1.7 compatible; resolves https://github.com/ultralytics/yolov5/issues/7381

* Update train.py
Glenn Jocher committed 2 years ago
commit 4bb7eb8b84
1 changed file with 1 addition and 1 deletion:
  train.py  +1  -1

train.py

@@ -151,7 +151,7 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}")

g = [], [], [] # optimizer parameter groups
- bn = nn.BatchNorm2d, nn.LazyBatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d, nn.LazyInstanceNorm2d, nn.LayerNorm
+ bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
for v in model.modules():
if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter): # bias
g[2].append(v.bias)
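For context on the change above: the dynamically built bn tuple replaces a hard-coded list that references classes such as nn.LazyBatchNorm2d, which older torch builds (e.g. Torch 1.7, per the linked issue #7381) do not define, so referencing them raises AttributeError. Below is a minimal, self-contained sketch of how such a tuple is typically used in the surrounding optimizer setup to keep weight decay off normalization weights and biases; the toy model, learning rate, and weight_decay value are illustrative assumptions, not taken from this patch.

import torch
import torch.nn as nn

# Collect every normalization layer class exposed by this torch build, instead of
# hard-coding names (e.g. nn.LazyBatchNorm2d) that older releases do not define.
bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)

# Toy model standing in for the YOLOv5 model (illustrative only).
model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.ReLU(), nn.Conv2d(8, 4, 3))

g = [], [], []  # optimizer parameter groups: decayed weights, norm weights, biases
for v in model.modules():
    if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter):  # bias (no decay)
        g[2].append(v.bias)
    if isinstance(v, bn):  # normalization layer weight (no decay)
        g[1].append(v.weight)
    elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter):  # conv/linear weight (decay)
        g[0].append(v.weight)

optimizer = torch.optim.SGD(g[2], lr=0.01, momentum=0.9, nesterov=True)
optimizer.add_param_group({'params': g[0], 'weight_decay': 5e-4})  # weights with decay
optimizer.add_param_group({'params': g[1]})                        # norm weights, no decay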
