Browse Source

Remove DDP MultiHeadAttention fix (#3768)

modify Dataloader
Glenn Jocher GitHub 3 years ago
parent
commit
f2d97ebb25
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
1 changed files with 1 additions and 3 deletions
  1. +1
    -3
      train.py

+ 1
- 3
train.py View File

@@ -252,9 +252,7 @@ def train(hyp, # path/to/hyp.yaml or hyp dictionary

# DDP mode
if cuda and RANK != -1:
- model = DDP(model, device_ids=[LOCAL_RANK], output_device=LOCAL_RANK,
-             # nn.MultiheadAttention incompatibility with DDP https://github.com/pytorch/pytorch/issues/26698
-             find_unused_parameters=any(isinstance(layer, nn.MultiheadAttention) for layer in model.modules()))
+ model = DDP(model, device_ids=[LOCAL_RANK], output_device=LOCAL_RANK)

# Model parameters
hyp['box'] *= 3. / nl # scale to layers

Loading…
Cancel
Save