You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748
  1. from models.common import *
  2. class Sum(nn.Module):
  3. # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070
  4. def __init__(self, n, weight=False): # n: number of inputs
  5. super(Sum, self).__init__()
  6. self.weight = weight # apply weights boolean
  7. self.iter = range(n - 1) # iter object
  8. if weight:
  9. self.w = nn.Parameter(-torch.arange(1., n) / 2, requires_grad=True) # layer weights
  10. def forward(self, x):
  11. y = x[0] # no weight
  12. if self.weight:
  13. w = torch.sigmoid(self.w) * 2
  14. for i in self.iter:
  15. y = y + x[i + 1] * w[i]
  16. else:
  17. for i in self.iter:
  18. y = y + x[i + 1]
  19. return y
  20. class GhostConv(nn.Module):
  21. def __init__(self, c1, c2, k=1, s=1, g=1, act=True): # ch_in, ch_out, kernel, stride, groups
  22. super(GhostConv, self).__init__()
  23. c_ = c2 // 2 # hidden channels
  24. self.cv1 = Conv(c1, c_, k, s, g, act)
  25. self.cv2 = Conv(c_, c_, 5, 1, c_, act)
  26. def forward(self, x):
  27. y = self.cv1(x)
  28. return torch.cat([y, self.cv2(y)], 1)
  29. class GhostBottleneck(nn.Module):
  30. def __init__(self, c1, c2, k, s):
  31. super(GhostBottleneck, self).__init__()
  32. c_ = c2 // 2
  33. self.conv = nn.Sequential(GhostConv(c1, c_, 1, 1), # pw
  34. DWConv(c_, c_, k, s, act=False) if s == 2 else nn.Identity(), # dw
  35. GhostConv(c_, c2, 1, 1, act=False)) # pw-linear
  36. self.shortcut = nn.Sequential(DWConv(c1, c1, k, s, act=False),
  37. Conv(c1, c2, 1, 1, act=False)) if s == 2 else nn.Identity()
  38. def forward(self, x):
  39. return self.conv(x) + self.shortcut(x)