|
|
|
@@ -1,9 +1,13 @@
|
|
|
|
|
# This file contains modules common to various models
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from utils.utils import *
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def autopad(k):
    """Return the padding that keeps output size equal to input size ('same' padding).

    Accepts either a single int kernel size or a list of per-dimension kernel
    sizes; in the latter case a list of per-dimension pads is returned.
    """
    if isinstance(k, int):
        return k // 2
    return [dim // 2 for dim in k]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def DWConv(c1, c2, k=1, s=1, act=True):
    """Depth-wise convolution: a Conv whose group count is gcd(ch_in, ch_out).

    When c1 == c2 this is a true depthwise conv (one filter per channel);
    otherwise gcd gives the largest valid group count.
    """
    groups = math.gcd(c1, c2)
    return Conv(c1, c2, k, s, g=groups, act=act)
|
|
|
|
@@ -11,10 +15,9 @@ def DWConv(c1, c2, k=1, s=1, act=True):
|
|
|
|
|
|
|
|
|
|
class Conv(nn.Module):
    # Standard convolution: Conv2d -> BatchNorm2d -> LeakyReLU (or Identity).
    # NOTE(review): this span contained diff residue — two conflicting __init__
    # signatures and two conflicting self.conv assignments; kept the newer
    # variant (explicit padding parameter `p` falling back to autopad).
    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):  # ch_in, ch_out, kernel, stride, padding, groups
        """Build the conv/bn/activation stack.

        c1: input channels; c2: output channels; k: kernel size (int or list);
        s: stride; p: explicit padding, or None/0 to auto-compute 'same'
        padding via autopad(k); g: conv groups; act: apply LeakyReLU when True,
        identity otherwise.
        """
        super(Conv, self).__init__()
        # `p or autopad(k)`: any falsy p (None or 0) falls back to 'same' padding
        self.conv = nn.Conv2d(c1, c2, k, s, p or autopad(k), groups=g, bias=False)
        self.bn = nn.BatchNorm2d(c2)
        self.act = nn.LeakyReLU(0.1, inplace=True) if act else nn.Identity()
|
|
|
|
|
|
|
|
|
@@ -46,7 +49,7 @@ class BottleneckCSP(nn.Module):
|
|
|
|
|
self.cv1 = Conv(c1, c_, 1, 1)
|
|
|
|
|
self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False)
|
|
|
|
|
self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
|
|
|
|
|
self.cv4 = Conv(c2, c2, 1, 1)
|
|
|
|
|
self.cv4 = Conv(2 * c_, c2, 1, 1)
|
|
|
|
|
self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
|
|
|
|
|
self.act = nn.LeakyReLU(0.1, inplace=True)
|
|
|
|
|
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])
|
|
|
|
@@ -79,9 +82,9 @@ class Flatten(nn.Module):
|
|
|
|
|
|
|
|
|
|
class Focus(nn.Module):
    # Focus wh information into c-space: space-to-depth (2x2) followed by a Conv.
    # NOTE(review): this span contained diff residue — two conflicting __init__
    # signatures and two conflicting self.conv assignments; kept the newer,
    # more general variant (stride/padding/groups/act forwarded to Conv).
    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):  # ch_in, ch_out, kernel, stride, padding, groups
        """Build the Focus layer.

        The 2x2 pixel-unshuffle in forward() multiplies channels by 4, so the
        inner Conv takes c1 * 4 input channels. Remaining args are forwarded
        to Conv unchanged.
        """
        super(Focus, self).__init__()
        self.conv = Conv(c1 * 4, c2, k, s, p, g, act)

    def forward(self, x):  # x(b,c,w,h) -> y(b,4c,w/2,h/2)
        # Sample every other pixel in each spatial dim into 4 channel groups
        # (even/even, odd/even, even/odd, odd/odd), concatenate on the channel
        # axis, then convolve.
        return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
|
|
|
|
|