|
|
|
@ -3,9 +3,11 @@
|
|
|
|
|
from utils.utils import *
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def autopad(k, p=None):  # kernel, padding
    """Compute padding that yields 'same' spatial output size.

    Args:
        k: kernel size — an int, or a list of ints (one per dimension).
        p: explicit padding. If None, padding is auto-computed as k // 2
           for each dimension (the 'same' padding for odd kernels, stride 1).

    Returns:
        The padding to pass to nn.Conv2d: an int, or a list of ints
        matching the shape of ``k``.
    """
    # Pad to 'same'
    if p is None:
        p = k // 2 if isinstance(k, int) else [x // 2 for x in k]  # auto-pad
    return p
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def DWConv(c1, c2, k=1, s=1, act=True):
|
|
|
|
@ -17,7 +19,7 @@ class Conv(nn.Module):
|
|
|
|
|
# Standard convolution
def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):  # ch_in, ch_out, kernel, stride, padding, groups
    """Standard convolution block: Conv2d -> BatchNorm2d -> activation.

    Args:
        c1: input channels.
        c2: output channels.
        k: kernel size.
        s: stride.
        p: padding; if None, 'same' padding is computed by autopad(k, p).
        g: number of groups for the convolution.
        act: if True, apply LeakyReLU(0.1); otherwise Identity (no-op).
    """
    super(Conv, self).__init__()
    # autopad(k, p) honors an explicit p — including p=0 — and only
    # auto-computes padding when p is None (the old `p or autopad(k)`
    # form wrongly treated p=0 as "not given").
    self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
    self.bn = nn.BatchNorm2d(c2)
    self.act = nn.LeakyReLU(0.1, inplace=True) if act else nn.Identity()
|
|
|
|
|
|
|
|
|
|