@@ -1,7 +1,6 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-import torch.nn as nn


 # Swish ------------------------------------------------------------------------
@@ -61,16 +60,15 @@ class Mish(nn.Module):  # https://github.com/digantamisra98/Mish
     @staticmethod
     def forward(x):
         return x * F.softplus(x).tanh()


 # FReLU https://arxiv.org/abs/2007.11824 --------------------------------------
 class FReLU(nn.Module):
     def __init__(self, c1, k=3):  # ch_in, kernel
-        super().__init()__()
+        super(FReLU, self).__init__()
         self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1)
         self.bn = nn.BatchNorm2d(c1)

-    @staticmethod
     def forward(self, x):
         return torch.max(x, self.bn(self.conv(x)))

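For reference, a minimal usage sketch of the two modules touched by this patch. Only the FReLU and Mish definitions above come from the diff; the module name "activations" in the import is an assumption about where the patched file lives.

import torch
from activations import FReLU, Mish  # hypothetical module path for the patched file

x = torch.randn(2, 64, 32, 32)  # NCHW feature map with 64 channels

mish = Mish()     # parameter-free, applied elementwise
frelu = FReLU(64) # c1 must match the incoming channel count; k=3 depthwise conv

print(mish(x).shape)   # torch.Size([2, 64, 32, 32])
print(frelu(x).shape)  # torch.Size([2, 64, 32, 32]); spatial size preserved by stride 1, padding 1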