# FReLU https://arxiv.org/abs/2007.11824 --------------------------------------
class FReLU(nn.Module):
    """Funnel ReLU activation: y = max(x, T(x)).

    T(x) is a per-channel (depthwise) spatial condition implemented as a
    k x k grouped convolution followed by batch normalization, so the
    activation threshold adapts to local spatial context instead of being
    the constant 0 of plain ReLU.
    """

    def __init__(self, c1, k=3):  # ch_in, kernel
        super().__init__()  # fixed: was `super().__init()__()`, an AttributeError
        # groups=c1 makes the conv depthwise; padding of 1 preserves the
        # spatial size for the default k=3 (assumes k == 3 — padding is
        # hard-coded to 1, not k // 2; TODO confirm for other kernel sizes).
        self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1)
        self.bn = nn.BatchNorm2d(c1)

    # NOT a @staticmethod: forward reads instance state (self.conv, self.bn).
    # The original's @staticmethod + `self` parameter would have made
    # nn.Module.__call__ pass the input tensor as `self` and crash.
    def forward(self, x):
        # Element-wise max between the identity and the spatial condition.
        return torch.max(x, self.bn(self.conv(x)))