upload resnest and some new results on ImageNet
1 parent 062357f · commit e4d8ea7
Showing 14 changed files with 789 additions and 56 deletions.
@@ -1,3 +1,4 @@
 # coding=utf-8
 from .drop import get_drop
 from .activations import get_act
+from .split_attn import SplitAttnConv2d
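With the re-export above in place, downstream code can import the new layer from the package instead of the submodule. A one-line sketch of a consumer; the package path here is hypothetical, since the extract does not show file locations:

from models.layers import SplitAttnConv2d  # hypothetical package path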
@@ -0,0 +1,88 @@
""" Split Attention Conv2d (for ResNeSt Models)
Paper: `ResNeSt: Split-Attention Networks` - https://arxiv.org/abs/2004.08955
Adapted from original PyTorch impl at https://github.com/zhanghang1989/ResNeSt
Modified for torchscript compat, performance, and consistency with timm by Ross Wightman
"""
import torch
import torch.nn.functional as F
from torch import nn


class RadixSoftmax(nn.Module):
    def __init__(self, radix, cardinality):
        super(RadixSoftmax, self).__init__()
        self.radix = radix
        self.cardinality = cardinality

    def forward(self, x):
        batch = x.size(0)
        if self.radix > 1:
            # normalize attention across the radix splits, per cardinal group
            x = x.view(batch, self.cardinality, self.radix, -1).transpose(1, 2)
            x = F.softmax(x, dim=1)
            x = x.reshape(batch, -1)
        else:
            # with a single split, the attention reduces to a sigmoid gate
            x = torch.sigmoid(x)
        return x


class SplitAttnConv2d(nn.Module):
    """Split-Attention Conv2d
    """
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, bias=False, radix=2, reduction_factor=4,
                 act_layer=nn.ReLU, norm_layer=None, drop_block=None, **kwargs):
        super(SplitAttnConv2d, self).__init__()
        self.radix = radix
        self.drop_block = drop_block
        mid_chs = out_channels * radix
        attn_chs = max(in_channels * radix // reduction_factor, 32)

        self.conv = nn.Conv2d(
            in_channels, mid_chs, kernel_size, stride, padding, dilation,
            groups=groups * radix, bias=bias, **kwargs)
        self.bn0 = norm_layer(mid_chs) if norm_layer is not None else None
        self.act0 = act_layer(inplace=True)
        self.fc1 = nn.Conv2d(out_channels, attn_chs, 1, groups=groups)
        self.bn1 = norm_layer(attn_chs) if norm_layer is not None else None
        self.act1 = act_layer(inplace=True)
        self.fc2 = nn.Conv2d(attn_chs, mid_chs, 1, groups=groups)
        self.rsoftmax = RadixSoftmax(radix, groups)

    @property
    def in_channels(self):
        return self.conv.in_channels

    @property
    def out_channels(self):
        return self.fc1.out_channels

    def forward(self, x):
        x = self.conv(x)
        if self.bn0 is not None:
            x = self.bn0(x)
        if self.drop_block is not None:
            x = self.drop_block(x)
        x = self.act0(x)

        B, RC, H, W = x.shape
        if self.radix > 1:
            # split channels into radix groups and pool across the splits
            x = x.reshape((B, self.radix, RC // self.radix, H, W))
            x_gap = x.sum(dim=1)
        else:
            x_gap = x
        # squeeze-and-excite style: global context -> per-split channel logits
        x_gap = F.adaptive_avg_pool2d(x_gap, 1)
        x_gap = self.fc1(x_gap)
        if self.bn1 is not None:
            x_gap = self.bn1(x_gap)
        x_gap = self.act1(x_gap)
        x_attn = self.fc2(x_gap)

        x_attn = self.rsoftmax(x_attn).view(B, -1, 1, 1)
        if self.radix > 1:
            # weight each split by its attention and fuse the splits back together
            out = (x * x_attn.reshape((B, self.radix, RC // self.radix, 1, 1))).sum(dim=1)
        else:
            out = x * x_attn
        return out.contiguous()
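As a sanity check on the shapes, a minimal smoke test of the module above (my own sketch, not part of the commit; it assumes SplitAttnConv2d is in scope and uses the default radix=2, groups=1 with BatchNorm as the norm layer):

import torch
from torch import nn

# Drop-in replacement for a 3x3 conv inside a bottleneck block.
conv = SplitAttnConv2d(
    64, 128, kernel_size=3, padding=1,
    radix=2, groups=1, norm_layer=nn.BatchNorm2d)

x = torch.randn(2, 64, 56, 56)
y = conv(x)
print(y.shape)  # torch.Size([2, 128, 56, 56]) -- the radix splits are fused,
                # so the output has out_channels, not out_channels * radix

# RadixSoftmax normalizes attention over the splits: for radix=2, the two
# attention weights for each channel position sum to 1.
attn = conv.rsoftmax(torch.randn(2, 256)).view(2, 2, 128)
print(attn.sum(dim=1)[0, :3])  # approximately tensor([1., 1., 1.])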
@@ -126,6 +126,7 @@ def forward(self, x):
         return out


 class ResNet(nn.Module):
     """ResNet Variants