extractor.py
import paddle.nn as nn

from weight_init import weight_init_


class ResidualBlock(nn.Layer):
    """Residual Block with custom normalization."""

    def __init__(self, in_planes, planes, norm_fn="group", stride=1):
        super(ResidualBlock, self).__init__()
        self.conv1 = nn.Conv2D(in_planes, planes, 3, padding=1, stride=stride)
        self.conv2 = nn.Conv2D(planes, planes, 3, padding=1)
        self.relu = nn.ReLU()

        # Select the normalization layers; norm3 is only needed on the
        # downsampling shortcut, i.e. when stride != 1.
        if norm_fn == "group":
            num_groups = planes // 8
            self.norm1 = nn.GroupNorm(num_groups, planes)
            self.norm2 = nn.GroupNorm(num_groups, planes)
            if stride != 1:
                self.norm3 = nn.GroupNorm(num_groups, planes)
        elif norm_fn == "batch":
            self.norm1 = nn.BatchNorm2D(planes)
            self.norm2 = nn.BatchNorm2D(planes)
            if stride != 1:
                self.norm3 = nn.BatchNorm2D(planes)
        elif norm_fn == "instance":
            self.norm1 = nn.InstanceNorm2D(planes)
            self.norm2 = nn.InstanceNorm2D(planes)
            if stride != 1:
                self.norm3 = nn.InstanceNorm2D(planes)
        elif norm_fn == "none":
            self.norm1 = nn.Sequential()
            self.norm2 = nn.Sequential()
            if stride != 1:
                self.norm3 = nn.Sequential()

        # 1x1 strided convolution on the shortcut so x and y have matching
        # shapes before they are added.
        if stride == 1:
            self.downsample = None
        else:
            self.downsample = nn.Sequential(
                nn.Conv2D(in_planes, planes, 1, stride=stride), self.norm3
            )

    def forward(self, x):
        y = x
        y = self.relu(self.norm1(self.conv1(y)))
        y = self.relu(self.norm2(self.conv2(y)))
        if self.downsample is not None:
            x = self.downsample(x)
        return self.relu(x + y)
class BasicEncoder(nn.Layer):
    """Basic Encoder with custom normalization."""

    def __init__(self, output_dim=128, norm_fn="batch"):
        super(BasicEncoder, self).__init__()
        self.norm_fn = norm_fn

        if self.norm_fn == "group":
            self.norm1 = nn.GroupNorm(8, 64)
        elif self.norm_fn == "batch":
            self.norm1 = nn.BatchNorm2D(64)
        elif self.norm_fn == "instance":
            self.norm1 = nn.InstanceNorm2D(64)
        elif self.norm_fn == "none":
            self.norm1 = nn.Sequential()

        self.conv1 = nn.Conv2D(3, 64, 7, stride=2, padding=3)
        self.relu1 = nn.ReLU()

        # Three residual stages; layer2 and layer3 halve the spatial
        # resolution, so together with conv1 the encoder downsamples by 8.
        self.in_planes = 64
        self.layer1 = self._make_layer(64, stride=1)
        self.layer2 = self._make_layer(128, stride=2)
        self.layer3 = self._make_layer(192, stride=2)

        self.conv2 = nn.Conv2D(192, output_dim, 1)

        # Kaiming init for convolutions, constant init for norm layers.
        for m in self.sublayers():
            if isinstance(m, nn.Conv2D):
                weight_init_(
                    m.weight, "kaiming_normal_", mode="fan_out", nonlinearity="relu"
                )
            elif isinstance(m, (nn.BatchNorm2D, nn.InstanceNorm2D, nn.GroupNorm)):
                weight_init_(m, "Constant", value=1, bias_value=0.0)

    def _make_layer(self, dim, stride=1):
        layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride)
        layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1)
        layers = (layer1, layer2)
        self.in_planes = dim
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.norm1(x)
        x = self.relu1(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.conv2(x)
        return x
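
A minimal usage sketch, not part of the original file: it assumes an RGB input batch of shape [N, 3, H, W] with H and W divisible by 8, since the stride-2 stem plus two stride-2 stages downsample the input by a factor of 8.

if __name__ == "__main__":
    import paddle

    # Hypothetical smoke test: a 256x256 RGB image should come out as a
    # [1, 128, 32, 32] feature map given output_dim=128.
    encoder = BasicEncoder(output_dim=128, norm_fn="instance")
    dummy = paddle.randn([1, 3, 256, 256])
    feats = encoder(dummy)
    print(feats.shape)  # expected: [1, 128, 32, 32]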