-
Notifications
You must be signed in to change notification settings - Fork 0
/
unet.py
129 lines (96 loc) · 3.63 KB
/
unet.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
"""
Simple Unet Structure.
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
class Conv3(nn.Module):
    """3x3 convolutional block: a channel-matching head plus two more
    conv stages, with an optional scaled residual connection.

    Each stage is Conv2d(3x3, stride 1, pad 1) -> GroupNorm(8 groups) -> ReLU,
    so spatial size is preserved. When ``is_res`` is True the output is
    ``(h + conv(h)) / 1.414`` — the ~sqrt(2) divisor keeps the variance of
    the summed residual roughly constant.
    """

    def __init__(
        self, in_channels: int, out_channels: int, is_res: bool = False
    ) -> None:
        super().__init__()

        def _unit(ch_in: int, ch_out: int) -> list:
            # One conv stage; GroupNorm(8, ...) requires ch_out % 8 == 0.
            return [
                nn.Conv2d(ch_in, ch_out, 3, 1, 1),
                nn.GroupNorm(8, ch_out),
                nn.ReLU(),
            ]

        # Head: maps in_channels -> out_channels.
        self.main = nn.Sequential(*_unit(in_channels, out_channels))
        # Body: two further stages at constant width.
        self.conv = nn.Sequential(
            *_unit(out_channels, out_channels),
            *_unit(out_channels, out_channels),
        )
        self.is_res = is_res

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        h = self.main(x)
        if not self.is_res:
            return self.conv(h)
        # Residual around the body only (the head already changed width),
        # rescaled to temper the variance growth from the addition.
        return (h + self.conv(h)) / 1.414
class UnetDown(nn.Module):
    """Encoder stage: a Conv3 feature block followed by 2x max-pooling,
    halving the spatial resolution."""

    def __init__(self, in_channels: int, out_channels: int) -> None:
        super().__init__()
        self.model = nn.Sequential(
            Conv3(in_channels, out_channels),
            nn.MaxPool2d(2),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.model(x)
class UnetUp(nn.Module):
    """Decoder stage: concatenates the incoming feature map with its skip
    connection along channels, upsamples 2x via transposed convolution,
    then refines with two Conv3 blocks.

    Note: ``in_channels`` is the channel count AFTER concatenation.
    """

    def __init__(self, in_channels: int, out_channels: int) -> None:
        super().__init__()
        self.model = nn.Sequential(
            nn.ConvTranspose2d(in_channels, out_channels, 2, 2),
            Conv3(out_channels, out_channels),
            Conv3(out_channels, out_channels),
        )

    def forward(self, x: torch.Tensor, skip: torch.Tensor) -> torch.Tensor:
        # Channel-wise concat of the upsampling path and the encoder skip.
        merged = torch.cat((x, skip), 1)
        return self.model(merged)
class TimeSiren(nn.Module):
    """Sinusoidal (SIREN-style) embedding of a scalar timestep.

    Flattens the input to shape (N, 1), applies a bias-free linear layer,
    a sine nonlinearity, then a second linear layer, yielding (N, emb_dim).
    """

    def __init__(self, emb_dim: int) -> None:
        super().__init__()
        # No bias on the first layer: the sine then passes through zero at t=0.
        self.lin1 = nn.Linear(1, emb_dim, bias=False)
        self.lin2 = nn.Linear(emb_dim, emb_dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        t = x.view(-1, 1)
        return self.lin2(torch.sin(self.lin1(t)))
class NaiveUnet(nn.Module):
    """A small time-conditioned U-Net.

    Encoder: init Conv3 (residual) then three UnetDown stages (8x total
    downsampling). The bottleneck is average-pooled by 4, shifted by a
    learned time embedding, and re-expanded 4x by ``up0``. Decoder: three
    UnetUp stages with skip connections from the encoder, and a final 3x3
    conv over the last decoder output concatenated with the initial
    features. Spatial input size must therefore be a multiple of 32
    (assumed; confirm against callers).
    """

    def __init__(self, in_channels: int, out_channels: int, n_feat: int = 256) -> None:
        super().__init__()

        self.in_channels = in_channels
        self.out_channels = out_channels
        self.n_feat = n_feat

        self.init_conv = Conv3(in_channels, n_feat, is_res=True)

        # Encoder: channel widths n_feat -> n_feat -> 2n -> 2n.
        self.down1 = UnetDown(n_feat, n_feat)
        self.down2 = UnetDown(n_feat, 2 * n_feat)
        self.down3 = UnetDown(2 * n_feat, 2 * n_feat)

        # Collapse the 4x4 bottleneck to 1x1 (AvgPool) before conditioning.
        self.to_vec = nn.Sequential(nn.AvgPool2d(4), nn.ReLU())

        self.timeembed = TimeSiren(2 * n_feat)

        # Re-expand the conditioned bottleneck back to 4x4.
        self.up0 = nn.Sequential(
            nn.ConvTranspose2d(2 * n_feat, 2 * n_feat, 4, 4),
            nn.GroupNorm(8, 2 * n_feat),
            nn.ReLU(),
        )

        # Decoder input widths account for the concatenated skip tensors.
        self.up1 = UnetUp(4 * n_feat, 2 * n_feat)
        self.up2 = UnetUp(4 * n_feat, n_feat)
        self.up3 = UnetUp(2 * n_feat, n_feat)
        self.out = nn.Conv2d(2 * n_feat, self.out_channels, 3, 1, 1)

    def forward(self, x: torch.Tensor, t: torch.Tensor) -> torch.Tensor:
        h0 = self.init_conv(x)

        d1 = self.down1(h0)
        d2 = self.down2(d1)
        d3 = self.down3(d2)

        bottleneck = self.to_vec(d3)
        # Time embedding broadcast over spatial dims as a per-channel shift.
        temb = self.timeembed(t).view(-1, 2 * self.n_feat, 1, 1)

        u0 = self.up0(bottleneck + temb)
        u1 = self.up1(u0, d3) + temb
        u2 = self.up2(u1, d2)
        u3 = self.up3(u2, d1)

        return self.out(torch.cat((u3, h0), 1))