model.py
# -*- coding: utf-8 -*-
import torch.nn as nn


def init_weights(net):
    """Initialize weights as in the DCGAN paper: zero-centered normal, std=0.02."""
    for m in net.modules():
        if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
            nn.init.normal_(m.weight, 0.0, 0.02)
        elif isinstance(m, nn.BatchNorm2d):
            nn.init.normal_(m.weight, 1.0, 0.02)
            nn.init.constant_(m.bias, 0)
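

# Usage sketch (illustrative, not part of the original file): init_weights is
# meant to be applied right after constructing a model, before training, e.g.
#   netG = Generator()
#   init_weights(netG)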

class Generator(nn.Module):
    """DCGAN generator: maps a 100-dim latent vector to a 3 x 64 x 64 image."""

    def __init__(self):
        super(Generator, self).__init__()
        self.main = nn.Sequential(
            # input: z, 100 x 1 x 1
            nn.ConvTranspose2d(100, 1024, 4, 1, 0, bias=False),
            nn.BatchNorm2d(1024),
            nn.ReLU(inplace=True),
            # state size: 1024 x 4 x 4
            nn.ConvTranspose2d(1024, 512, 4, 2, 1, bias=False),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),
            # state size: 512 x 8 x 8
            nn.ConvTranspose2d(512, 256, 4, 2, 1, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
            # state size: 256 x 16 x 16
            nn.ConvTranspose2d(256, 128, 4, 2, 1, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            # state size: 128 x 32 x 32
            nn.ConvTranspose2d(128, 3, 4, 2, 1, bias=False),
            # nn.BatchNorm2d(3),  # wrong: BatchNorm must not be applied to the generator output
            nn.Tanh()
            # output: 3 x 64 x 64
        )

    def forward(self, z):
        x = self.main(z)
        return x

class Discriminator(nn.Module):
    """DCGAN discriminator: maps a 3 x 64 x 64 image to a real/fake probability."""

    def __init__(self):
        super(Discriminator, self).__init__()
        self.main = nn.Sequential(
            # input: 3 x 64 x 64
            nn.Conv2d(3, 64, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: 64 x 32 x 32
            nn.Conv2d(64, 128, 4, 2, 1, bias=False),
            nn.BatchNorm2d(128),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: 128 x 16 x 16
            nn.Conv2d(128, 256, 4, 2, 1, bias=False),
            nn.BatchNorm2d(256),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: 256 x 8 x 8
            nn.Conv2d(256, 512, 4, 2, 1, bias=False),
            nn.BatchNorm2d(512),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: 512 x 4 x 4
            nn.Conv2d(512, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
            # output: 1 x 1 x 1 per sample
        )

    def forward(self, x):
        y = self.main(x)
        # flatten (N, 1, 1, 1) to a 1-D tensor of N probabilities
        return y.view(-1, 1).squeeze(1)
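

# Illustrative smoke test (an addition, not part of the original file; it assumes
# only the classes defined above). It checks the shapes documented in the layer
# comments: G maps a (N, 100, 1, 1) latent batch to (N, 3, 64, 64) images, and
# D maps those images to N probabilities in (0, 1).
if __name__ == "__main__":
    import torch

    netG = Generator()
    netD = Discriminator()
    init_weights(netG)
    init_weights(netD)

    z = torch.randn(4, 100, 1, 1)  # batch of 4 latent vectors
    fake = netG(z)
    assert fake.shape == (4, 3, 64, 64)

    prob = netD(fake)
    assert prob.shape == (4,)
    print("G output:", tuple(fake.shape), "D output:", tuple(prob.shape))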