# nets.py
import base_net as bn
import torch
import torch.nn as nn
import torch.nn.functional as F

size = (1, 100, 1)
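
# The classes below rely on base_net.BaseNet, which is not shown in this
# file. From the way it is used here, it presumably builds four linear
# layers self.map1 .. self.map4 from `size` and exposes the activation
# selected by `activ_fn_name` as self.activ_fn. A minimal sketch of that
# assumed interface (hypothetical, for orientation only):
#
#     class BaseNet(nn.Module):
#         def __init__(self, size, activ_fn_name='relu'):
#             super(BaseNet, self).__init__()
#             in_dim, hidden_dim, out_dim = size
#             self.map1 = nn.Linear(in_dim, hidden_dim)
#             self.map2 = nn.Linear(hidden_dim, hidden_dim)
#             self.map3 = nn.Linear(hidden_dim, hidden_dim)
#             self.map4 = nn.Linear(hidden_dim, out_dim)
#             self.activ_fn = {'relu': F.relu, 'tanh': torch.tanh}[activ_fn_name]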

# Vanilla Generator, Discriminator, and Encoder
# Three linear hidden layers; the Discriminator output is squashed with a sigmoid
class Generator(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(Generator, self).__init__(size, activ_fn_name)

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.activ_fn(self.map2(x))
        x = self.activ_fn(self.map3(x))
        return self.map4(x)


class Discriminator(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(Discriminator, self).__init__(size, activ_fn_name)

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.activ_fn(self.map2(x))
        x = self.activ_fn(self.map3(x))
        # squash output to a probability in (0, 1)
        return torch.sigmoid(self.map4(x))


class Encoder(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(Encoder, self).__init__(size, activ_fn_name)

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.activ_fn(self.map2(x))
        x = self.activ_fn(self.map3(x))
        return self.map4(x)
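
# Usage sketch (hypothetical; `size` dims as defined above):
#     G = Generator(size)        # noise -> generated sample
#     D = Discriminator(size)    # sample -> probability of being real
#     E = Encoder(size)          # sample -> latent code (BiGAN-style pairing)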


# Bayesian Generator and Discriminator
# Same architecture with dropout layers added after the hidden activations
class BayesGenerator(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(BayesGenerator, self).__init__(size, activ_fn_name)
        self.drop1 = nn.Dropout()
        self.drop2 = nn.Dropout()
        self.drop3 = nn.Dropout()

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.drop1(x)
        x = self.activ_fn(self.map2(x))
        x = self.drop2(x)
        x = self.activ_fn(self.map3(x))
        # x = self.drop3(x)
        return self.map4(x)


class BayesDiscriminator(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(BayesDiscriminator, self).__init__(size, activ_fn_name)
        self.drop1 = nn.Dropout()
        self.drop2 = nn.Dropout()
        self.drop3 = nn.Dropout()

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.drop1(x)
        x = self.activ_fn(self.map2(x))
        x = self.drop2(x)
        x = self.activ_fn(self.map3(x))
        # x = self.drop3(x)
        return torch.sigmoid(self.map4(x))
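
# Note: nn.Dropout() defaults to p=0.5 and is only active in train mode.
# For Monte Carlo dropout at inference (the usual reading of "Bayes" nets
# like these), keep the module in train() mode and average several
# stochastic forward passes, e.g. (sketch; names are hypothetical):
#     bayes_d = BayesDiscriminator(size)
#     bayes_d.train()  # keep dropout active
#     probs = torch.stack([bayes_d(x) for _ in range(n_samples)])
#     mean, var = probs.mean(dim=0), probs.var(dim=0)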


# Bayes Logan Generator and Discriminator
# Dropout after every hidden layer; the Discriminator output is not squashed
class Bayes_Logan_Generator(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(Bayes_Logan_Generator, self).__init__(size, activ_fn_name)
        self.drop1 = nn.Dropout()
        self.drop2 = nn.Dropout()
        self.drop3 = nn.Dropout()

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.drop1(x)
        x = self.activ_fn(self.map2(x))
        x = self.drop2(x)
        x = self.activ_fn(self.map3(x))
        x = self.drop3(x)
        return self.map4(x)


class Bayes_Logan_Discriminator(bn.BaseNet):
    def __init__(self, size, activ_fn_name='relu'):
        super(Bayes_Logan_Discriminator, self).__init__(size, activ_fn_name)
        self.drop1 = nn.Dropout()
        self.drop2 = nn.Dropout()
        self.drop3 = nn.Dropout()

    def forward(self, x):
        x = self.activ_fn(self.map1(x))
        x = self.drop1(x)
        x = self.activ_fn(self.map2(x))
        x = self.drop2(x)
        x = self.activ_fn(self.map3(x))
        x = self.drop3(x)
        # no sigmoid: returns raw logits
        return self.map4(x)
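
# Because the Logan Discriminator returns raw logits, pair it with a loss
# that applies the sigmoid internally, e.g. (sketch; names are hypothetical):
#     criterion = nn.BCEWithLogitsLoss()
#     loss = criterion(bayes_logan_d(x), labels)
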
# Bayes Generator and Discriminator based on BIGAN setup