reencoder.py
import torch
from torch import nn
from view import *
from holder import *
from util import *
from join_table import *
from dropout_lstm import *
from locked_dropout import *


# Re-encoder: runs the encoded sequence G through a stack of RNN layers
# and caches the result in shared.M.
class ReEncoder(torch.nn.Module):
    def __init__(self, opt, shared):
        super(ReEncoder, self).__init__()
        self.opt = opt
        self.shared = shared
        # dropout applied to the input of each RNN layer
        self.drop = nn.Dropout(opt.dropout)

        self.bidir = opt.birnn == 1
        rnn_in_size = opt.hidden_size * 2 * 4
        # halve the hidden size when bidirectional so that every layer
        # outputs opt.hidden_size*2 features either way
        rnn_hidden_size = opt.hidden_size * 2 if not self.bidir else opt.hidden_size

        # stack single-layer RNNs manually rather than using one multi-layer
        # LSTM; PyTorch's multi-layer LSTM is sensitive to random seeding
        self.rnn = []
        for i in range(opt.reenc_rnn_layer):
            self.rnn.append(
                build_rnn(opt.rnn_type,
                    # later layers consume the previous layer's
                    # opt.hidden_size*2-dim output
                    input_size=rnn_in_size if i == 0 else opt.hidden_size * 2,
                    hidden_size=rnn_hidden_size,
                    num_layers=1,
                    bias=True,
                    batch_first=True,
                    dropout=opt.dropout,  # no effect with num_layers=1; self.drop handles inter-layer dropout
                    bidirectional=self.bidir))
        self.rnn = nn.ModuleList(self.rnn)

    def rnn_over(self, context):
        if self.opt.rnn_type == 'lstm' or self.opt.rnn_type == 'gru':
            M = context
            for i in range(self.opt.reenc_rnn_layer):
                M, _ = self.rnn[i](self.drop(M))
            return M
        else:
            assert False, 'unsupported rnn_type: {0}'.format(self.opt.rnn_type)

    def forward(self, G):
        self.update_context()
        M = self.rnn_over(G)
        self.shared.M = M
        return M

    def update_context(self):
        pass

    def begin_pass(self):
        pass

    def end_pass(self):
        pass


if __name__ == '__main__':
    pass
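
# ---------------------------------------------------------------------------
# A minimal usage sketch, not part of the original file: it assumes the
# repo's util.build_rnn is importable, fakes opt and shared with
# types.SimpleNamespace (stand-ins for the real option parser and Holder
# objects), and uses arbitrary batch/sequence sizes.
if __name__ == '__main__':
    import types

    opt = types.SimpleNamespace(
        dropout=0.1,        # rate for nn.Dropout between layers
        birnn=1,            # 1 -> bidirectional layers
        hidden_size=100,    # base hidden size
        reenc_rnn_layer=2,  # number of stacked single-layer RNNs
        rnn_type='lstm')    # 'lstm' or 'gru'
    shared = types.SimpleNamespace()

    reenc = ReEncoder(opt, shared)
    # input shape (batch, seq_len, 8*hidden_size), matching rnn_in_size above
    G = torch.randn(4, 30, opt.hidden_size * 2 * 4)
    M = reenc(G)
    print(M.shape)  # torch.Size([4, 30, 200]) = 2*hidden_size; also cached in shared.M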