hamish
"Here, have potato"
"fold" over this building up an understanding of patterns
"unfold" until some condition is met
[RNN diagram: each element of the sequence goes through the same weights, neural network things happen ✨, and the state passed from one step to the next can be thought of as "memory"]
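A rough sketch of that idea in PyTorch (not part of the original, sizes made up): the same nn.RNN weights get applied to every sequence element, and the hidden state is the "memory" passed along.

import torch
import torch.nn as nn

# tiny RNN: 1 feature per step, hidden state of size 8 (arbitrary sizes)
rnn = nn.RNN(input_size=1, hidden_size=8)

sequence = torch.randn(10, 1, 1)   # (seq_len, batch, features)
outputs, hidden = rnn(sequence)    # same weights applied at every step

print(outputs.shape)  # torch.Size([10, 1, 8]) - one output per element
print(hidden.shape)   # torch.Size([1, 1, 8])  - the final "memory"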
Long Short Term Memory (LSTM)
Gated Recurrent Unit (GRU)
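Both are drop-in replacements for the plain RNN in PyTorch - a quick sketch (sizes made up, not from the original), the one gotcha being that the LSTM's hidden state comes as a (hidden, cell) pair.

import torch
import torch.nn as nn

sequence = torch.randn(10, 1, 1)              # (seq_len, batch, features)

gru = nn.GRU(input_size=1, hidden_size=8)
lstm = nn.LSTM(input_size=1, hidden_size=8)

_, gru_hidden = gru(sequence)                 # hidden: (1, 1, 8)
_, (lstm_hidden, lstm_cell) = lstm(sequence)  # LSTM also carries a cell state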
[Encoder-decoder diagram: inputs go into the Encoder, the Decoder produces the outputs]
This encoder-decoder setup is really useful
import torch
import torch.nn as nn


class Encoder(nn.Module):
    def __init__(self, input_dim, hidden_dim):
        super().__init__()
        self.gru = nn.GRU(input_dim, hidden_dim)

    def forward(self, x):
        # x: (seq_len, batch, input_dim)
        outputs, hidden = self.gru(x)
        return outputs, hidden


class Decoder(nn.Module):
    def __init__(self, output_dim, hidden_dim):
        super().__init__()
        self.output_dim = output_dim
        self.gru = nn.GRU(output_dim, hidden_dim)
        self.out = nn.Linear(hidden_dim, output_dim)

    def forward(self, x, hidden):
        # x: (1, batch, output_dim) - one timestep at a time
        output, hidden = self.gru(x, hidden)
        prediction = self.out(output[0])  # (batch, output_dim)
        return prediction, hidden


class Seq2Seq(nn.Module):
    def __init__(self, encoder, decoder):
        super().__init__()
        self.encoder = encoder
        self.decoder = decoder

    def forward(self, source, target_length, device):
        batch_size = source.shape[1]

        # create something to hold the predicted outputs
        outputs = torch.zeros(target_length, batch_size,
                              self.decoder.output_dim, device=device)

        encoder_outputs, encoder_hidden = self.encoder(source)

        # use the encoder's final hidden state as the decoder's initial hidden state
        decoder_hidden = encoder_hidden.to(device)

        # we need a first input here - in NLP we would typically use a special
        # start-of-sequence token, for numeric sequences zeros are fine
        decoder_input = torch.zeros(1, batch_size,
                                    self.decoder.output_dim, device=device)

        for t in range(target_length):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden)
            outputs[t] = decoder_output
            # feed the prediction back in as the next input
            decoder_input = decoder_output.unsqueeze(0)

        return outputs
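For completeness, a hypothetical way to wire it up - the dimensions and lengths below are made up, not from the original:

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

encoder = Encoder(input_dim=1, hidden_dim=16)
decoder = Decoder(output_dim=1, hidden_dim=16)
model = Seq2Seq(encoder, decoder).to(device)

source = torch.randn(10, 32, 1).to(device)   # 10 input steps, batch of 32, 1 feature
predictions = model(source, target_length=5, device=device)
print(predictions.shape)                     # torch.Size([5, 32, 1])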
sorry
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (GaussianNoise, GRU, RepeatVector,
                                      BatchNormalization, TimeDistributed, Dense)

# n_steps_in, n_steps_out and n_features are set elsewhere to match your data
model = Sequential()
model.add(GaussianNoise(0.01, input_shape=(n_steps_in, n_features)))  # a little noise as regularisation
model.add(GRU(20, activation='relu'))                                 # encoder
model.add(RepeatVector(n_steps_out))                                  # repeat the encoding for each output step
model.add(GRU(20, activation='relu', return_sequences=True))          # decoder
model.add(BatchNormalization())
model.add(TimeDistributed(Dense(1)))                                  # one prediction per output step
model.compile(optimizer='adam', loss='mse')
model.summary()
ugly matplotlib charts ftw
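A hedged sketch of how that might get used end to end - random numpy arrays stand in for real windowed data here, none of this is from the original:

import numpy as np
import matplotlib.pyplot as plt

# continuing from the model (and n_steps_in / n_steps_out / n_features) above,
# with random data shaped (samples, n_steps_in, n_features) -> (samples, n_steps_out, 1)
X = np.random.rand(100, n_steps_in, n_features)
y = np.random.rand(100, n_steps_out, 1)

model.fit(X, y, epochs=10, batch_size=16, verbose=0)
y_pred = model.predict(X[:1])

plt.plot(y[0].ravel(), label="actual")
plt.plot(y_pred[0].ravel(), label="predicted")
plt.legend()
plt.show()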