These are RNN, LSTM and GRU PyTorch implementations created by Ignacio Oguiza (oguiza@gmail.com), based on:
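All three models share the same interface: they expect an input tensor of shape [batch size, n channels, sequence length] and return one prediction per class, as the tests below verify.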
import torch
from fastcore.test import test_eq
from tsai.models.RNN import RNN, LSTM, GRU

bs = 16       # batch size
c_in = 3      # number of channels / variables
seq_len = 12  # number of time steps
c_out = 2     # number of classes
xb = torch.rand(bs, c_in, seq_len)
test_eq(RNN(c_in, c_out, hidden_size=100, n_layers=2, bias=True, rnn_dropout=0.2, bidirectional=True, fc_dropout=0.5)(xb).shape, [bs, c_out])
test_eq(RNN(c_in, c_out)(xb).shape, [bs, c_out])
test_eq(LSTM(c_in, c_out)(xb).shape, [bs, c_out])
test_eq(GRU(c_in, c_out)(xb).shape, [bs, c_out])
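All of the tests above map an input of shape [bs, c_in, seq_len] to predictions of shape [bs, c_out]. As a rough illustration of how such a model can be built in plain PyTorch (a minimal sketch, not the library code; SimpleLSTMClassifier is a hypothetical name, and the "last time step -> linear head" design is an assumption):

import torch.nn as nn

class SimpleLSTMClassifier(nn.Module):
    def __init__(self, c_in, c_out, hidden_size=100, n_layers=1, bidirectional=False, fc_dropout=0.):
        super().__init__()
        self.rnn = nn.LSTM(c_in, hidden_size, num_layers=n_layers, batch_first=True,
                           bidirectional=bidirectional)
        self.dropout = nn.Dropout(fc_dropout)
        self.fc = nn.Linear(hidden_size * (2 if bidirectional else 1), c_out)

    def forward(self, x):
        x = x.transpose(2, 1)                         # [bs, c_in, seq_len] -> [bs, seq_len, c_in]
        output, _ = self.rnn(x)                       # [bs, seq_len, hidden_size * n_directions]
        return self.fc(self.dropout(output[:, -1]))   # classify from the last time step

test_eq(SimpleLSTMClassifier(c_in, c_out)(xb).shape, [bs, c_out])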
from tsai.data.all import *
from fastai.learner import Learner
from fastai.metrics import accuracy

dsid = 'NATOPS'
bs = 16
X, y, splits = get_UCR_data(dsid, return_split=False)
tfms = [None, [Categorize()]]
dsets = TSDatasets(X, y, tfms=tfms, splits=splits, inplace=True)
dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=bs, num_workers=0, shuffle=False)
model = LSTM(dls.vars, dls.c)  # dls.vars = n channels, dls.c = n classes
learn = Learner(dls, model, metrics=accuracy)
learn.fit_one_cycle(1, 3e-3)
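After training, the usual fastai Learner methods apply; for example, to get validation predictions (standard fastai call; the values naturally depend on the run):

probas, targets = learn.get_preds(dl=dls.valid)
print(probas.shape, targets.shape)  # [n_valid, c_out] and [n_valid]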
from fastai.callback.hook import total_params

# 2-layer bidirectional RNN: architecture and parameter count
m = RNN(c_in, c_out, hidden_size=100, n_layers=2, bidirectional=True, rnn_dropout=.5, fc_dropout=.5)
print(m)
print(total_params(m))
m(xb).shape

# same configuration with an LSTM
m = LSTM(c_in, c_out, hidden_size=100, n_layers=2, bidirectional=True, rnn_dropout=.5, fc_dropout=.5)
print(m)
print(total_params(m))
m(xb).shape

# same configuration with a GRU
m = GRU(c_in, c_out, hidden_size=100, n_layers=2, bidirectional=True, rnn_dropout=.5, fc_dropout=.5)
print(m)
print(total_params(m))
m(xb).shape
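The printed counts can be sanity-checked by hand, assuming the standard PyTorch parameter layout (one weight_ih, one weight_hh and two bias vectors per layer and direction) and a model that is just the recurrent stack plus a linear head (the dropout layers add no parameters). With c_in=3, hidden_size=100, n_layers=2 and bidirectional=True:

# layer 0, per direction: 100*3   + 100*100 + 2*100 = 10,500
# layer 1, per direction: 100*200 + 100*100 + 2*100 = 30,200  (its input is 2*100)
# recurrent stack, 2 directions: 2 * (10,500 + 30,200) = 81,400
# head: Linear(200, 2) = 200*2 + 2 = 402
# An LSTM scales the recurrent parameters by 4 (gates), a GRU by 3:
for Model, gates in [(RNN, 1), (LSTM, 4), (GRU, 3)]:
    m = Model(c_in, c_out, hidden_size=100, n_layers=2, bidirectional=True)
    assert sum(p.numel() for p in m.parameters()) == gates * 81_400 + 402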