This is an unofficial PyTorch implementation by Ignacio Oguiza - oguiza@gmail.com based on the LSTM-FCN and MLSTM-FCN architectures introduced by Karim et al. in "LSTM Fully Convolutional Networks for Time Series Classification" and "Multivariate LSTM-FCNs for Time Series Classification".

class RNN_FCNPlus[source]

RNN_FCNPlus(c_in, c_out, seq_len=None, hidden_size=100, rnn_layers=1, bias=True, cell_dropout=0, rnn_dropout=0.8, bidirectional=False, shuffle=True, fc_dropout=0.0, conv_layers=[128, 256, 128], kss=[7, 5, 3], se=0) :: _RNN_FCN_BasePlus

A hybrid architecture for time series classification/regression that runs two branches in parallel on the input: a recurrent branch (a plain RNN here) and a fully convolutional branch (a stack of ConvBlocks followed by global average pooling). The two branch outputs are concatenated and fed to a linear head that produces c_out outputs. With the default shuffle=True the recurrent branch processes the input without permuting it (batch, channels, steps), so seq_len is required to size the recurrent layer; with shuffle=False the input is permuted to (batch, steps, channels) first and seq_len may be omitted. See the usage sketch below.
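A minimal usage sketch, assuming the class is importable from tsai.models.RNN_FCNPlus (argument values are illustrative):

import torch
from tsai.models.RNN_FCNPlus import RNN_FCNPlus

bs, n_vars, seq_len, c_out = 16, 3, 12, 2
xb = torch.rand(bs, n_vars, seq_len)

# Default shuffle=True: the recurrent branch reads the raw (bs, n_vars, seq_len) tensor,
# so seq_len is required to size the recurrent layer.
model = RNN_FCNPlus(n_vars, c_out, seq_len)
assert model(xb).shape == (bs, c_out)

# shuffle=False: the input is permuted for the recurrent branch and seq_len may be omitted.
model = RNN_FCNPlus(n_vars, c_out, shuffle=False)
assert model(xb).shape == (bs, c_out)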

class LSTM_FCNPlus[source]

LSTM_FCNPlus(c_in, c_out, seq_len=None, hidden_size=100, rnn_layers=1, bias=True, cell_dropout=0, rnn_dropout=0.8, bidirectional=False, shuffle=True, fc_dropout=0.0, conv_layers=[128, 256, 128], kss=[7, 5, 3], se=0) :: _RNN_FCN_BasePlus

Same architecture as RNN_FCNPlus, with an LSTM as the recurrent branch.

class GRU_FCNPlus[source]

GRU_FCNPlus(c_in, c_out, seq_len=None, hidden_size=100, rnn_layers=1, bias=True, cell_dropout=0, rnn_dropout=0.8, bidirectional=False, shuffle=True, fc_dropout=0.0, conv_layers=[128, 256, 128], kss=[7, 5, 3], se=0) :: _RNN_FCN_BasePlus

Same architecture as RNN_FCNPlus, with a GRU as the recurrent branch. A configuration sketch follows below.
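The recurrent branch can be made deeper or bidirectional through the constructor arguments shown above. A small configuration sketch (the argument values are illustrative, not recommendations):

import torch
from tsai.models.RNN_FCNPlus import GRU_FCNPlus

bs, n_vars, seq_len, c_out = 16, 3, 12, 2
xb = torch.rand(bs, n_vars, seq_len)

# Two stacked, bidirectional GRU layers, with dropout on the RNN output and the head.
model = GRU_FCNPlus(n_vars, c_out, seq_len,
                    hidden_size=64, rnn_layers=2, bidirectional=True,
                    rnn_dropout=0.5, fc_dropout=0.2)
print(model(xb).shape)  # torch.Size([16, 2])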

class MRNN_FCNPlus[source]

MRNN_FCNPlus(*args, se=16, **kwargs) :: _RNN_FCN_BasePlus

Multivariate variant of RNN_FCNPlus: same architecture, but squeeze-and-excite blocks after the first two convolutional blocks are enabled by default (se=16, i.e. a channel reduction factor of 16).

class MLSTM_FCNPlus[source]

MLSTM_FCNPlus(*args, se=16, **kwargs) :: _RNN_FCN_BasePlus

Multivariate variant of LSTM_FCNPlus: same architecture, with squeeze-and-excite blocks enabled by default (se=16). See the comparison sketch below.
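The only difference between the M* variants and their base counterparts is the squeeze-and-excite default. A sketch comparing the two (parameter counts differ because of the extra SE layers):

from tsai.models.RNN_FCNPlus import LSTM_FCNPlus, MLSTM_FCNPlus
from tsai.models.utils import count_parameters

n_vars, seq_len, c_out = 3, 12, 2

plain = LSTM_FCNPlus(n_vars, c_out, seq_len)         # se=0: no squeeze-and-excite
m     = MLSTM_FCNPlus(n_vars, c_out, seq_len)        # se=16 by default
same  = LSTM_FCNPlus(n_vars, c_out, seq_len, se=16)  # equivalent to the M variant

# m and same have identical parameter counts; both exceed plain because of the SE layers.
print(count_parameters(plain), count_parameters(m), count_parameters(same))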

class MGRU_FCNPlus[source]

MGRU_FCNPlus(*args, se=16, **kwargs) :: _RNN_FCN_BasePlus

Multivariate variant of GRU_FCNPlus: same architecture, with squeeze-and-excite blocks enabled by default (se=16).

import torch
from fastcore.test import test_eq
from tsai.models.utils import count_parameters
from tsai.models.RNN_FCN import *      # non-Plus versions, used for the parameter-count comparison
from tsai.models.RNN_FCNPlus import *  # the Plus versions tested below

bs = 16
n_vars = 3
seq_len = 12
c_out = 2
xb = torch.rand(bs, n_vars, seq_len)
test_eq(RNN_FCNPlus(n_vars, c_out, seq_len)(xb).shape, [bs, c_out])
test_eq(LSTM_FCNPlus(n_vars, c_out, seq_len)(xb).shape, [bs, c_out])
test_eq(MLSTM_FCNPlus(n_vars, c_out, seq_len)(xb).shape, [bs, c_out])
test_eq(GRU_FCNPlus(n_vars, c_out, shuffle=False)(xb).shape, [bs, c_out])  # seq_len not needed when shuffle=False
test_eq(GRU_FCNPlus(n_vars, c_out, seq_len, shuffle=False)(xb).shape, [bs, c_out])
# The Plus and non-Plus implementations have the same number of parameters
test_eq(count_parameters(LSTM_FCNPlus(n_vars, c_out, seq_len)), count_parameters(LSTM_FCN(n_vars, c_out, seq_len)))
LSTM_FCNPlus(n_vars, c_out, seq_len, se=8)
LSTM_FCNPlus(
  (backbone): _RNN_FCN_Base_Backbone(
    (rnn): LSTM(12, 100, batch_first=True)
    (rnn_dropout): Dropout(p=0.8, inplace=False)
    (convblock1): ConvBlock(
      (0): Conv1d(3, 128, kernel_size=(7,), stride=(1,), padding=(3,), bias=False)
      (1): BatchNorm1d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU()
    )
    (se1): SqueezeExciteBlock(
      (avg_pool): GAP1d(
        (gap): AdaptiveAvgPool1d(output_size=1)
        (flatten): Flatten(full=False)
      )
      (fc): Sequential(
        (0): Linear(in_features=128, out_features=16, bias=False)
        (1): ReLU()
        (2): Linear(in_features=16, out_features=128, bias=False)
        (3): Sigmoid()
      )
    )
    (convblock2): ConvBlock(
      (0): Conv1d(128, 256, kernel_size=(5,), stride=(1,), padding=(2,), bias=False)
      (1): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU()
    )
    (se2): SqueezeExciteBlock(
      (avg_pool): GAP1d(
        (gap): AdaptiveAvgPool1d(output_size=1)
        (flatten): Flatten(full=False)
      )
      (fc): Sequential(
        (0): Linear(in_features=256, out_features=32, bias=False)
        (1): ReLU()
        (2): Linear(in_features=32, out_features=256, bias=False)
        (3): Sigmoid()
      )
    )
    (convblock3): ConvBlock(
      (0): Conv1d(256, 128, kernel_size=(3,), stride=(1,), padding=(1,), bias=False)
      (1): BatchNorm1d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU()
    )
    (gap): GAP1d(
      (gap): AdaptiveAvgPool1d(output_size=1)
      (flatten): Flatten(full=False)
    )
    (concat): Concat(dim=1)
  )
  (head): Sequential(
    (0): Linear(in_features=228, out_features=2, bias=True)
  )
)
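As the printed module above shows, the Plus models are split into a backbone (recurrent and convolutional branches, GAP and concat) and a head (a small Sequential ending in a Linear layer). A hypothetical transfer-learning sketch that swaps the head for a new number of target classes, assuming only this backbone/head attribute layout (not an official API):

import torch
import torch.nn as nn
from tsai.models.RNN_FCNPlus import LSTM_FCNPlus

model = LSTM_FCNPlus(c_in=3, c_out=2, seq_len=12)

# The head ends in a Linear layer whose in_features = hidden_size + last conv filters (228 by default).
in_features = model.head[0].in_features
model.head = nn.Sequential(nn.Linear(in_features, 5))  # retarget to 5 classes

xb = torch.rand(16, 3, 12)
print(model(xb).shape)  # torch.Size([16, 5])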