Masked Value Predictor (MVP) callback used to predict time series step values after a binary mask has been applied.

create_subsequence_mask[source]

create_subsequence_mask(o, r=0.15, lm=3, stateful=True, sync=False)

create_variable_mask[source]

create_variable_mask(o, r=0.15)

create_future_mask[source]

create_future_mask(o, r=0.15, sync=False)

# Every mask generator must return a mask with the same shape as its input
# tensor (batch x variables x steps).
t = torch.rand(16, 3, 100)
for m in (create_subsequence_mask(t, sync=False),
          create_subsequence_mask(t, sync=True),
          create_variable_mask(t),
          create_future_mask(t)):
    test_eq(m.shape, t.shape)
def _plot_mask(m, idx, label):
    # Render one sample of a mask as a heatmap; the title reports the mask's
    # mean, i.e. the fraction of values that ended up masked.
    plt.figure(figsize=(10, 3))
    plt.pcolormesh(m[idx], cmap='cool')
    plt.title(f'{label} mean: {m[idx].mean().item():.3f}')
    plt.show()

t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, r=.15)  # default settings
_plot_mask(mask, 0, 'sample 0 subsequence mask (sync=False) - default')
_plot_mask(mask, 1, 'sample 1 subsequence mask (sync=False) - default')

t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, r=.3)  # 30% of values masked
_plot_mask(mask, 0, 'sample 0 subsequence mask (r=.3)')

t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, lm=5)  # average length of mask = 5
_plot_mask(mask, 0, 'sample 0 subsequence mask (lm=5)')

t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, stateful=False)  # individual time steps masked
_plot_mask(mask, 0, 'per sample subsequence mask (stateful=False)')

t = torch.rand(1, 30, 100)
mask = create_subsequence_mask(t, sync=True)  # all time steps masked simultaneously
_plot_mask(mask, 0, 'per sample subsequence mask (sync=True)')

t = torch.rand(1, 30, 100)
mask = create_variable_mask(t)  # masked variables
_plot_mask(mask, 0, 'per sample variable mask')

t = torch.rand(1, 30, 100)
mask = create_future_mask(t, r=.15, sync=True)  # masked steps
_plot_mask(mask, 0, 'future mask')

t = torch.rand(1, 30, 100)
mask = create_future_mask(t, r=.15, sync=False)  # masked steps
_plot_mask(mask, 0, 'future mask')

create_mask[source]

create_mask(o, r=0.15, lm=3, stateful=True, sync=False, subsequence_mask=True, variable_mask=False, future_mask=False, custom_mask=None)

class MVP[source]

MVP(r:float=0.15, subsequence_mask:bool=True, lm:float=3.0, stateful:bool=True, sync:bool=False, variable_mask:bool=False, future_mask:bool=False, custom_mask:Optional=None, dropout:float=0.1, crit:callable=None, weights_path:Optional[str]=None, target_dir:str='./data/MVP', fname:str='model', save_best:bool=True, verbose:bool=False) :: Callback

Basic class handling tweaks of the training loop by changing a Learner in various events

Experiments

from fastai.data.transforms import *
from tsai.data.all import *
from tsai.models.utils import *
from tsai.models.layers import *
from tsai.learner import *
from tsai.models.TSTPlus import *
from tsai.models.InceptionTimePlus import *
# Load the UCR 'MoteStrain' dataset without splitting it into arrays, then
# sanity-check shapes/labels/splits (summary is printed below).
dsid = 'MoteStrain'
X, y, splits = get_UCR_data(dsid, split_data=False)
check_data(X, y, splits, False)
X      - shape: [1272 samples x 1 features x 84 timesteps]  type: memmap  dtype:float32  isnan: 0
y      - shape: (1272,)  type: memmap  dtype:<U1  n_classes: 2 (636 samples per class) ['1', '2']  isnan: False
splits - n_splits: 2 shape: [20, 1252]  overlap: [False]
# Self-supervised pretraining: note that y is NOT passed to get_ts_dls, so the
# dataloaders are unlabeled and the MVP callback drives training instead.
tfms  = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True)]  # standardize each variable independently
unlabeled_dls = get_ts_dls(X, splits=splits, tfms=tfms, batch_tfms=batch_tfms)
# fname=dsid saves the pretrained weights as data/MVP/MoteStrain.pth (see output below)
learn = ts_learner(unlabeled_dls, InceptionTimePlus, cbs=[MVP(fname=f'{dsid}')])
learn.fit_one_cycle(1, 3e-3)
epoch train_loss valid_loss time
0 1.092720 0.996880 00:16
best epoch:   0  val_loss: 0.996880 - pretrained weights_path='data/MVP/MoteStrain.pth'
# Continue pretraining from the previously saved weights via MVP's weights_path
# (the transfer confirmation is printed below).
learn = ts_learner(unlabeled_dls, InceptionTimePlus, cbs=[MVP(weights_path=f'data/MVP/{dsid}.pth')])
learn.fit_one_cycle(1, 3e-3)
weights from data/MVP/MoteStrain.pth successfully transferred!

epoch train_loss valid_loss time
0 1.006202 0.997518 00:16
best epoch:   0  val_loss: 0.997518 - pretrained weights_path='data/MVP/model.pth'
# Plot the model's masked-value predictions with a shared y-axis across subplots
learn.MVP.show_preds(sharey=True) # these preds are highly inaccurate as the model's been trained for just 1 epoch for testing purposes
# Supervised fine-tuning: y IS passed this time, and pretrained=True loads the
# weights produced by the MVP pretraining stage.
tfms  = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True)]
labeled_dls = get_ts_dls(X, y, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)
learn = ts_learner(labeled_dls, InceptionTimePlus, pretrained=True, weights_path=f'data/MVP/{dsid}.pth', metrics=accuracy)
learn.fit_one_cycle(1)
weights from data/MVP/MoteStrain.pth successfully transferred!

epoch train_loss valid_loss accuracy time
0 0.720919 0.690484 0.539137 00:16
# Enabling several mask types at once: MVP warns that only one of them will be
# applied (see the 'Only future_mask will be used' warning below).
tfms  = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True)]
unlabeled_dls = get_ts_dls(X, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)
fname = f'{dsid}_test'
mvp = MVP(subsequence_mask=True, sync='random', variable_mask=True, future_mask=True, fname=fname)
learn = ts_learner(unlabeled_dls, InceptionTimePlus, metrics=accuracy, cbs=mvp) # Metrics will not be used!
/Users/nacho/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:36: UserWarning: Only future_mask will be used
# A custom_mask (here a partial over create_future_mask) takes precedence over
# every built-in mask flag: MVP warns 'Only custom_mask will be used' (below).
tfms  = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True)]
unlabeled_dls = get_ts_dls(X, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)
fname = f'{dsid}_test'
mvp = MVP(subsequence_mask=True, sync='random', variable_mask=True, future_mask=True, custom_mask=partial(create_future_mask, r=.15),
                fname=fname)
learn = ts_learner(unlabeled_dls, InceptionTimePlus, metrics=accuracy, cbs=mvp) # Metrics will not be used!
/Users/nacho/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:34: UserWarning: Only custom_mask will be used