From 31869b7ea470f5a7f3b85b3dfe5b89b7c50553a1 Mon Sep 17 00:00:00 2001
From: fedepup
Date: Wed, 10 Apr 2024 12:14:26 +0200
Subject: [PATCH] ConstrainedConv1d now has causal padding

---
 selfeeg/models/layers.py           | 28 +++++++++++++++++++++++++---
 test/EEGself/models/layers_test.py |  2 +-
 2 files changed, 26 insertions(+), 4 deletions(-)

diff --git a/selfeeg/models/layers.py b/selfeeg/models/layers.py
index 1034367..e8a8da2 100644
--- a/selfeeg/models/layers.py
+++ b/selfeeg/models/layers.py
@@ -213,7 +213,8 @@ class ConstrainedConv1d(nn.Conv1d):
         Default = 1
 
     padding: int, tuple or str, optional
-        Padding added to all four sides of the input.
+        Padding added to both sides of the input. This class also accepts the
+        string 'causal', which triggers a causal convolution as in WaveNet.
         Default = 0
 
     dilation: int or tuple, optional
@@ -268,6 +269,15 @@ class ConstrainedConv1d(nn.Conv1d):
     constraint, set both min_norm and max_norm. To apply a UnitNorm
     constraint, set both min_norm and max_norm to 1.0.
 
+    Note
+    ----
+    When setting ``padding`` to ``"causal"``, padding will be internally changed
+    to an integer equal to ``(kernel_size - 1) * dilation``. Then, during the
+    forward pass, the extra features are removed. This is preferable to F.pad,
+    which can lead to extra memory allocations or even non-deterministic
+    operations during the backward pass. Additional information can be found
+    at the following link: https://github.com/pytorch/pytorch/issues/1333
+
     Example
     -------
     >>> from import selfeeg.models import ConstrainedConv1d
@@ -301,12 +311,21 @@ def __init__(
         axis_norm=[1,2],
         minmax_rate=1.0
     ):
+
+        # Check for causal padding
+        self.pad = padding
+        self.causal_pad = False
+        if isinstance(padding, str):
+            if padding.casefold() == "causal":
+                self.causal_pad = True
+                self.pad = (kernel_size - 1) * dilation
+
         super(ConstrainedConv1d, self).__init__(
             in_channels,
             out_channels,
             kernel_size,
             stride,
-            padding,
+            self.pad,
             dilation,
             groups,
             bias,
@@ -410,7 +429,10 @@ def forward(self, input):
         """
         if self.constraint_type != 0:
             self.scale_norm()
-        return self._conv_forward(input, self.weight, self.bias)
+        if self.causal_pad:
+            return self._conv_forward(input, self.weight, self.bias)[:,:,:-self.pad]
+        else:
+            return self._conv_forward(input, self.weight, self.bias)
 
 
 class ConstrainedConv2d(nn.Conv2d):
diff --git a/test/EEGself/models/layers_test.py b/test/EEGself/models/layers_test.py
index 77c545a..143612c 100644
--- a/test/EEGself/models/layers_test.py
+++ b/test/EEGself/models/layers_test.py
@@ -56,7 +56,7 @@ def test_ConstrainedConv1d(self):
             "bias": [True, False],
             "max_norm": [None, 1, 2],
             "min_norm": [None, 1],
-            "padding": ["valid"],
+            "padding": ["valid", "causal"],
         }
         Conv_args = self.makeGrid(Conv_args)
         for i in Conv_args:
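
A minimal usage sketch of the new option, assuming selfeeg.models exposes ConstrainedConv1d as in the patched docstring and that the norm-constraint arguments keep their defaults; the check at the end relies only on the padding-and-trimming behavior described in the Note added by this patch.

import torch
from selfeeg.models import ConstrainedConv1d  # assumed import path

# padding="causal" internally pads by (kernel_size - 1) * dilation and trims the
# trailing samples in forward, so the output keeps the input's temporal length.
conv = ConstrainedConv1d(8, 16, kernel_size=5, dilation=2, padding="causal")
x = torch.randn(4, 8, 128)   # (batch, channels, samples)
y = conv(x)
print(y.shape)               # expected: torch.Size([4, 16, 128])

# Causality check: perturbing future samples must not change earlier outputs.
x2 = x.clone()
x2[:, :, 64:] += 1.0
assert torch.allclose(conv(x2)[:, :, :64], y[:, :, :64])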