Dilation for convolutional layers (#766)

* add dilation parameter to Conv1d layer

* space here too

* add conv1d dilation test

* add dilation parameter for Conv2d layer

* add conv2d dilation test
This commit is contained in:
Piotr Rybiec
2024-03-04 15:43:00 +01:00
committed by GitHub
parent bc06cb9ff6
commit 6a665ea6ed
2 changed files with 24 additions and 4 deletions

View File

@@ -586,6 +586,13 @@ class TestLayers(mlx_tests.MLXTestCase):
self.assertEqual(y.shape, (N, (L - ks + 1) // 2, C_out))
self.assertTrue("bias" in c.parameters())
dil = 2
c = nn.Conv1d(
in_channels=C_in, out_channels=C_out, kernel_size=ks, dilation=dil
)
y = c(x)
self.assertEqual(y.shape, (N, L - (ks - 1) * dil, C_out))
c = nn.Conv1d(in_channels=C_in, out_channels=C_out, kernel_size=ks, bias=False)
self.assertTrue("bias" not in c.parameters())
@@ -632,6 +639,11 @@ class TestLayers(mlx_tests.MLXTestCase):
self.assertEqual(y.shape, (4, 3, 3, 8))
self.assertLess(mx.abs(y - c.weight.sum((1, 2, 3))).max(), 1e-4)
c = nn.Conv2d(3, 8, 3, dilation=2)
y = c(x)
self.assertEqual(y.shape, (4, 4, 4, 8))
self.assertLess(mx.abs(y - c.weight.sum((1, 2, 3))).max(), 1e-4)
def test_sequential(self):
x = mx.ones((10, 2))
m = nn.Sequential(nn.Linear(2, 10), nn.ReLU(), nn.Linear(10, 1))