Layer norm grad fix donation bug (#941)

* add layer norm grad test

* Fix donation bug in layernorm vjp

---------

Co-authored-by: Awni Hannun <awni@apple.com>
Angelos Katharopoulos 2024-04-01 06:15:50 -07:00 committed by GitHub
parent 9cbff5ec1d
commit 110d9b149d
2 changed files with 23 additions and 1 deletion


@@ -355,7 +355,14 @@ void LayerNormVJP::eval_gpu(
     ReductionPlan plan(
         ReductionOpType::ContiguousStridedReduce, {n_rows}, {axis_size});
     strided_reduce_general_dispatch(
-        g, gb, "sum", plan, {0}, compute_encoder, d, s);
+        g_in_gx ? gx : (g_in_gw ? gw_temp : g),
+        gb,
+        "sum",
+        plan,
+        {0},
+        compute_encoder,
+        d,
+        s);
   }
 
   const int simd_size = 32;

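Editor's note (not part of the commit): the diff above suggests that when the incoming cotangent `g` is donated to `gx` or to the temporary `gw_temp`, the bias-gradient reduction must read from whichever array now holds that data rather than from `g` itself; the new test below checks the parameter gradients of the fused kernel against a reference. A minimal standalone sketch of that check follows, assuming an MLX build exposing `mx.fast.layer_norm(x, weight, bias, eps)`; `ref_layer_norm` is a reference written here for illustration and is not the `layer_norm` helper used in the test file.

import mlx.core as mx

eps = 1e-5

def ref_layer_norm(x, w, b, eps):
    # Plain-MLX reference layer norm (normalize over the last axis).
    mu = mx.mean(x, axis=-1, keepdims=True)
    var = mx.var(x, axis=-1, keepdims=True)
    return (x - mu) * mx.rsqrt(var + eps) * w + b

# Sum the output so mx.grad returns gradients w.r.t. the (weight, bias) tuple.
f_ref = lambda params, x: ref_layer_norm(x, params[0], params[1], eps).sum()
f_fast = lambda params, x: mx.fast.layer_norm(x, params[0], params[1], eps).sum()

w = mx.ones((8,))
b = mx.zeros((8,))
x = mx.random.normal(shape=(2, 2, 8))

gw_ref, gb_ref = mx.grad(f_ref)((w, b), x)
gw_fast, gb_fast = mx.grad(f_fast)((w, b), x)

# With the fix, both gradient pairs should agree to within numerical tolerance.
print(mx.abs(gw_ref - gw_fast).max(), mx.abs(gb_ref - gb_fast).max())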

@@ -375,6 +375,21 @@ class TestFast(mlx_tests.MLXTestCase):
         self.assertLess(mx.abs(gb1).max(), 1e-9)
         self.assertLess(mx.abs(gb2).max(), 1e-9)
 
+    def test_layer_norm_grad_params(self):
+        eps = 1e-5
+        f1 = lambda params, x: (layer_norm(x, params[0], params[1], eps)).sum()
+        f2 = lambda params, x: (mx.fast.layer_norm(x, params[0], params[1], eps)).sum()
+
+        w = mx.ones((8,))
+        b = mx.zeros((8,))
+        x = mx.random.normal(shape=(2, 2, 8))
+        mx.eval(x, w, b)
+
+        gw1, gb1 = mx.grad(f1)((w, b), x)
+        gw2, gb2 = mx.grad(f2)((w, b), x)
+        self.assertLess(mx.abs(gw1 - gw2).max() / mx.abs(gw1).mean(), 1e-5)
+        self.assertLess(mx.abs(gb1 - gb2).max() / mx.abs(gb1).mean(), 1e-5)
+
     def test_fast_transforms(self):
         x = mx.random.uniform(shape=(2, 2, 8))