Aditya Dhulipala 2024-08-27 23:06:12 -07:00 committed by GitHub
parent e64349bbdd
commit e6b223df5f
5 changed files with 124 additions and 1 deletion

@@ -306,6 +306,49 @@ array cholesky(
      {a});
}
array pinv(const array& a, StreamOrDevice s /* = {} */) {
  if (a.dtype() != float32) {
    std::ostringstream msg;
    msg << "[linalg::pinv] Arrays must have type float32. Received array "
        << "with type " << a.dtype() << ".";
    throw std::invalid_argument(msg.str());
  }
  if (a.ndim() < 2) {
    std::ostringstream msg;
    msg << "[linalg::pinv] Arrays must have >= 2 dimensions. Received array "
        << "with " << a.ndim() << " dimensions.";
    throw std::invalid_argument(msg.str());
  }
  int m = a.shape(-2);
  int n = a.shape(-1);
  int k = std::min(m, n);
  auto outs = linalg::svd(a, s);
  array U = outs[0];
  array S = outs[1];
  array V = outs[2];
  std::vector<int> starts(a.ndim(), 0);
  std::vector<int> ends = a.shape();
  int i = a.ndim() - 2;
  int j = a.ndim() - 1;
  // Prepare U
  ends[i] = m;
  ends[j] = k;
  U = swapaxes(slice(U, starts, ends, s), -1, -2, s);
  // Prepare V
  ends[i] = k;
  ends[j] = n;
  V = swapaxes(slice(V, starts, ends, s), -1, -2, s);
  // Prepare S
  S = expand_dims(S, -2, s);
  return matmul(divide(V, S, s), U, s);
}
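For reference, the routine above is the SVD form of the Moore-Penrose pseudo-inverse. Writing the thin factorization kept by the two slices as

    A = U_k \Sigma_k V_k^T,  where  k = \min(m, n)  and  \Sigma_k = \mathrm{diag}(\sigma_1, \dots, \sigma_k),

the returned value is

    A^{+} = V_k \, \Sigma_k^{-1} \, U_k^T.

After the two swapaxes calls, V holds V_k (n x k) and U holds U_k^T (k x m), so dividing by the broadcast row S scales the j-th column of V_k by 1/\sigma_j and the final matmul produces A^{+}. The singular values are divided through directly, with no tolerance cutoff for near-zero \sigma_j.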
array cholesky_inv(
    const array& L,
    bool upper /* = false */,

@@ -70,6 +70,8 @@ array tri_inv(const array& a, bool upper = false, StreamOrDevice s = {});
array cholesky(const array& a, bool upper = false, StreamOrDevice s = {});
array pinv(const array& a, StreamOrDevice s = {});
array cholesky_inv(const array& a, bool upper = false, StreamOrDevice s = {});
} // namespace mlx::core::linalg

@@ -353,4 +353,28 @@ void init_linalg(nb::module_& parent_module) {
        Returns:
            array: :math:`\mathbf{A^{-1}}` where :math:`\mathbf{A} = \mathbf{L}\mathbf{L}^T`.
      )pbdoc");
  m.def(
      "pinv",
      &pinv,
      "a"_a,
      nb::kw_only(),
      "stream"_a = nb::none(),
      nb::sig(
          "def pinv(a: array, *, stream: Union[None, Stream, Device] = None) -> array"),
      R"pbdoc(
        Compute the (Moore-Penrose) pseudo-inverse of a matrix.

        This function computes a generalized inverse of a matrix using its
        singular-value decomposition. The input must have at least two
        dimensions; when it has more than two, the pseudo-inverse is computed
        for each matrix in the last two dimensions of ``a``.

        Args:
            a (array): Input array.
            stream (Stream, optional): Stream or device. Defaults to ``None``
              in which case the default stream of the default device is used.

        Returns:
            array: ``aplus`` such that ``a @ aplus @ a = a``.
      )pbdoc");
}
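A minimal usage sketch of the binding above; the matrices are illustrative, and the stream=mx.cpu argument follows the tests in this commit:

import mlx.core as mx

# Single matrix: for a full-rank 3 x 2 input, A @ pinv(A) @ A recovers A.
A = mx.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
A_plus = mx.linalg.pinv(A, stream=mx.cpu)                 # shape (2, 3)
print(mx.allclose(A @ A_plus @ A, A, rtol=0, atol=1e-5))  # expect True

# Batched input: the pseudo-inverse is taken over the last two axes,
# so a (2, 3, 2) stack yields a (2, 2, 3) result.
batch = mx.stack([A, A + 1.0])
batch_plus = mx.linalg.pinv(batch, stream=mx.cpu)
print(batch_plus.shape)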

@@ -181,6 +181,18 @@ class TestLinalg(mlx_tests.MLXTestCase):
        for M, L in zip(AB, Ls):
            self.assertTrue(mx.allclose(L @ L.T, M, rtol=1e-5, atol=1e-7))

    def test_pseudo_inverse(self):
        A = mx.array([[1, 2, 3], [6, -5, 4], [-9, 8, 7]], dtype=mx.float32)
        A_plus = mx.linalg.pinv(A, stream=mx.cpu)
        self.assertTrue(mx.allclose(A @ A_plus @ A, A, rtol=0, atol=1e-5))

        # Multiple matrices
        B = A - 100
        AB = mx.stack([A, B])
        pinvs = mx.linalg.pinv(AB, stream=mx.cpu)
        for M, M_plus in zip(AB, pinvs):
            self.assertTrue(mx.allclose(M @ M_plus @ M, M, rtol=0, atol=1e-3))

    def test_cholesky_inv(self):
        mx.random.seed(7)

@@ -347,4 +347,46 @@ TEST_CASE("test matrix cholesky") {
              .item<bool>());
    CHECK(allclose(matmul(transpose(U), U), A, /* rtol = */ 0, /* atol = */ 1e-6)
              .item<bool>());
  }
}

TEST_CASE("test matrix pseudo-inverse") {
  // 0D and 1D throw
  CHECK_THROWS(linalg::pinv(array(0.0), Device::cpu));
  CHECK_THROWS(linalg::pinv(array({0.0, 1.0}), Device::cpu));

  // Unsupported types throw
  CHECK_THROWS(linalg::pinv(array({0, 1}, {1, 2}), Device::cpu));

  { // Square m == n
    const auto A = array({1.0, 2.0, 3.0, 4.0}, {2, 2});
    const auto A_pinv = linalg::pinv(A, Device::cpu);
    const auto A_again = matmul(matmul(A, A_pinv), A);
    CHECK(allclose(A_again, A).item<bool>());
    const auto A_pinv_again = matmul(matmul(A_pinv, A), A_pinv);
    CHECK(allclose(A_pinv_again, A_pinv).item<bool>());
  }

  { // Rectangular matrix m < n
    const auto prng_key = random::key(42);
    const auto A = random::normal({4, 5}, prng_key);
    const auto A_pinv = linalg::pinv(A, Device::cpu);
    const auto zeros = zeros_like(A_pinv, Device::cpu);
    CHECK_FALSE(allclose(zeros, A_pinv, /* rtol = */ 0, /* atol = */ 1e-6)
                    .item<bool>());
    const auto A_again = matmul(matmul(A, A_pinv), A);
    CHECK(allclose(A_again, A).item<bool>());
    const auto A_pinv_again = matmul(matmul(A_pinv, A), A_pinv);
    CHECK(allclose(A_pinv_again, A_pinv).item<bool>());
  }

  { // Rectangular matrix m > n
    const auto prng_key = random::key(10);
    const auto A = random::normal({6, 5}, prng_key);
    const auto A_pinv = linalg::pinv(A, Device::cpu);
    const auto zeros2 = zeros_like(A_pinv, Device::cpu);
    CHECK_FALSE(allclose(zeros2, A_pinv, /* rtol = */ 0, /* atol = */ 1e-6)
                    .item<bool>());
    const auto A_again = matmul(matmul(A, A_pinv), A);
    CHECK(allclose(A_again, A).item<bool>());
    const auto A_pinv_again = matmul(matmul(A_pinv, A), A_pinv);
    CHECK(allclose(A_pinv_again, A_pinv).item<bool>());
  }
}