From de4f3e72fd4fc122501e87f4053bc9d4a0656e9d Mon Sep 17 00:00:00 2001
From: Awni Hannun
Date: Thu, 21 Dec 2023 22:13:41 -0800
Subject: [PATCH] docs
---
docs/build/html/.buildinfo | 2 +-
.../_autosummary/mlx.core.array.round.rst | 6 +
.../python/_autosummary/mlx.core.array.rst | 1 +
.../python/_autosummary/mlx.core.clip.rst | 6 +
.../_autosummary/mlx.core.dequantize.rst | 6 +
.../_autosummary/mlx.core.floor_divide.rst | 6 +
.../python/_autosummary/mlx.core.linspace.rst | 6 +
.../python/_autosummary/mlx.core.quantize.rst | 6 +
.../mlx.core.quantized_matmul.rst | 6 +
.../python/_autosummary/mlx.core.round.rst | 6 +
.../python/_autosummary/mlx.nn.Module.rst | 1 +
.../_autosummary/mlx.optimizers.Lion.rst | 18 +
.../mlx.nn.losses.smooth_l1_loss.rst | 8 +
docs/build/html/_sources/python/array.rst | 1 +
.../_autosummary/mlx.nn.QuantizedLinear.rst | 8 +
.../mlx.nn.losses.smooth_l1_loss.rst | 8 +
.../mlx.nn.losses.triplet_loss.rst | 8 +
docs/build/html/_sources/python/nn/layers.rst | 1 +
docs/build/html/_sources/python/nn/losses.rst | 6 +-
docs/build/html/_sources/python/ops.rst | 9 +-
.../build/html/_sources/python/optimizers.rst | 1 +
.../html/_static/documentation_options.js | 2 +-
docs/build/html/cpp/ops.html | 24 +-
docs/build/html/dev/extensions.html | 24 +-
.../html/examples/linear_regression.html | 24 +-
docs/build/html/examples/llama-inference.html | 24 +-
docs/build/html/examples/mlp.html | 24 +-
docs/build/html/genindex.html | 69 +-
docs/build/html/index.html | 24 +-
docs/build/html/install.html | 24 +-
docs/build/html/objects.inv | Bin 5279 -> 5533 bytes
.../python/_autosummary/mlx.core.Device.html | 24 +-
.../python/_autosummary/mlx.core.Dtype.html | 24 +-
.../python/_autosummary/mlx.core.Stream.html | 24 +-
.../python/_autosummary/mlx.core.abs.html | 24 +-
.../python/_autosummary/mlx.core.add.html | 24 +-
.../python/_autosummary/mlx.core.all.html | 24 +-
.../_autosummary/mlx.core.allclose.html | 24 +-
.../python/_autosummary/mlx.core.any.html | 24 +-
.../python/_autosummary/mlx.core.arange.html | 24 +-
.../python/_autosummary/mlx.core.arccos.html | 24 +-
.../python/_autosummary/mlx.core.arccosh.html | 24 +-
.../python/_autosummary/mlx.core.arcsin.html | 24 +-
.../python/_autosummary/mlx.core.arcsinh.html | 24 +-
.../python/_autosummary/mlx.core.arctan.html | 24 +-
.../python/_autosummary/mlx.core.arctanh.html | 24 +-
.../python/_autosummary/mlx.core.argmax.html | 24 +-
.../python/_autosummary/mlx.core.argmin.html | 24 +-
.../_autosummary/mlx.core.argpartition.html | 24 +-
.../python/_autosummary/mlx.core.argsort.html | 24 +-
.../python/_autosummary/mlx.core.array.T.html | 24 +-
.../_autosummary/mlx.core.array.abs.html | 24 +-
.../_autosummary/mlx.core.array.all.html | 24 +-
.../_autosummary/mlx.core.array.any.html | 24 +-
.../_autosummary/mlx.core.array.argmax.html | 24 +-
.../_autosummary/mlx.core.array.argmin.html | 24 +-
.../_autosummary/mlx.core.array.astype.html | 24 +-
.../_autosummary/mlx.core.array.cos.html | 24 +-
.../_autosummary/mlx.core.array.dtype.html | 24 +-
.../_autosummary/mlx.core.array.exp.html | 24 +-
.../python/_autosummary/mlx.core.array.html | 49 +-
.../_autosummary/mlx.core.array.item.html | 24 +-
.../_autosummary/mlx.core.array.log.html | 24 +-
.../_autosummary/mlx.core.array.log1p.html | 24 +-
.../mlx.core.array.logsumexp.html | 24 +-
.../_autosummary/mlx.core.array.max.html | 24 +-
.../_autosummary/mlx.core.array.mean.html | 24 +-
.../_autosummary/mlx.core.array.min.html | 24 +-
.../_autosummary/mlx.core.array.ndim.html | 24 +-
.../_autosummary/mlx.core.array.prod.html | 24 +-
.../mlx.core.array.reciprocal.html | 24 +-
.../_autosummary/mlx.core.array.reshape.html | 30 +-
.../_autosummary/mlx.core.array.round.html | 690 ++++++++++++++++
.../_autosummary/mlx.core.array.rsqrt.html | 30 +-
.../_autosummary/mlx.core.array.shape.html | 24 +-
.../_autosummary/mlx.core.array.sin.html | 24 +-
.../_autosummary/mlx.core.array.size.html | 24 +-
.../_autosummary/mlx.core.array.split.html | 24 +-
.../_autosummary/mlx.core.array.sqrt.html | 24 +-
.../_autosummary/mlx.core.array.square.html | 24 +-
.../_autosummary/mlx.core.array.sum.html | 24 +-
.../_autosummary/mlx.core.array.tolist.html | 24 +-
.../mlx.core.array.transpose.html | 24 +-
.../_autosummary/mlx.core.array.var.html | 24 +-
.../_autosummary/mlx.core.array_equal.html | 24 +-
.../_autosummary/mlx.core.broadcast_to.html | 24 +-
.../python/_autosummary/mlx.core.ceil.html | 30 +-
.../python/_autosummary/mlx.core.clip.html | 708 +++++++++++++++++
.../_autosummary/mlx.core.concatenate.html | 30 +-
.../python/_autosummary/mlx.core.conv1d.html | 24 +-
.../python/_autosummary/mlx.core.conv2d.html | 24 +-
.../_autosummary/mlx.core.convolve.html | 24 +-
.../python/_autosummary/mlx.core.cos.html | 24 +-
.../python/_autosummary/mlx.core.cosh.html | 30 +-
.../_autosummary/mlx.core.default_device.html | 24 +-
.../_autosummary/mlx.core.default_stream.html | 24 +-
.../_autosummary/mlx.core.dequantize.html | 716 +++++++++++++++++
.../python/_autosummary/mlx.core.divide.html | 30 +-
.../python/_autosummary/mlx.core.equal.html | 24 +-
.../python/_autosummary/mlx.core.erf.html | 24 +-
.../python/_autosummary/mlx.core.erfinv.html | 24 +-
.../python/_autosummary/mlx.core.eval.html | 24 +-
.../python/_autosummary/mlx.core.exp.html | 24 +-
.../_autosummary/mlx.core.expand_dims.html | 24 +-
.../python/_autosummary/mlx.core.eye.html | 30 +-
.../python/_autosummary/mlx.core.fft.fft.html | 24 +-
.../_autosummary/mlx.core.fft.fft2.html | 24 +-
.../_autosummary/mlx.core.fft.fftn.html | 24 +-
.../_autosummary/mlx.core.fft.ifft.html | 24 +-
.../_autosummary/mlx.core.fft.ifft2.html | 24 +-
.../_autosummary/mlx.core.fft.ifftn.html | 24 +-
.../_autosummary/mlx.core.fft.irfft.html | 24 +-
.../_autosummary/mlx.core.fft.irfft2.html | 24 +-
.../_autosummary/mlx.core.fft.irfftn.html | 24 +-
.../_autosummary/mlx.core.fft.rfft.html | 24 +-
.../_autosummary/mlx.core.fft.rfft2.html | 24 +-
.../_autosummary/mlx.core.fft.rfftn.html | 24 +-
.../python/_autosummary/mlx.core.flatten.html | 36 +-
.../python/_autosummary/mlx.core.floor.html | 36 +-
.../_autosummary/mlx.core.floor_divide.html | 706 +++++++++++++++++
.../python/_autosummary/mlx.core.full.html | 30 +-
.../python/_autosummary/mlx.core.grad.html | 24 +-
.../python/_autosummary/mlx.core.greater.html | 24 +-
.../_autosummary/mlx.core.greater_equal.html | 24 +-
.../_autosummary/mlx.core.identity.html | 24 +-
.../python/_autosummary/mlx.core.jvp.html | 24 +-
.../python/_autosummary/mlx.core.less.html | 24 +-
.../_autosummary/mlx.core.less_equal.html | 30 +-
.../_autosummary/mlx.core.linspace.html | 707 +++++++++++++++++
.../python/_autosummary/mlx.core.load.html | 30 +-
.../python/_autosummary/mlx.core.log.html | 24 +-
.../python/_autosummary/mlx.core.log10.html | 24 +-
.../python/_autosummary/mlx.core.log1p.html | 24 +-
.../python/_autosummary/mlx.core.log2.html | 24 +-
.../_autosummary/mlx.core.logaddexp.html | 24 +-
.../_autosummary/mlx.core.logical_not.html | 24 +-
.../_autosummary/mlx.core.logsumexp.html | 24 +-
.../python/_autosummary/mlx.core.matmul.html | 24 +-
.../python/_autosummary/mlx.core.max.html | 24 +-
.../python/_autosummary/mlx.core.maximum.html | 24 +-
.../python/_autosummary/mlx.core.mean.html | 24 +-
.../python/_autosummary/mlx.core.min.html | 24 +-
.../python/_autosummary/mlx.core.minimum.html | 24 +-
.../_autosummary/mlx.core.moveaxis.html | 24 +-
.../_autosummary/mlx.core.multiply.html | 24 +-
.../_autosummary/mlx.core.negative.html | 24 +-
.../_autosummary/mlx.core.new_stream.html | 24 +-
.../python/_autosummary/mlx.core.ones.html | 24 +-
.../_autosummary/mlx.core.ones_like.html | 24 +-
.../python/_autosummary/mlx.core.pad.html | 24 +-
.../_autosummary/mlx.core.partition.html | 24 +-
.../python/_autosummary/mlx.core.prod.html | 30 +-
.../_autosummary/mlx.core.quantize.html | 742 ++++++++++++++++++
.../mlx.core.quantized_matmul.html | 713 +++++++++++++++++
.../mlx.core.random.bernoulli.html | 24 +-
.../mlx.core.random.categorical.html | 24 +-
.../_autosummary/mlx.core.random.gumbel.html | 24 +-
.../_autosummary/mlx.core.random.key.html | 24 +-
.../_autosummary/mlx.core.random.normal.html | 24 +-
.../_autosummary/mlx.core.random.randint.html | 24 +-
.../_autosummary/mlx.core.random.seed.html | 24 +-
.../_autosummary/mlx.core.random.split.html | 24 +-
.../mlx.core.random.truncated_normal.html | 24 +-
.../_autosummary/mlx.core.random.uniform.html | 24 +-
.../_autosummary/mlx.core.reciprocal.html | 30 +-
.../python/_autosummary/mlx.core.reshape.html | 30 +-
.../python/_autosummary/mlx.core.round.html | 709 +++++++++++++++++
.../python/_autosummary/mlx.core.rsqrt.html | 30 +-
.../python/_autosummary/mlx.core.save.html | 32 +-
.../python/_autosummary/mlx.core.savez.html | 24 +-
.../mlx.core.savez_compressed.html | 24 +-
.../mlx.core.set_default_device.html | 24 +-
.../mlx.core.set_default_stream.html | 24 +-
.../python/_autosummary/mlx.core.sigmoid.html | 24 +-
.../python/_autosummary/mlx.core.sign.html | 24 +-
.../_autosummary/mlx.core.simplify.html | 24 +-
.../python/_autosummary/mlx.core.sin.html | 24 +-
.../python/_autosummary/mlx.core.sinh.html | 24 +-
.../python/_autosummary/mlx.core.softmax.html | 24 +-
.../python/_autosummary/mlx.core.sort.html | 24 +-
.../python/_autosummary/mlx.core.split.html | 24 +-
.../python/_autosummary/mlx.core.sqrt.html | 24 +-
.../python/_autosummary/mlx.core.square.html | 24 +-
.../python/_autosummary/mlx.core.squeeze.html | 24 +-
.../python/_autosummary/mlx.core.stack.html | 24 +-
.../_autosummary/mlx.core.stop_gradient.html | 24 +-
.../_autosummary/mlx.core.subtract.html | 24 +-
.../python/_autosummary/mlx.core.sum.html | 24 +-
.../_autosummary/mlx.core.swapaxes.html | 24 +-
.../python/_autosummary/mlx.core.take.html | 24 +-
.../mlx.core.take_along_axis.html | 24 +-
.../python/_autosummary/mlx.core.tan.html | 24 +-
.../python/_autosummary/mlx.core.tanh.html | 24 +-
.../_autosummary/mlx.core.transpose.html | 24 +-
.../python/_autosummary/mlx.core.tri.html | 24 +-
.../python/_autosummary/mlx.core.tril.html | 24 +-
.../python/_autosummary/mlx.core.triu.html | 24 +-
.../_autosummary/mlx.core.value_and_grad.html | 24 +-
.../python/_autosummary/mlx.core.var.html | 24 +-
.../python/_autosummary/mlx.core.vjp.html | 24 +-
.../python/_autosummary/mlx.core.vmap.html | 24 +-
.../python/_autosummary/mlx.core.where.html | 24 +-
.../python/_autosummary/mlx.core.zeros.html | 24 +-
.../_autosummary/mlx.core.zeros_like.html | 24 +-
.../python/_autosummary/mlx.nn.Module.html | 33 +-
.../_autosummary/mlx.nn.value_and_grad.html | 24 +-
.../_autosummary/mlx.optimizers.AdaDelta.html | 24 +-
.../_autosummary/mlx.optimizers.Adagrad.html | 24 +-
.../_autosummary/mlx.optimizers.Adam.html | 24 +-
.../_autosummary/mlx.optimizers.AdamW.html | 24 +-
.../_autosummary/mlx.optimizers.Adamax.html | 30 +-
.../_autosummary/mlx.optimizers.Lion.html | 725 +++++++++++++++++
.../mlx.optimizers.Optimizer.html | 24 +-
.../mlx.optimizers.OptimizerState.html | 24 +-
.../_autosummary/mlx.optimizers.RMSprop.html | 24 +-
.../_autosummary/mlx.optimizers.SGD.html | 24 +-
.../_autosummary/mlx.utils.tree_flatten.html | 24 +-
.../_autosummary/mlx.utils.tree_map.html | 30 +-
.../mlx.utils.tree_unflatten.html | 24 +-
.../mlx.nn.losses.smooth_l1_loss.html | 690 ++++++++++++++++
docs/build/html/python/array.html | 45 +-
docs/build/html/python/data_types.html | 24 +-
.../html/python/devices_and_streams.html | 24 +-
docs/build/html/python/fft.html | 24 +-
docs/build/html/python/nn.html | 31 +-
.../python/nn/_autosummary/mlx.nn.Conv1d.html | 24 +-
.../python/nn/_autosummary/mlx.nn.Conv2d.html | 24 +-
.../nn/_autosummary/mlx.nn.Embedding.html | 24 +-
.../python/nn/_autosummary/mlx.nn.GELU.html | 24 +-
.../nn/_autosummary/mlx.nn.GroupNorm.html | 24 +-
.../nn/_autosummary/mlx.nn.LayerNorm.html | 24 +-
.../python/nn/_autosummary/mlx.nn.Linear.html | 24 +-
.../python/nn/_autosummary/mlx.nn.Mish.html | 24 +-
.../mlx.nn.MultiHeadAttention.html | 24 +-
.../python/nn/_autosummary/mlx.nn.PReLU.html | 24 +-
.../_autosummary/mlx.nn.QuantizedLinear.html | 715 +++++++++++++++++
.../nn/_autosummary/mlx.nn.RMSNorm.html | 24 +-
.../python/nn/_autosummary/mlx.nn.ReLU.html | 24 +-
.../python/nn/_autosummary/mlx.nn.RoPE.html | 24 +-
.../python/nn/_autosummary/mlx.nn.SELU.html | 24 +-
.../nn/_autosummary/mlx.nn.Sequential.html | 30 +-
.../python/nn/_autosummary/mlx.nn.SiLU.html | 24 +-
.../python/nn/_autosummary/mlx.nn.Step.html | 24 +-
.../_autosummary_functions/mlx.nn.gelu.html | 24 +-
.../mlx.nn.gelu_approx.html | 24 +-
.../mlx.nn.gelu_fast_approx.html | 24 +-
.../mlx.nn.losses.binary_cross_entropy.html | 52 +-
.../mlx.nn.losses.cross_entropy.html | 48 +-
.../mlx.nn.losses.kl_div_loss.html | 45 +-
.../mlx.nn.losses.l1_loss.html | 42 +-
.../mlx.nn.losses.mse_loss.html | 36 +-
.../mlx.nn.losses.nll_loss.html | 38 +-
.../mlx.nn.losses.smooth_l1_loss.html | 719 +++++++++++++++++
.../mlx.nn.losses.triplet_loss.html | 719 +++++++++++++++++
.../_autosummary_functions/mlx.nn.mish.html | 24 +-
.../_autosummary_functions/mlx.nn.prelu.html | 24 +-
.../_autosummary_functions/mlx.nn.relu.html | 24 +-
.../_autosummary_functions/mlx.nn.selu.html | 24 +-
.../_autosummary_functions/mlx.nn.silu.html | 24 +-
.../_autosummary_functions/mlx.nn.step.html | 24 +-
docs/build/html/python/nn/functions.html | 30 +-
docs/build/html/python/nn/layers.html | 27 +-
docs/build/html/python/nn/losses.html | 60 +-
docs/build/html/python/ops.html | 139 ++--
docs/build/html/python/optimizers.html | 33 +-
docs/build/html/python/random.html | 24 +-
docs/build/html/python/transforms.html | 24 +-
docs/build/html/python/tree_utils.html | 32 +-
docs/build/html/quick_start.html | 24 +-
docs/build/html/search.html | 24 +-
docs/build/html/searchindex.js | 2 +-
docs/build/html/unified_memory.html | 24 +-
docs/build/html/using_streams.html | 24 +-
273 files changed, 13936 insertions(+), 1646 deletions(-)
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.array.round.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.clip.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.dequantize.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.floor_divide.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.linspace.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.quantize.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.quantized_matmul.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.round.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.optimizers.Lion.rst
create mode 100644 docs/build/html/_sources/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst
create mode 100644 docs/build/html/_sources/python/nn/_autosummary/mlx.nn.QuantizedLinear.rst
create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst
create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst
create mode 100644 docs/build/html/python/_autosummary/mlx.core.array.round.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.clip.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.dequantize.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.floor_divide.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.linspace.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.quantize.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.round.html
create mode 100644 docs/build/html/python/_autosummary/mlx.optimizers.Lion.html
create mode 100644 docs/build/html/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html
create mode 100644 docs/build/html/python/nn/_autosummary/mlx.nn.QuantizedLinear.html
create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html
create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.html
diff --git a/docs/build/html/.buildinfo b/docs/build/html/.buildinfo
index e8163c948..0bdadc036 100644
--- a/docs/build/html/.buildinfo
+++ b/docs/build/html/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 7192ed07ca0b377e57ed9f17cbd933a4
+config: 78e86a9caf7acb193f064f97ea2f4572
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.array.round.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.array.round.rst
new file mode 100644
index 000000000..9a4270188
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.array.round.rst
@@ -0,0 +1,6 @@
+mlx.core.array.round
+====================
+
+.. currentmodule:: mlx.core
+
+.. automethod:: array.round
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.array.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.array.rst
index a93bbadcd..1a56a5822 100644
--- a/docs/build/html/_sources/python/_autosummary/mlx.core.array.rst
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.array.rst
@@ -40,6 +40,7 @@
~array.prod
~array.reciprocal
~array.reshape
+ ~array.round
~array.rsqrt
~array.sin
~array.split
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.clip.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.clip.rst
new file mode 100644
index 000000000..12db7db21
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.clip.rst
@@ -0,0 +1,6 @@
+mlx.core.clip
+=============
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: clip
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.dequantize.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.dequantize.rst
new file mode 100644
index 000000000..de7cfe21c
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.dequantize.rst
@@ -0,0 +1,6 @@
+mlx.core.dequantize
+===================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: dequantize
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.floor_divide.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.floor_divide.rst
new file mode 100644
index 000000000..d28782c77
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.floor_divide.rst
@@ -0,0 +1,6 @@
+mlx.core.floor\_divide
+======================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: floor_divide
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.linspace.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.linspace.rst
new file mode 100644
index 000000000..42e329d23
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.linspace.rst
@@ -0,0 +1,6 @@
+mlx.core.linspace
+=================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: linspace
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.quantize.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.quantize.rst
new file mode 100644
index 000000000..386d8bc88
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.quantize.rst
@@ -0,0 +1,6 @@
+mlx.core.quantize
+=================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: quantize
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.quantized_matmul.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.quantized_matmul.rst
new file mode 100644
index 000000000..3e358f1ac
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.quantized_matmul.rst
@@ -0,0 +1,6 @@
+mlx.core.quantized\_matmul
+==========================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: quantized_matmul
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.round.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.round.rst
new file mode 100644
index 000000000..ca752fcea
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.round.rst
@@ -0,0 +1,6 @@
+mlx.core.round
+==============
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: round
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.nn.Module.rst b/docs/build/html/_sources/python/_autosummary/mlx.nn.Module.rst
index 79f55b253..8b28b3ac6 100644
--- a/docs/build/html/_sources/python/_autosummary/mlx.nn.Module.rst
+++ b/docs/build/html/_sources/python/_autosummary/mlx.nn.Module.rst
@@ -41,6 +41,7 @@
~Module.trainable_parameters
~Module.unfreeze
~Module.update
+ ~Module.update_modules
~Module.valid_child_filter
~Module.valid_parameter_filter
~Module.values
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.optimizers.Lion.rst b/docs/build/html/_sources/python/_autosummary/mlx.optimizers.Lion.rst
new file mode 100644
index 000000000..a00dc50f0
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.optimizers.Lion.rst
@@ -0,0 +1,18 @@
+mlx.optimizers.Lion
+===================
+
+.. currentmodule:: mlx.optimizers
+
+.. autoclass:: Lion
+
+
+
+
+ .. rubric:: Methods
+
+ .. autosummary::
+
+ ~Lion.__init__
+ ~Lion.apply_single
+
+
diff --git a/docs/build/html/_sources/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst b/docs/build/html/_sources/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst
new file mode 100644
index 000000000..00a647a75
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst
@@ -0,0 +1,8 @@
+mlx.nn.losses.smooth\_l1\_loss
+==============================
+
+.. currentmodule:: mlx.nn.losses
+
+.. autoclass:: smooth_l1_loss
+
+
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/array.rst b/docs/build/html/_sources/python/array.rst
index 96ddd32b3..e96e7234d 100644
--- a/docs/build/html/_sources/python/array.rst
+++ b/docs/build/html/_sources/python/array.rst
@@ -34,6 +34,7 @@ Array
array.prod
array.reciprocal
array.reshape
+ array.round
array.rsqrt
array.sin
array.split
diff --git a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.QuantizedLinear.rst b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.QuantizedLinear.rst
new file mode 100644
index 000000000..ccbde4340
--- /dev/null
+++ b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.QuantizedLinear.rst
@@ -0,0 +1,8 @@
+mlx.nn.QuantizedLinear
+======================
+
+.. currentmodule:: mlx.nn
+
+.. autoclass:: QuantizedLinear
+
+
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst
new file mode 100644
index 000000000..00a647a75
--- /dev/null
+++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst
@@ -0,0 +1,8 @@
+mlx.nn.losses.smooth\_l1\_loss
+==============================
+
+.. currentmodule:: mlx.nn.losses
+
+.. autoclass:: smooth_l1_loss
+
+
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst
new file mode 100644
index 000000000..4698d6155
--- /dev/null
+++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst
@@ -0,0 +1,8 @@
+mlx.nn.losses.triplet\_loss
+===========================
+
+.. currentmodule:: mlx.nn.losses
+
+.. autoclass:: triplet_loss
+
+
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/nn/layers.rst b/docs/build/html/_sources/python/nn/layers.rst
index 5628134d6..fab3ff785 100644
--- a/docs/build/html/_sources/python/nn/layers.rst
+++ b/docs/build/html/_sources/python/nn/layers.rst
@@ -26,3 +26,4 @@ Layers
RoPE
MultiHeadAttention
Sequential
+ QuantizedLinear
diff --git a/docs/build/html/_sources/python/nn/losses.rst b/docs/build/html/_sources/python/nn/losses.rst
index 4808ce5ab..b6a202d4a 100644
--- a/docs/build/html/_sources/python/nn/losses.rst
+++ b/docs/build/html/_sources/python/nn/losses.rst
@@ -9,9 +9,11 @@ Loss Functions
:toctree: _autosummary_functions
:template: nn-module-template.rst
- cross_entropy
binary_cross_entropy
+ cross_entropy
+ kl_div_loss
l1_loss
mse_loss
nll_loss
- kl_div_loss
+ smooth_l1_loss
+ triplet_loss
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/ops.rst b/docs/build/html/_sources/python/ops.rst
index ea25b90f9..7e391ec4c 100644
--- a/docs/build/html/_sources/python/ops.rst
+++ b/docs/build/html/_sources/python/ops.rst
@@ -27,12 +27,14 @@ Operations
array_equal
broadcast_to
ceil
+ clip
concatenate
convolve
conv1d
conv2d
cos
cosh
+ dequantize
divide
equal
erf
@@ -40,14 +42,16 @@ Operations
exp
expand_dims
eye
- floor
flatten
+ floor
+ floor_divide
full
greater
greater_equal
identity
less
less_equal
+ linspace
load
log
log2
@@ -70,8 +74,11 @@ Operations
partition
pad
prod
+ quantize
+ quantized_matmul
reciprocal
reshape
+ round
rsqrt
save
savez
diff --git a/docs/build/html/_sources/python/optimizers.rst b/docs/build/html/_sources/python/optimizers.rst
index b8e5cfea7..7cc6ef906 100644
--- a/docs/build/html/_sources/python/optimizers.rst
+++ b/docs/build/html/_sources/python/optimizers.rst
@@ -44,3 +44,4 @@ model's parameters and the **optimizer state**.
Adam
AdamW
Adamax
+ Lion
diff --git a/docs/build/html/_static/documentation_options.js b/docs/build/html/_static/documentation_options.js
index 0339f4d46..14cb7e611 100644
--- a/docs/build/html/_static/documentation_options.js
+++ b/docs/build/html/_static/documentation_options.js
@@ -1,6 +1,6 @@
var DOCUMENTATION_OPTIONS = {
URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
- VERSION: '0.0.5',
+ VERSION: '0.0.6',
LANGUAGE: 'en',
COLLAPSE_INDEX: false,
BUILDER: 'html',
diff --git a/docs/build/html/cpp/ops.html b/docs/build/html/cpp/ops.html
index a6e660fe2..0509907bd 100644
--- a/docs/build/html/cpp/ops.html
+++ b/docs/build/html/cpp/ops.html
@@ -9,7 +9,7 @@
- Operations — MLX 0.0.5 documentation
+ Operations — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/dev/extensions.html b/docs/build/html/dev/extensions.html
index b46f3a39c..b61eeee4c 100644
--- a/docs/build/html/dev/extensions.html
+++ b/docs/build/html/dev/extensions.html
@@ -9,7 +9,7 @@
- Developer Documentation — MLX 0.0.5 documentation
+ Developer Documentation — MLX 0.0.6 documentation
@@ -133,8 +133,8 @@
-
-
+
+
@@ -185,6 +185,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -226,12 +227,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -239,14 +242,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -269,8 +274,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -363,6 +371,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -398,6 +409,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/examples/linear_regression.html b/docs/build/html/examples/linear_regression.html
index f81c9f4cd..54ac2b5ec 100644
--- a/docs/build/html/examples/linear_regression.html
+++ b/docs/build/html/examples/linear_regression.html
@@ -9,7 +9,7 @@
- Linear Regression — MLX 0.0.5 documentation
+ Linear Regression — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/examples/llama-inference.html b/docs/build/html/examples/llama-inference.html
index f102d4087..afba02217 100644
--- a/docs/build/html/examples/llama-inference.html
+++ b/docs/build/html/examples/llama-inference.html
@@ -9,7 +9,7 @@
- LLM inference — MLX 0.0.5 documentation
+ LLM inference — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/examples/mlp.html b/docs/build/html/examples/mlp.html
index b5f6b0989..99e83333b 100644
--- a/docs/build/html/examples/mlp.html
+++ b/docs/build/html/examples/mlp.html
@@ -9,7 +9,7 @@
- Multi-Layer Perceptron — MLX 0.0.5 documentation
+ Multi-Layer Perceptron — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/genindex.html b/docs/build/html/genindex.html
index 7a9ba3568..3cfe9a7df 100644
--- a/docs/build/html/genindex.html
+++ b/docs/build/html/genindex.html
@@ -8,7 +8,7 @@
- Index — MLX 0.0.5 documentation
+ Index — MLX 0.0.6 documentation
@@ -131,8 +131,8 @@
-
-
+
+
@@ -183,6 +183,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -224,12 +225,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -237,14 +240,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -267,8 +272,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -361,6 +369,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -396,6 +407,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
+Q
+
+
R
-
-
+
+ smooth_l1_loss (class in mlx.nn.losses)
+
softmax() (in module mlx.core)
sort() (in module mlx.core)
@@ -1160,6 +1205,8 @@ document.write(`
tri() (in module mlx.core)
tril() (in module mlx.core)
+
+ triplet_loss (class in mlx.nn.losses)
triu() (in module mlx.core)
diff --git a/docs/build/html/index.html b/docs/build/html/index.html
index e3a70fdc3..452870082 100644
--- a/docs/build/html/index.html
+++ b/docs/build/html/index.html
@@ -9,7 +9,7 @@
- MLX — MLX 0.0.5 documentation
+ MLX — MLX 0.0.6 documentation
@@ -133,8 +133,8 @@
-
-
+
+
@@ -185,6 +185,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -226,12 +227,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -239,14 +242,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -269,8 +274,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -363,6 +371,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -398,6 +409,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/install.html b/docs/build/html/install.html
index f08edca64..d896c4491 100644
--- a/docs/build/html/install.html
+++ b/docs/build/html/install.html
@@ -9,7 +9,7 @@
- Build and Install — MLX 0.0.5 documentation
+ Build and Install — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/objects.inv b/docs/build/html/objects.inv
index 9ccc5cd4c4a15739176b03de165e38561de1836b..4f89b9506cc4f74cde0361d6a1310c33441a9527 100644
GIT binary patch
delta 5441
zcmV-H6~5}9DV-~jI{`M4JUo9*liW6vz2~n$gnTV&x{h=6TDEs0q#2D`4(+Xis$y5s
z;+IKQtGj>w0%VZ{hy;<3?SsSWOyqk2SOftO%va=3Z;w@zzI~rQX2(?3&;LB;N!zMB
z{m<@FmG2J8wX51|Q6$atyTUF0_Rflhzh5xeIasF*1o
zLfKNS0N#Dqq)8#J)=)f%v!P-^#N=CRP#&)3vCFD5hNFvfAg2=qayoG!c@BZ(IS+Eo
zt2PCZ`WOzbe1?$9XAY!E$}?7@K7xfh9xLpE^$|g+3yyW+!P=|@v2;Wb>Vjhh>eVG^
zy>vtn>VjkO^*R^H6NZ0fA_7oo*P93)s!p0N>p?KUDUOG2tEK}HO$ZCtB+uf3v@ab(
zdHcoS)J;{VP4_H=`Vs`g$K>_O-3Dd^Dlj+?zF~BQ;P?@C@C^ea1Xr#!cDf%R#PB^a
zx`#Uh1r6LCy}KPJ7!YV1m>3N1+nA0Zh6Z)xJB?Mue%q(jI;MY5HCjg<5`@dTv_SDp
zOb{@y&M2M^S%bT)Hvrz%Ymho_LqAS|LFf%P&)S_+qzQcAn3ymiejl#ggyktK@ZYzI
z3PNhA6Iw+R69jD1W2Orn6L{R6BUixNOM(wWx3Dm5buHmu=!3$L?O(&AAA&SNLCC4;
z*yUeudW-);+m(M`Xq&xksiKYwL)Li)_hcUwhTL#%)-TOp*Q7~tND~zXy~0llZ4Sb9
zRi3qQvxlfKs9v$P^=;t^;f{ep=*Of%L3Kz7?vU!v#unOh7z6sPsgl!C&uC#gF2cgYjJm}-y3B-TX5do+@47CqIRrw>@&OU~P
zYcZsj2f2VK+@kxGJ|x$?JDjG?ODUimxdPoz`+O*L8`_{dnLDrSZqE}r>=?yCW{=qk
zgX<$0D0nmpjU#FE0Kqg5JV;hPf;c*YgL(wp^~g8t2^xR+Ili_g<>{cOuPq2RhH;?J
z&@Vt2XFz`*9y;?M0(1)rgY7qNBfzXmlTA7`QS$CtZf^
zAB-_9+>Z^5>N$#mtjZMXJ5xRYbI7yrDT>txF@QBT(AVrZzv=z`+ROX#y@(i#eA-_oY6^aOt}L*SROVR*AQc&?i4h|NCy=rG0kT6{~l
zpbvpz;P2`422>pthAgY5*whn4U>Ml^FDttZ$hXKabepE94Y($31$2DTgQ5oB_1xTD
zbM3dIPlwGty@UvpUCZo27umpZi{zl4H>zUTwYRC(dsI1I=tbi6_EZcNcyl_vGZlXW
z1zyUv3HBg1d3Ig0FDR2kMc
zLkJI6f=DF`Q|Q6v;TeM(Vpuq^Qhc4T)0lxHqHXoifvcBW2n&YYxPe2C@Dm_3^rHhw
zXdIbd>&t-xuk^JeOk@1YWXcC%4oO~>=L5RBU4*jWB?egXAQupY8*rSXB4VtS
zVDEl&q@r`?TYp&Ja3+iQRP6WWB+#W{bFBEe8}
zc?RakCB??(K7avwNgM36MjsFYg7-$|C>OFU-~KWkm06tnFB!0@JPWf2VfH@EuX<^G
z((A&r9BB&iz<&Sr!yj@SQ`(2n>xlZ_x7C&qXAJScJ|xd+gMm#UK3GdSjD
zAG6{7j|{|=_93d%H5YvsI2C^mU|=75te}}a)c;MB)6eEr>bPYmY@!$I!GHRkB0sFY
zxCipzajOiykc&VM`18km>^F7`@PVLKvW@&)efkxE(4{?yd*~*<=WgQrRL^pCpADNE
zLZB(ehs&TEWn7KAv<4t_X%FI@=GQpHVSGZg@2of$?2y!TQ$20PyoG=B&=2M?>MQXr
z438u?!({tbPwcbDqz_#T{yX{TZ$CWN2;7l#Cm%ZPk#~(?G0g0MamE>_);K^4k0DBM
zk6niQrT9v;K!=$5tAuW$pf34kx&Q8A4u@_29qUkh6c(J(Kv|0=-L6`jf
z#;AE=oHnaV>fP2gb$GcxSEXv(R1cV0X@=WlQ<>wTW!F^o^Olvw3Y@>F7|Q-#oFcOC
zxjA2bFn_&*%0dYd<#(?oxgmrIiZ*>M!w3AYd3RILLVID&~rnRegXu)
zMFmmse}4}m>IfH2ua0u_07%mV_VJk?7zCETTXlnifY)+!U8m3)d`M%~cBg;li8-M=
zPfwk!U_YBb=-V~R`*qomth=aVVGdk4p?|0J@m>0)3EF;Rpbs8{b|z>y1pSW#^vg$W
zqhnpatLnA~JXJ>ncQf@k>OXbb3>yxn=wU26aodI8=s$L*gcWp3y5wLoOSNosXZL_$
z{@xA6cjUIqt|HT7^*((}bCYgGy|0efLeFCRD$sD4uk9Zr!>PMKRs3?dIt`AUc7Nhq!*#v+I|%>QmYr(^{_#N;03vay@8$0{h!-J_a@2qhDN)B`1V6BFk@mMP}zy58UUeS21{Q8VnOUDGRS79~)
zybDo3??%V1r-!||;gS^nABSO_=PP_$Aom4#L%}bXbcFfE7^o~)gMU*0PJgdWlI#C<
zKdR=t37F@Up^lmg3?OB>n?vRv^MFM!NV@kLLEY
zAkne?!7_w3=Gi*hNtS>bCG;PMqn%_as!_u7nI|koR}a3s7iq5*>mqTQ>kZ7z
zT|c`>oC_OeZkZXU?IKyuY><|Fh~Mla&8Q7ho`MZMN!w{sgkYp&eh}r`Ntl=$WSnw&
zE5<>xq}U+knAAr3c7Kv4_r@vj>JaH&O-S-@oZ3qKqC#Ghr3wehL*tWhaO!infQ)Iv
z4@|~EvV7wxWnd^2j)Q#?bBuAyV6GSk2{V?1oKr#%#W)B`XAUwizT8(qH_20M1#Z;yffxqmI%}p8zY)L()X+AZ@st#hu*3H-B&&xPe>Q4czB=M;TuN
zt<(;Z=5t3$r;o!G=OJOzca$~sI}-nlhL6rs*6F)*#kxqAJszdK^5e3$lQ_9N$Z2Jy
zSFDSW^z$gW^F6W(c}bS69wZ;X$yL6cq^ayNN-Nu=;yfhIbuZ@i667n^MbWhQV#X1ey3Do;}C27x46n{Sl@q
ztH1KhF}4;UYCVoN3V%WHFFrwJ-*u5kNXo?|5K;jr0lEM%f6Ki(bDbm<5CFKHjD3o6
zkU(WX&NnA{RtqD=H3-Bs+|Vx=^o^i_nBt~pplk~XgntVVhEeB0EsPY^HITDB`aSg+
z?I>Vt!{5$`ZeiHs2>^|woS--l#q<*7bkECQv9^#zs6b3%t1hUeRYbK%x%NESUc^?S
zY$eZDl1TpmV$1lB%tSqz!CjdJ`ZAMsW(N0W7U<4Q*q;sDp-rGi8(Eh&c%L?*PHlv}
z+Th*Vgn#V#fLa)1JWl}0lgIEx5-k;kVv;CPODjdi
zR)6F?{;U1?OMT>ZzW*XW`U-zkbO9*1z8b6~(r%G+*f6409YNeh>!UN)K$L%eK)yaeo|?)ltS#K{r7fRlt8L~`!X;YaDQNA$oWe}B|b
zbjT4q-pCzn6dY;94l{Dc7zGCyd%+|r$E22|98;X(1)_LiI9>?SO2YV-B1P~8DjZ&2ePwA7CBvN?-SS(p6<(Xq_NOSzw
zL>mKq)X99L8vjCdRGSi$SW;7JX@8}twiLkQ4VUrH9I;6TH@gr_E1+SY8TAE&K6cQU
zJz^|5Ts%r&0YsDTm0DUUu5%?PnpClrYa_Y#6+pIB%u>9ML?T&oURza5dFB`!@VxN0
z`J$)5Cq*tXhb_mYJTpb*vcyc2T9iy)A&Y4Oiq<~Bv}+bNAPo>AdfO!5r(VJa3w%oOw7e(u~Y$PjK2X$8;>b+
zHo>TxiQ)fTzAV!git3li@yA?z$}v$~*-T6^>!2yuMsb}oG0Civrd%5d#n3P&Zd-qL
zP&9YHgw0L#uN)JJler3+LVx#;=OQ5CtAsn0CsqZVB+m?s)4Of=eM0gAXD&jtg_QH_
z2GXxV)~_T^MZ1C5UTJwQ5~i#Lc|SNWt`-hL(!qknZkAdV@)Depc56N6bhc`>2+7mf
zf>p7T-zwfmvW&MN@t7l5`F4_L!3
zFGFGFYx>zv)3nHR>jo|I+^YBbZ}r9eP2u#j98MrfEt`;bPRq(5O^3rxs@5sDCR6a1
ztJBNWDUr-!;bIay$}y2dGY8|@`#k#?aCkrgCL=)Y3|EwLdx6qz$Mn0|_N_02)VS
zM=h-+RLqeRx*|JjX@4b&01?183(zRXL~-FnIm;xos4N4a@-zS+FKwfOZW4$%%IRm#
zM`c?G5(SdeacV+}_mN0&NltAmG%43canVYEXW5~G{_NG8F&KECV6Blz(xq6&X)u8EAtf=T>rC
zM^pKB+A7SsxjsvAPZe;|-yr+7rIIlfF%>UIuTc+sMV+ISeN`=O`LgYGT*{ZJpqsX7
zmc62Gwt$ynU9?f<>=l)Agwho6qm9aFuc($IuBLb&y-G*z6_sNI+0+DmuSW~9wi{X7C2I?@W*b?vC2NMT
zil$jBf_BO>5F1^~@F@8$LS6@HAC_n7`?-
zYj*s8XuHHcdSsc0kpCKky6?;Vel?%UTWwq_$XKm>^V8VQR1n)99D;zW6GKNaw9FO#+jSBo`Lm`!KOa;HZFJ~XTsmyY`a_eC0d)6XKshn
raoj3mOd`{ErXM+1!;5{>nVilljdSGdLBUb-TG^Ml&a(s4LsW!4=I2Czsq&}n}=V)tAF6*Hwn
zC|jx(z>c3D-%aCC7FGy}AUg
zmyQTRU2rVEUgsit!mxi#L;woydK1Az)k)K3JqQLk#qqFh)pQ`D31PvS7Hd!pMzldn7m%O`@oDq1qSEAH;k?j96!PizF}a5;L4T8PS*p37``S(
z_i$yPpnj@5cSTl{ot
zS5Bukd)ZP&9Tk6utn&=+$v!9yx#8HXADX|eNt5D`CMpbig@<)*4#IU+p0#kZho~^9
zUS75JeU%8|j)6hwThgGQIwS;lNOfmptKvC~0sY!k$?2#kBZsa+Q0+Jd_Lyc+B~5t_
zrYes~mzMe;gf@rqpttuY5Kl)0p!P7-J_J?eH?*C73=4nPVn{6yasg4erR^zwOs;u%
zI8BcTrGRea3UoU?O+%sE&||PucFRs!ZTkoY3chATS8LilLNLuE50aHP5JyLFP&cq$
zH@;cJ1Xzs1PJ6XADNhGIqiaF1F^mI!hRy(8oB?@!?94v|=pGUV+iyVIhvCW%aAg24
z+tAi5+}M9?OKY>0=4M;ko2@mt+0x=>rOC~vHaBaHZZ@^LS!wo`R(
z2IonqXA}_J5W<30Rf9q401oJiy&*B>8JKgEK$93#o`p&EjH|(r`j9XfKHqf$8DM>j
zF1xIQ-r|fg4BVeL=-#D^Gaz}|LN_f_J^+Jv*c^YwK~~tQVal^GXAI@cgS`I-2=o3Q
z0iYTMs(GM2h_mNmlG6#C1x-ObV0KLMLs@ko#kBx|Ob7>-Vs9A(4@9Dz{
zmk?pHYneUjA{#jFksP%1LQD+1_A<%N~XXQYFceC1&rY|U#
z3E{xHyxgtFiz23UXcAQupY8*rR={$i|^_x)XyVNe+ll7lEY4|4@!
zt_;jA$*=m5mZt;yjM$B2p&Ja3drEx!6M7n@i*q1Ff}!a049xdSijB>E00Z=rHrU(y
zJ|F}HKO~r=T*$I~`_r^_VzJk{WWc8KEX*E++50d*>lN)uFXzs3q$$J$`}Ke4-~N!}
zn9@E(Q(fzS-xorFRfexi
z_338-LYMX+9-wROfxFfoQayhe)jd8uIuHU)F+N-d)hOd?)TK25p-X!Z=QO{@Ar9j}
z-oCTqSg=D<*G=`b74sg>LqD3$f3L*1Fg%jnjArdyJ^#*bNguixqj&N_+8*QA2;2c~
zCm-SL0dkFCF&gZE;lLSB);K^4k0DC%fE|X1fOAx7n0hr3r?=*;*LokDgA`&B5G7jqH2nCoWi
zQIq!vAAiv^RLjf1duds;K!=$5#XHg8}t;(dcj*|T*`qXjpJwmVq|>^OSv2mzRin53T(afE@@cv1+9As>cjdNZ7~a-p
zFa4>_Ej)MN%F|oIt}`;L(;c`x_13xTjNTGx2Y;=B77M8*S6#LJ4um8>q~)>dkF~qd
z*~wQr_Cje%)IUEouS3&jb>+F+y7UaMYUip{ZJX)=D=W=#cWf%N|F7(ts(#)wlV5@J
zC-p$d7L%_{b1w^SD;f*dZUk#tuxuk(#({|*QmjpZEU-;42fR2JwyL|V$X?7)`Qs`1
zkmg+?d05DVt>KRK#b9H#EpW(U74ORiz%Q$a$7RLGc2&-kE=P#xNArWeUbDQP?SId@i#itOZMzeC
zN^kGdCr!}y%gsJ`4BDBX-4OIY4$w~@wT+H-{jRFp9`IBh4cy(-mhNMm4A^AvHN53ufa^iqKB=eo^srmMEAP(?Q4lYYEyLNxV;(7J1g8jl7GWIHCU@*
z7CY9;%!Pj$r$aOzE5APD(b6%2<5ietrFS6e=iTVI_w?`tXgDNA|Kl)>^L&Nx%f7zg
zZYcQql8!Jx83UE&YVZ&0-|4kUa{XWTy=uOhfO!rW>Zqx}08*B_Ib`lJZ>YKhOrzBg
z_Bb`)umVBT{cMUIEoAj%q<@P)^=NKS3$#V;6JIJtsuo6yi-pQ*CUq|5+DIV$DQ2;B
zLe;`ZF=0?K$*5DOYzxICL*?*yc~NCsC?)|ahaX2k+d?9RQG{cbBvqc7Vk)F^qPr-c
zvMnSKCzUhITBd4YB!Su}z%vP*s)bR)a{-AiEYpPqLZt}PxCvV=jDHfI3rKWfnc__{
z+dKiLahs=F7$rOxkm$lPMVr)%QOcPn6-u=*l0b(Q;$hAPVElA7V#P4>Jir9jbr(i=*
z(sml51dMc4Nl?C>gj%>D<0PM}7zfF8XhF(R+C%wvlIrQ=l<(^h>HC@xyjqe--N@nHV!ld*ug6Z6~o-
zGstNPJS)~kh<}HisQo>fAImXrk
zM6JisM&U0A{>3MVJf2)=tw^~Tkrfqi5}>6G^S9g^CV$sS0^w~r@2u0U7DkH8a*JuW
zK~gY08jWu;#a-ZA*%lILc_R#?^tW0VDJu0XXSw-3b&GZsup0MQ;L%mLttSr9IAY?8
z^H5AeTu%4AJd3r3B--6#3R{R=Ev+J|J<7G`$@U_)5@joSwvt4W;1FBJcVs5&$qeqw
zEYO#ktba2zxHq#vcV@!=Y~T)U0zKNuy0pRjv`uuJhC4RxrbKkwU5#dOx5Q?;~GT=X3v8YSYDYon+lyqx3ez1cG+ii*XH8Ky1v
z$}y2d@f{$UMd#JhN^)^|l#pFEJ3bPr(2IE;*?)TFnJK1JFD4pQ?UikzsOr2Jf6}{G
zj){c2_7D$ylx7D-}yZq3(tFE04bZ56+kVFF`g%Ye!3{-{~3J<|*9c3GHuSc7Ha+_cVCv9HeY)E(aBK6PULFt;aIugIXy-
z@+=Q%HLTnaiuaK)83f8a=8sT}gXF0qP+BY5f#Q7xWRpPI?IaW`;3R3*36y-wRG~Nz
ziPKWRyv()>+eKT_RiKq%(_N@oDFU)wpf$3SW2k_Wgy}Hk+@U>P(%oF>J}!O-mw#yQ
z7Pf1P+pi_qsnzG_kaA4PIh13HGrT|)FAT>EL8&<~zGY4i6?7Arze8RRKJ|xMDL`PN
zk4<%9XRLk!k|YO_SHMapqL$VuRf+JFK1oTUq#^)|m4!rk<`^5&9KSWu#sHs@F`vc7
zzlj@@n}|uQ%qD7SrD!S>fX5py<9{DkVfTOT#xJI(*aY)2)#mr}Bea?G>>
z5KU=T)Y3}vv?_9|mJByfUN)DqO6CwFuAhKoN7!~rys!#&7rAJjp;V%gO#V3f|DSz-S$f3>0
z!fnSUYmJ;L^GLENe2(DfK0zL3xFQT!pW#YCE*miyqjRSM&KQ3KkTxDu{!SF4YJ9
zTYWaaDJ(w8;RGZqSvksCl#xN2_SqVt#z|R^DR@`v^K|+oX&6|zSlR~Vm`IX`0pr>G
zJo^~WoN+-+<)&^>ODn}QNXfaTd{fFYkT4AcK;vX?P)jQbvoOdB-NX%QX(dTg3xI7-
zf}tD}#glQ#S*Fxo%6~EtnsNc)$pqm7V!Q}LF9x-KGNR&ZCPGhIsP_B*QS;zpM
zWosmjG3hwu3}ZGAWf>@*=uFP=upBKO;-iI?uSd?~rv6c$nc~^i0Jdz|sx00rlaxTt
zFs33$A+(rhfuX`Wy1dY^h`{Escto
zqt~RPc}1P0m3>t$?R+@f>$uE^qk?YQmIUV&b+gmxDAq+A)8@RQQcmU_#rtSus-0I<
z%gNTGcpo8|dbZT1SkA_Z@N6uQ(PsgH3(xZCvm~&V;|Y+jjTzOSCpC
u&)g$1$8oQSF-_0E{K&Z)UhJFRG^KZ0AWi3l-;2&!(f8tXUi?2WXc8*pp8o{^
diff --git a/docs/build/html/python/_autosummary/mlx.core.Device.html b/docs/build/html/python/_autosummary/mlx.core.Device.html
index ca731d049..440126ef2 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Device.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Device.html
@@ -9,7 +9,7 @@
- mlx.core.Device — MLX 0.0.5 documentation
+ mlx.core.Device — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.Dtype.html b/docs/build/html/python/_autosummary/mlx.core.Dtype.html
index 756f844a4..d88144d99 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Dtype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Dtype.html
@@ -9,7 +9,7 @@
- mlx.core.Dtype — MLX 0.0.5 documentation
+ mlx.core.Dtype — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.Stream.html b/docs/build/html/python/_autosummary/mlx.core.Stream.html
index 809a60a7d..c60b3051b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Stream.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Stream.html
@@ -9,7 +9,7 @@
- mlx.core.Stream — MLX 0.0.5 documentation
+ mlx.core.Stream — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.abs.html b/docs/build/html/python/_autosummary/mlx.core.abs.html
index bc7d405e0..8d825ce07 100644
--- a/docs/build/html/python/_autosummary/mlx.core.abs.html
+++ b/docs/build/html/python/_autosummary/mlx.core.abs.html
@@ -9,7 +9,7 @@
- mlx.core.abs — MLX 0.0.5 documentation
+ mlx.core.abs — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.add.html b/docs/build/html/python/_autosummary/mlx.core.add.html
index 3ac60a81f..5c7c12ada 100644
--- a/docs/build/html/python/_autosummary/mlx.core.add.html
+++ b/docs/build/html/python/_autosummary/mlx.core.add.html
@@ -9,7 +9,7 @@
- mlx.core.add — MLX 0.0.5 documentation
+ mlx.core.add — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.all.html b/docs/build/html/python/_autosummary/mlx.core.all.html
index 196395cf6..290f4bc14 100644
--- a/docs/build/html/python/_autosummary/mlx.core.all.html
+++ b/docs/build/html/python/_autosummary/mlx.core.all.html
@@ -9,7 +9,7 @@
- mlx.core.all — MLX 0.0.5 documentation
+ mlx.core.all — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.allclose.html b/docs/build/html/python/_autosummary/mlx.core.allclose.html
index ced3e5704..55d5f9e20 100644
--- a/docs/build/html/python/_autosummary/mlx.core.allclose.html
+++ b/docs/build/html/python/_autosummary/mlx.core.allclose.html
@@ -9,7 +9,7 @@
- mlx.core.allclose — MLX 0.0.5 documentation
+ mlx.core.allclose — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.any.html b/docs/build/html/python/_autosummary/mlx.core.any.html
index 8d26a4fda..a34b8d8b1 100644
--- a/docs/build/html/python/_autosummary/mlx.core.any.html
+++ b/docs/build/html/python/_autosummary/mlx.core.any.html
@@ -9,7 +9,7 @@
- mlx.core.any — MLX 0.0.5 documentation
+ mlx.core.any — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arange.html b/docs/build/html/python/_autosummary/mlx.core.arange.html
index c7afb60c0..1145a257b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arange.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arange.html
@@ -9,7 +9,7 @@
- mlx.core.arange — MLX 0.0.5 documentation
+ mlx.core.arange — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arccos.html b/docs/build/html/python/_autosummary/mlx.core.arccos.html
index 66f1541d1..1be44b3e0 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arccos.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arccos.html
@@ -9,7 +9,7 @@
- mlx.core.arccos — MLX 0.0.5 documentation
+ mlx.core.arccos — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arccosh.html b/docs/build/html/python/_autosummary/mlx.core.arccosh.html
index 8464c4f0c..81af1b973 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arccosh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arccosh.html
@@ -9,7 +9,7 @@
- mlx.core.arccosh — MLX 0.0.5 documentation
+ mlx.core.arccosh — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsin.html b/docs/build/html/python/_autosummary/mlx.core.arcsin.html
index 1bd171ee5..bc2ebeb49 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arcsin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arcsin.html
@@ -9,7 +9,7 @@
- mlx.core.arcsin — MLX 0.0.5 documentation
+ mlx.core.arcsin — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
index 7d51b5457..89f1910b0 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
@@ -9,7 +9,7 @@
- mlx.core.arcsinh — MLX 0.0.5 documentation
+ mlx.core.arcsinh — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arctan.html b/docs/build/html/python/_autosummary/mlx.core.arctan.html
index 76d7dbddf..fb178b438 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arctan.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arctan.html
@@ -9,7 +9,7 @@
- mlx.core.arctan — MLX 0.0.5 documentation
+ mlx.core.arctan — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.arctanh.html b/docs/build/html/python/_autosummary/mlx.core.arctanh.html
index 5d83f34c9..a6fb0fd05 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arctanh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arctanh.html
@@ -9,7 +9,7 @@
- mlx.core.arctanh — MLX 0.0.5 documentation
+ mlx.core.arctanh — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.argmax.html b/docs/build/html/python/_autosummary/mlx.core.argmax.html
index 0fbab4736..3533fe61f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argmax.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argmax.html
@@ -9,7 +9,7 @@
- mlx.core.argmax — MLX 0.0.5 documentation
+ mlx.core.argmax — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.argmin.html b/docs/build/html/python/_autosummary/mlx.core.argmin.html
index edd73a2b4..579a62bae 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argmin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argmin.html
@@ -9,7 +9,7 @@
- mlx.core.argmin — MLX 0.0.5 documentation
+ mlx.core.argmin — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.argpartition.html b/docs/build/html/python/_autosummary/mlx.core.argpartition.html
index 1c1a61b14..a9189019f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argpartition.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argpartition.html
@@ -9,7 +9,7 @@
- mlx.core.argpartition — MLX 0.0.5 documentation
+ mlx.core.argpartition — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.argsort.html b/docs/build/html/python/_autosummary/mlx.core.argsort.html
index 1586374b6..416023a3e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argsort.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argsort.html
@@ -9,7 +9,7 @@
- mlx.core.argsort — MLX 0.0.5 documentation
+ mlx.core.argsort — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.T.html b/docs/build/html/python/_autosummary/mlx.core.array.T.html
index 64b513f93..d2118d16f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.T.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.T.html
@@ -9,7 +9,7 @@
- mlx.core.array.T — MLX 0.0.5 documentation
+ mlx.core.array.T — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.abs.html b/docs/build/html/python/_autosummary/mlx.core.array.abs.html
index 730913f50..796568de4 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.abs.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.abs.html
@@ -9,7 +9,7 @@
- mlx.core.array.abs — MLX 0.0.5 documentation
+ mlx.core.array.abs — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.all.html b/docs/build/html/python/_autosummary/mlx.core.array.all.html
index d83b1a71d..7d3161108 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.all.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.all.html
@@ -9,7 +9,7 @@
- mlx.core.array.all — MLX 0.0.5 documentation
+ mlx.core.array.all — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.any.html b/docs/build/html/python/_autosummary/mlx.core.array.any.html
index e5a21b5f7..e21f39121 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.any.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.any.html
@@ -9,7 +9,7 @@
- mlx.core.array.any — MLX 0.0.5 documentation
+ mlx.core.array.any — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.argmax.html b/docs/build/html/python/_autosummary/mlx.core.array.argmax.html
index 4b856e716..8f3ad6820 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.argmax.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.argmax.html
@@ -9,7 +9,7 @@
- mlx.core.array.argmax — MLX 0.0.5 documentation
+ mlx.core.array.argmax — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.argmin.html b/docs/build/html/python/_autosummary/mlx.core.array.argmin.html
index 17da23217..1ce4d228c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.argmin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.argmin.html
@@ -9,7 +9,7 @@
- mlx.core.array.argmin — MLX 0.0.5 documentation
+ mlx.core.array.argmin — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.astype.html b/docs/build/html/python/_autosummary/mlx.core.array.astype.html
index 7992bbf1f..a93100610 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.astype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.astype.html
@@ -9,7 +9,7 @@
- mlx.core.array.astype — MLX 0.0.5 documentation
+ mlx.core.array.astype — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.cos.html b/docs/build/html/python/_autosummary/mlx.core.array.cos.html
index 7979bb620..6b57699fb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.cos.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.cos.html
@@ -9,7 +9,7 @@
- mlx.core.array.cos — MLX 0.0.5 documentation
+ mlx.core.array.cos — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.dtype.html b/docs/build/html/python/_autosummary/mlx.core.array.dtype.html
index 339247c91..ad55d7cd7 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.dtype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.dtype.html
@@ -9,7 +9,7 @@
- mlx.core.array.dtype — MLX 0.0.5 documentation
+ mlx.core.array.dtype — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.exp.html b/docs/build/html/python/_autosummary/mlx.core.array.exp.html
index 9deb0eb6a..de030bdef 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.exp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.exp.html
@@ -9,7 +9,7 @@
- mlx.core.array.exp — MLX 0.0.5 documentation
+ mlx.core.array.exp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.html b/docs/build/html/python/_autosummary/mlx.core.array.html
index 29ef2f214..cc9f7b844 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.html
@@ -9,7 +9,7 @@
- mlx.core.array — MLX 0.0.5 documentation
+ mlx.core.array — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -675,37 +687,40 @@ document.write(`
reshape
(self, *args[, stream])
Equivalent to reshape()
but the shape can be passed either as a tuple or as separate arguments.
-rsqrt
(self, *[, stream])
+round
(self, /[, decimals, stream])
+See round()
.
+
+rsqrt
(self, *[, stream])
See rsqrt()
.
-sin
(self, *[, stream])
+sin
(self, *[, stream])
See sin()
.
-split
(self, indices_or_sections[, axis, stream])
+split
(self, indices_or_sections[, axis, stream])
See split()
.
-sqrt
(self, *[, stream])
+sqrt
(self, *[, stream])
See sqrt()
.
-square
(self, *[, stream])
+square
(self, *[, stream])
See square()
.
-squeeze
(self[, axis, stream])
+squeeze
(self[, axis, stream])
See squeeze()
.
-sum
(self[, axis, keepdims, stream])
+sum
(self[, axis, keepdims, stream])
See sum()
.
-swapaxes
(self, axis1, axis2, *[, stream])
+swapaxes
(self, axis1, axis2, *[, stream])
See moveaxis()
.
-tolist
(self)
+tolist
(self)
Convert the array to a Python list
.
-transpose
(self, *args[, stream])
+transpose
(self, *args[, stream])
Equivalent to transpose()
but the axes can be passed either as a tuple or as separate arguments.
-var
(self[, axis, keepdims, ddof, stream])
+var
(self[, axis, keepdims, ddof, stream])
See var()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.item.html b/docs/build/html/python/_autosummary/mlx.core.array.item.html
index 1bcb7c269..5853461b1 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.item.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.item.html
@@ -9,7 +9,7 @@
- mlx.core.array.item — MLX 0.0.5 documentation
+ mlx.core.array.item — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.log.html b/docs/build/html/python/_autosummary/mlx.core.array.log.html
index 0d32286af..abeff626f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.log.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.log.html
@@ -9,7 +9,7 @@
- mlx.core.array.log — MLX 0.0.5 documentation
+ mlx.core.array.log — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.log1p.html b/docs/build/html/python/_autosummary/mlx.core.array.log1p.html
index 3b15ea948..3a5e3f4e0 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.log1p.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.log1p.html
@@ -9,7 +9,7 @@
- mlx.core.array.log1p — MLX 0.0.5 documentation
+ mlx.core.array.log1p — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html b/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html
index c4b6b64da..d17c3f047 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html
@@ -9,7 +9,7 @@
- mlx.core.array.logsumexp — MLX 0.0.5 documentation
+ mlx.core.array.logsumexp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.max.html b/docs/build/html/python/_autosummary/mlx.core.array.max.html
index dc67553e0..352107074 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.max.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.max.html
@@ -9,7 +9,7 @@
- mlx.core.array.max — MLX 0.0.5 documentation
+ mlx.core.array.max — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.mean.html b/docs/build/html/python/_autosummary/mlx.core.array.mean.html
index d1ab07c8f..0405fa1cd 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.mean.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.mean.html
@@ -9,7 +9,7 @@
- mlx.core.array.mean — MLX 0.0.5 documentation
+ mlx.core.array.mean — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.min.html b/docs/build/html/python/_autosummary/mlx.core.array.min.html
index d71765f11..59eed9cec 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.min.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.min.html
@@ -9,7 +9,7 @@
- mlx.core.array.min — MLX 0.0.5 documentation
+ mlx.core.array.min — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.ndim.html b/docs/build/html/python/_autosummary/mlx.core.array.ndim.html
index d68c70d70..2e1cb1c03 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.ndim.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.ndim.html
@@ -9,7 +9,7 @@
- mlx.core.array.ndim — MLX 0.0.5 documentation
+ mlx.core.array.ndim — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.prod.html b/docs/build/html/python/_autosummary/mlx.core.array.prod.html
index 0cde08008..9a6a543a5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.prod.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.prod.html
@@ -9,7 +9,7 @@
- mlx.core.array.prod — MLX 0.0.5 documentation
+ mlx.core.array.prod — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html b/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html
index fc2e38179..ef63ddd29 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html
@@ -9,7 +9,7 @@
- mlx.core.array.reciprocal — MLX 0.0.5 documentation
+ mlx.core.array.reciprocal — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.reshape.html b/docs/build/html/python/_autosummary/mlx.core.array.reshape.html
index 16b114c06..d70f33742 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.reshape.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.reshape.html
@@ -9,7 +9,7 @@
- mlx.core.array.reshape — MLX 0.0.5 documentation
+ mlx.core.array.reshape — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -613,11 +625,11 @@ tuple or as separate arguments.
next
-
mlx.core.array.rsqrt
+
mlx.core.array.round
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.round.html b/docs/build/html/python/_autosummary/mlx.core.array.round.html
new file mode 100644
index 000000000..2a402f90e
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.array.round.html
@@ -0,0 +1,690 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.array.round — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.array.round
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html b/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html
index 0fbe39e6e..970f4f067 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html
@@ -9,7 +9,7 @@
- mlx.core.array.rsqrt — MLX 0.0.5 documentation
+ mlx.core.array.rsqrt — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -602,12 +614,12 @@ document.write(`
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.sin.html b/docs/build/html/python/_autosummary/mlx.core.array.sin.html
index b8f0562ee..e61dbdeaa 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.sin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.sin.html
@@ -9,7 +9,7 @@
- mlx.core.array.sin — MLX 0.0.5 documentation
+ mlx.core.array.sin — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.size.html b/docs/build/html/python/_autosummary/mlx.core.array.size.html
index 43efdc92a..20b1d4248 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.size.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.size.html
@@ -9,7 +9,7 @@
- mlx.core.array.size — MLX 0.0.5 documentation
+ mlx.core.array.size — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.split.html b/docs/build/html/python/_autosummary/mlx.core.array.split.html
index c9021446b..2518252e5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.split.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.split.html
@@ -9,7 +9,7 @@
- mlx.core.array.split — MLX 0.0.5 documentation
+ mlx.core.array.split — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html b/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html
index 2a2527ca9..192a13011 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html
@@ -9,7 +9,7 @@
- mlx.core.array.sqrt — MLX 0.0.5 documentation
+ mlx.core.array.sqrt — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.square.html b/docs/build/html/python/_autosummary/mlx.core.array.square.html
index ea97c1c01..106b388cf 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.square.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.square.html
@@ -9,7 +9,7 @@
- mlx.core.array.square — MLX 0.0.5 documentation
+ mlx.core.array.square — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.sum.html b/docs/build/html/python/_autosummary/mlx.core.array.sum.html
index 079fed512..501f00153 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.sum.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.sum.html
@@ -9,7 +9,7 @@
- mlx.core.array.sum — MLX 0.0.5 documentation
+ mlx.core.array.sum — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.tolist.html b/docs/build/html/python/_autosummary/mlx.core.array.tolist.html
index ec1a6897f..4fae72d7e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.tolist.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.tolist.html
@@ -9,7 +9,7 @@
- mlx.core.array.tolist — MLX 0.0.5 documentation
+ mlx.core.array.tolist — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.transpose.html b/docs/build/html/python/_autosummary/mlx.core.array.transpose.html
index ec77684fc..a0d5d5db1 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.transpose.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.transpose.html
@@ -9,7 +9,7 @@
- mlx.core.array.transpose — MLX 0.0.5 documentation
+ mlx.core.array.transpose — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.var.html b/docs/build/html/python/_autosummary/mlx.core.array.var.html
index 5b53bcb3c..c20dd2132 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.var.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.var.html
@@ -9,7 +9,7 @@
- mlx.core.array.var — MLX 0.0.5 documentation
+ mlx.core.array.var — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.array_equal.html b/docs/build/html/python/_autosummary/mlx.core.array_equal.html
index f0adff233..dd489037f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array_equal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array_equal.html
@@ -9,7 +9,7 @@
- mlx.core.array_equal — MLX 0.0.5 documentation
+ mlx.core.array_equal — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html b/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html
index b3a988982..7da8b7a72 100644
--- a/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html
+++ b/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html
@@ -9,7 +9,7 @@
- mlx.core.broadcast_to — MLX 0.0.5 documentation
+ mlx.core.broadcast_to — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.ceil.html b/docs/build/html/python/_autosummary/mlx.core.ceil.html
index 76ce37d1f..1560d69d8 100644
--- a/docs/build/html/python/_autosummary/mlx.core.ceil.html
+++ b/docs/build/html/python/_autosummary/mlx.core.ceil.html
@@ -9,7 +9,7 @@
- mlx.core.ceil — MLX 0.0.5 documentation
+ mlx.core.ceil — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -622,11 +634,11 @@ document.write(`
next
-
mlx.core.concatenate
+
mlx.core.clip
diff --git a/docs/build/html/python/_autosummary/mlx.core.clip.html b/docs/build/html/python/_autosummary/mlx.core.clip.html
new file mode 100644
index 000000000..903959f58
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.clip.html
@@ -0,0 +1,708 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.clip — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+mlx.core.clip
+
+
+mlx.core. clip ( a : array , / , a_min : Union [ scalar , array , None ] , a_max : Union [ scalar , array , None ] , * , stream : Union [ None , Stream , Device ] = None ) → array
+Clip the values of the array between the given minimum and maximum.
+If either a_min
or a_max
are None
, then corresponding edge
+is ignored. At least one of a_min
and a_max
cannot be None
.
+The input a
and the limits must broadcast with one another.
+
+Parameters:
+
+
+Returns:
+The clipped array.
+
+Return type:
+array
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.concatenate.html b/docs/build/html/python/_autosummary/mlx.core.concatenate.html
index 244ea292f..ab5e44838 100644
--- a/docs/build/html/python/_autosummary/mlx.core.concatenate.html
+++ b/docs/build/html/python/_autosummary/mlx.core.concatenate.html
@@ -9,7 +9,7 @@
- mlx.core.concatenate — MLX 0.0.5 documentation
+ mlx.core.concatenate — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -617,12 +629,12 @@ unspecified defaults to
previous
-
mlx.core.ceil
+
mlx.core.clip
- mlx.core.conv1d — MLX 0.0.5 documentation
+ mlx.core.conv1d — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.conv2d.html b/docs/build/html/python/_autosummary/mlx.core.conv2d.html
index 55d374ccb..21768ccdc 100644
--- a/docs/build/html/python/_autosummary/mlx.core.conv2d.html
+++ b/docs/build/html/python/_autosummary/mlx.core.conv2d.html
@@ -9,7 +9,7 @@
- mlx.core.conv2d — MLX 0.0.5 documentation
+ mlx.core.conv2d — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.convolve.html b/docs/build/html/python/_autosummary/mlx.core.convolve.html
index 1898ee76b..22fd4ddca 100644
--- a/docs/build/html/python/_autosummary/mlx.core.convolve.html
+++ b/docs/build/html/python/_autosummary/mlx.core.convolve.html
@@ -9,7 +9,7 @@
- mlx.core.convolve — MLX 0.0.5 documentation
+ mlx.core.convolve — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.cos.html b/docs/build/html/python/_autosummary/mlx.core.cos.html
index 6dc0ae2b1..4ee6661d6 100644
--- a/docs/build/html/python/_autosummary/mlx.core.cos.html
+++ b/docs/build/html/python/_autosummary/mlx.core.cos.html
@@ -9,7 +9,7 @@
- mlx.core.cos — MLX 0.0.5 documentation
+ mlx.core.cos — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.cosh.html b/docs/build/html/python/_autosummary/mlx.core.cosh.html
index 6b3503214..f286419f5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.cosh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.cosh.html
@@ -9,7 +9,7 @@
- mlx.core.cosh — MLX 0.0.5 documentation
+ mlx.core.cosh — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -622,11 +634,11 @@ document.write(`
next
-
mlx.core.divide
+
mlx.core.dequantize
diff --git a/docs/build/html/python/_autosummary/mlx.core.default_device.html b/docs/build/html/python/_autosummary/mlx.core.default_device.html
index 33213a028..9d52673cb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.default_device.html
+++ b/docs/build/html/python/_autosummary/mlx.core.default_device.html
@@ -9,7 +9,7 @@
- mlx.core.default_device — MLX 0.0.5 documentation
+ mlx.core.default_device — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.default_stream.html b/docs/build/html/python/_autosummary/mlx.core.default_stream.html
index 4607c7ead..4fbdf4668 100644
--- a/docs/build/html/python/_autosummary/mlx.core.default_stream.html
+++ b/docs/build/html/python/_autosummary/mlx.core.default_stream.html
@@ -9,7 +9,7 @@
- mlx.core.default_stream — MLX 0.0.5 documentation
+ mlx.core.default_stream — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.dequantize.html b/docs/build/html/python/_autosummary/mlx.core.dequantize.html
new file mode 100644
index 000000000..9aacc3df8
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.dequantize.html
@@ -0,0 +1,716 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.dequantize — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.dequantize
+
+
+
+
+
+
+
+
+
+
+mlx.core.dequantize
+
+
+mlx.core. dequantize ( w : array , / , scales : array , biases : array , group_size : int = 64 , bits : int = 4 , * , stream : Union [ None , Stream , Device ] = None ) → array
+Dequantize the matrix w
using the provided scales
and
+biases
and the group_size
and bits
configuration.
+Formally, given the notation in quantize()
, we compute
+\(w_i\) from \(\hat{w_i}\) and corresponding \(s\) and
+\(\beta\) as follows
+
+\[w_i = s \hat{w_i} - \beta\]
+
+Parameters:
+
+w (array ) – Matrix to be quantized
+scales (array ) – The scales to use per group_size
elements of w
+biases (array ) – The biases to use per group_size
elements of w
+group_size (int , optional ) – The size of the group in w
that shares a
+scale and bias. (default: 64)
+bits (int , optional ) – The number of bits occupied by each element in
+w
. (default: 4)
+
+
+Returns:
+The dequantized version of w
+
+Return type:
+result (array )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.divide.html b/docs/build/html/python/_autosummary/mlx.core.divide.html
index 690fc0987..1deac978c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.divide.html
+++ b/docs/build/html/python/_autosummary/mlx.core.divide.html
@@ -9,7 +9,7 @@
- mlx.core.divide — MLX 0.0.5 documentation
+ mlx.core.divide — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -618,12 +630,12 @@ input arrays can also be scalars.
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.erf.html b/docs/build/html/python/_autosummary/mlx.core.erf.html
index 58774d58f..4a43666b9 100644
--- a/docs/build/html/python/_autosummary/mlx.core.erf.html
+++ b/docs/build/html/python/_autosummary/mlx.core.erf.html
@@ -9,7 +9,7 @@
- mlx.core.erf — MLX 0.0.5 documentation
+ mlx.core.erf — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.erfinv.html b/docs/build/html/python/_autosummary/mlx.core.erfinv.html
index afc81fa6f..96d9adc09 100644
--- a/docs/build/html/python/_autosummary/mlx.core.erfinv.html
+++ b/docs/build/html/python/_autosummary/mlx.core.erfinv.html
@@ -9,7 +9,7 @@
- mlx.core.erfinv — MLX 0.0.5 documentation
+ mlx.core.erfinv — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.eval.html b/docs/build/html/python/_autosummary/mlx.core.eval.html
index 11c63df2e..6b6010fd7 100644
--- a/docs/build/html/python/_autosummary/mlx.core.eval.html
+++ b/docs/build/html/python/_autosummary/mlx.core.eval.html
@@ -9,7 +9,7 @@
- mlx.core.eval — MLX 0.0.5 documentation
+ mlx.core.eval — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.exp.html b/docs/build/html/python/_autosummary/mlx.core.exp.html
index 4610fa472..d7c3d4630 100644
--- a/docs/build/html/python/_autosummary/mlx.core.exp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.exp.html
@@ -9,7 +9,7 @@
- mlx.core.exp — MLX 0.0.5 documentation
+ mlx.core.exp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.expand_dims.html b/docs/build/html/python/_autosummary/mlx.core.expand_dims.html
index 4ae37ab1a..6f99e20dc 100644
--- a/docs/build/html/python/_autosummary/mlx.core.expand_dims.html
+++ b/docs/build/html/python/_autosummary/mlx.core.expand_dims.html
@@ -9,7 +9,7 @@
- mlx.core.expand_dims — MLX 0.0.5 documentation
+ mlx.core.expand_dims — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.eye.html b/docs/build/html/python/_autosummary/mlx.core.eye.html
index 7c30aae81..af3c6c5c5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.eye.html
+++ b/docs/build/html/python/_autosummary/mlx.core.eye.html
@@ -9,7 +9,7 @@
- mlx.core.eye — MLX 0.0.5 documentation
+ mlx.core.eye — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -628,11 +640,11 @@ document.write(`
next
-
mlx.core.floor
+
mlx.core.flatten
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fft.html b/docs/build/html/python/_autosummary/mlx.core.fft.fft.html
index 75da4a492..34b2b4092 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.fft.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.fft.html
@@ -9,7 +9,7 @@
- mlx.core.fft.fft — MLX 0.0.5 documentation
+ mlx.core.fft.fft — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html
index b1b94ad44..6e7488689 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html
@@ -9,7 +9,7 @@
- mlx.core.fft.fft2 — MLX 0.0.5 documentation
+ mlx.core.fft.fft2 — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html
index 8b32167f9..8484b1601 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html
@@ -9,7 +9,7 @@
- mlx.core.fft.fftn — MLX 0.0.5 documentation
+ mlx.core.fft.fftn — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html
index 1a4c122b2..666f78c58 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html
@@ -9,7 +9,7 @@
- mlx.core.fft.ifft — MLX 0.0.5 documentation
+ mlx.core.fft.ifft — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html
index fb3d0dde9..0a081dbf6 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html
@@ -9,7 +9,7 @@
- mlx.core.fft.ifft2 — MLX 0.0.5 documentation
+ mlx.core.fft.ifft2 — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html
index 30418cf8f..91d8590df 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html
@@ -9,7 +9,7 @@
- mlx.core.fft.ifftn — MLX 0.0.5 documentation
+ mlx.core.fft.ifftn — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html b/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html
index df33345db..c4dfdb8f9 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html
@@ -9,7 +9,7 @@
- mlx.core.fft.irfft — MLX 0.0.5 documentation
+ mlx.core.fft.irfft — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html
index e03c9dacc..90aa90587 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html
@@ -9,7 +9,7 @@
- mlx.core.fft.irfft2 — MLX 0.0.5 documentation
+ mlx.core.fft.irfft2 — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html
index 5ba41b9ea..e00315a79 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html
@@ -9,7 +9,7 @@
- mlx.core.fft.irfftn — MLX 0.0.5 documentation
+ mlx.core.fft.irfftn — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html b/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html
index a1dc56d41..a4533e1da 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html
@@ -9,7 +9,7 @@
- mlx.core.fft.rfft — MLX 0.0.5 documentation
+ mlx.core.fft.rfft — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html
index dd8a20d4b..ea456d647 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html
@@ -9,7 +9,7 @@
- mlx.core.fft.rfft2 — MLX 0.0.5 documentation
+ mlx.core.fft.rfft2 — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html
index 1cb1cdd58..b350ec6d3 100644
--- a/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html
+++ b/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html
@@ -9,7 +9,7 @@
- mlx.core.fft.rfftn — MLX 0.0.5 documentation
+ mlx.core.fft.rfftn — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.flatten.html b/docs/build/html/python/_autosummary/mlx.core.flatten.html
index fea897b04..c8ee1f035 100644
--- a/docs/build/html/python/_autosummary/mlx.core.flatten.html
+++ b/docs/build/html/python/_autosummary/mlx.core.flatten.html
@@ -9,7 +9,7 @@
- mlx.core.flatten — MLX 0.0.5 documentation
+ mlx.core.flatten — MLX 0.0.6 documentation
@@ -46,8 +46,8 @@
-
-
+
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -619,20 +631,20 @@ in which case the default stream of the default device is used.
previous
-
mlx.core.floor
+
mlx.core.eye
next
-
mlx.core.full
+
mlx.core.floor
diff --git a/docs/build/html/python/_autosummary/mlx.core.floor.html b/docs/build/html/python/_autosummary/mlx.core.floor.html
index 5c0298c3a..69d069c32 100644
--- a/docs/build/html/python/_autosummary/mlx.core.floor.html
+++ b/docs/build/html/python/_autosummary/mlx.core.floor.html
@@ -9,7 +9,7 @@
-
mlx.core.floor — MLX 0.0.5 documentation
+
mlx.core.floor — MLX 0.0.6 documentation
@@ -46,8 +46,8 @@
-
-
+
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -613,20 +625,20 @@ document.write(`
previous
-
mlx.core.eye
+
mlx.core.flatten
next
-
mlx.core.flatten
+
mlx.core.floor_divide
diff --git a/docs/build/html/python/_autosummary/mlx.core.floor_divide.html b/docs/build/html/python/_autosummary/mlx.core.floor_divide.html
new file mode 100644
index 000000000..997dcf628
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.floor_divide.html
@@ -0,0 +1,706 @@
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.floor_divide — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.floor_divide
+
+
+
+
+
+
+
+
+
+
+mlx.core.floor_divide
+
+
+mlx.core. floor_divide ( a : Union [ scalar , array ] , b : Union [ scalar , array ] , stream : Union [ None , Stream , Device ] = None ) → array
+Element-wise integer division.
+If either array is a floating point type then it is equivalent to
+calling floor()
after divide()
.
+
+Parameters:
+
+
+Returns:
+The quotient a // b
.
+
+Return type:
+array
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.full.html b/docs/build/html/python/_autosummary/mlx.core.full.html
index 89d7ef081..d0500f91c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.full.html
+++ b/docs/build/html/python/_autosummary/mlx.core.full.html
@@ -9,7 +9,7 @@
-
mlx.core.full — MLX 0.0.5 documentation
+
mlx.core.full — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -620,12 +632,12 @@ unspecified the output type is inferred from
previous
-
mlx.core.flatten
+
mlx.core.floor_divide
- mlx.core.grad — MLX 0.0.5 documentation
+ mlx.core.grad — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.greater.html b/docs/build/html/python/_autosummary/mlx.core.greater.html
index dc56810f4..e09f2646b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.greater.html
+++ b/docs/build/html/python/_autosummary/mlx.core.greater.html
@@ -9,7 +9,7 @@
- mlx.core.greater — MLX 0.0.5 documentation
+ mlx.core.greater — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.greater_equal.html b/docs/build/html/python/_autosummary/mlx.core.greater_equal.html
index 235162849..4d10eee8d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.greater_equal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.greater_equal.html
@@ -9,7 +9,7 @@
- mlx.core.greater_equal — MLX 0.0.5 documentation
+ mlx.core.greater_equal — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.identity.html b/docs/build/html/python/_autosummary/mlx.core.identity.html
index 6169d2b01..2b41b4db3 100644
--- a/docs/build/html/python/_autosummary/mlx.core.identity.html
+++ b/docs/build/html/python/_autosummary/mlx.core.identity.html
@@ -9,7 +9,7 @@
- mlx.core.identity — MLX 0.0.5 documentation
+ mlx.core.identity — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.jvp.html b/docs/build/html/python/_autosummary/mlx.core.jvp.html
index aac07086b..dc4910230 100644
--- a/docs/build/html/python/_autosummary/mlx.core.jvp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.jvp.html
@@ -9,7 +9,7 @@
- mlx.core.jvp — MLX 0.0.5 documentation
+ mlx.core.jvp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.less.html b/docs/build/html/python/_autosummary/mlx.core.less.html
index 15114d617..fea9c8ae4 100644
--- a/docs/build/html/python/_autosummary/mlx.core.less.html
+++ b/docs/build/html/python/_autosummary/mlx.core.less.html
@@ -9,7 +9,7 @@
- mlx.core.less — MLX 0.0.5 documentation
+ mlx.core.less — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.less_equal.html b/docs/build/html/python/_autosummary/mlx.core.less_equal.html
index 4381b417e..228ec3791 100644
--- a/docs/build/html/python/_autosummary/mlx.core.less_equal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.less_equal.html
@@ -9,7 +9,7 @@
- mlx.core.less_equal — MLX 0.0.5 documentation
+ mlx.core.less_equal — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -627,11 +639,11 @@ Either or both input arrays can also be scalars.
next
-
mlx.core.load
+
mlx.core.linspace
diff --git a/docs/build/html/python/_autosummary/mlx.core.linspace.html b/docs/build/html/python/_autosummary/mlx.core.linspace.html
new file mode 100644
index 000000000..e8e4f67bf
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.linspace.html
@@ -0,0 +1,707 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.linspace — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.linspace
+
+
+
+
+
+
+
+
+
+
+mlx.core.linspace
+
+
+mlx.core. linspace ( start , stop , num : Optional [ int ] = 50 , dtype : Optional [ Dtype ] = float32 , stream : Union [ None , Stream , Device ] = None ) → array
+Generate num
evenly spaced numbers over interval [start, stop]
.
+
+Parameters:
+
+start (scalar ) – Starting value.
+stop (scalar ) – Stopping value.
+num (int , optional ) – Number of samples, defaults to 50
.
+dtype (Dtype , optional ) – Specifies the data type of the output,
+float32. (default to ) –
+
+
+Returns:
+The range of values.
+
+Return type:
+array
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.load.html b/docs/build/html/python/_autosummary/mlx.core.load.html
index 83826406a..91cd2438c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.load.html
+++ b/docs/build/html/python/_autosummary/mlx.core.load.html
@@ -9,7 +9,7 @@
- mlx.core.load — MLX 0.0.5 documentation
+ mlx.core.load — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -613,12 +625,12 @@ document.write(`
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.log10.html b/docs/build/html/python/_autosummary/mlx.core.log10.html
index ba9fe3532..e3b0f38f9 100644
--- a/docs/build/html/python/_autosummary/mlx.core.log10.html
+++ b/docs/build/html/python/_autosummary/mlx.core.log10.html
@@ -9,7 +9,7 @@
- mlx.core.log10 — MLX 0.0.5 documentation
+ mlx.core.log10 — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.log1p.html b/docs/build/html/python/_autosummary/mlx.core.log1p.html
index f913121fa..b54c2d0d3 100644
--- a/docs/build/html/python/_autosummary/mlx.core.log1p.html
+++ b/docs/build/html/python/_autosummary/mlx.core.log1p.html
@@ -9,7 +9,7 @@
- mlx.core.log1p — MLX 0.0.5 documentation
+ mlx.core.log1p — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.log2.html b/docs/build/html/python/_autosummary/mlx.core.log2.html
index b0b3c5e0b..4291e5fd1 100644
--- a/docs/build/html/python/_autosummary/mlx.core.log2.html
+++ b/docs/build/html/python/_autosummary/mlx.core.log2.html
@@ -9,7 +9,7 @@
- mlx.core.log2 — MLX 0.0.5 documentation
+ mlx.core.log2 — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.logaddexp.html b/docs/build/html/python/_autosummary/mlx.core.logaddexp.html
index fe753e116..fcc835cff 100644
--- a/docs/build/html/python/_autosummary/mlx.core.logaddexp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.logaddexp.html
@@ -9,7 +9,7 @@
- mlx.core.logaddexp — MLX 0.0.5 documentation
+ mlx.core.logaddexp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.logical_not.html b/docs/build/html/python/_autosummary/mlx.core.logical_not.html
index 6a656e74a..706429a9c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.logical_not.html
+++ b/docs/build/html/python/_autosummary/mlx.core.logical_not.html
@@ -9,7 +9,7 @@
- mlx.core.logical_not — MLX 0.0.5 documentation
+ mlx.core.logical_not — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.logsumexp.html b/docs/build/html/python/_autosummary/mlx.core.logsumexp.html
index 810c1e6a1..457326f3f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.logsumexp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.logsumexp.html
@@ -9,7 +9,7 @@
- mlx.core.logsumexp — MLX 0.0.5 documentation
+ mlx.core.logsumexp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.matmul.html b/docs/build/html/python/_autosummary/mlx.core.matmul.html
index d4c8cadf3..ad13d3e3f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.matmul.html
+++ b/docs/build/html/python/_autosummary/mlx.core.matmul.html
@@ -9,7 +9,7 @@
- mlx.core.matmul — MLX 0.0.5 documentation
+ mlx.core.matmul — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.max.html b/docs/build/html/python/_autosummary/mlx.core.max.html
index 401fa79e2..033a6d24f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.max.html
+++ b/docs/build/html/python/_autosummary/mlx.core.max.html
@@ -9,7 +9,7 @@
- mlx.core.max — MLX 0.0.5 documentation
+ mlx.core.max — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.maximum.html b/docs/build/html/python/_autosummary/mlx.core.maximum.html
index 68fbb9c95..608c5ab09 100644
--- a/docs/build/html/python/_autosummary/mlx.core.maximum.html
+++ b/docs/build/html/python/_autosummary/mlx.core.maximum.html
@@ -9,7 +9,7 @@
- mlx.core.maximum — MLX 0.0.5 documentation
+ mlx.core.maximum — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.mean.html b/docs/build/html/python/_autosummary/mlx.core.mean.html
index 048a963a0..788b33a9c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.mean.html
+++ b/docs/build/html/python/_autosummary/mlx.core.mean.html
@@ -9,7 +9,7 @@
- mlx.core.mean — MLX 0.0.5 documentation
+ mlx.core.mean — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.min.html b/docs/build/html/python/_autosummary/mlx.core.min.html
index e4026c41a..3930c2cf6 100644
--- a/docs/build/html/python/_autosummary/mlx.core.min.html
+++ b/docs/build/html/python/_autosummary/mlx.core.min.html
@@ -9,7 +9,7 @@
- mlx.core.min — MLX 0.0.5 documentation
+ mlx.core.min — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.minimum.html b/docs/build/html/python/_autosummary/mlx.core.minimum.html
index 168c007cb..6e8a15a2e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.minimum.html
+++ b/docs/build/html/python/_autosummary/mlx.core.minimum.html
@@ -9,7 +9,7 @@
- mlx.core.minimum — MLX 0.0.5 documentation
+ mlx.core.minimum — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.moveaxis.html b/docs/build/html/python/_autosummary/mlx.core.moveaxis.html
index 084fb9e20..5c695c87e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.moveaxis.html
+++ b/docs/build/html/python/_autosummary/mlx.core.moveaxis.html
@@ -9,7 +9,7 @@
- mlx.core.moveaxis — MLX 0.0.5 documentation
+ mlx.core.moveaxis — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.multiply.html b/docs/build/html/python/_autosummary/mlx.core.multiply.html
index 37d20fa11..e2c611394 100644
--- a/docs/build/html/python/_autosummary/mlx.core.multiply.html
+++ b/docs/build/html/python/_autosummary/mlx.core.multiply.html
@@ -9,7 +9,7 @@
- mlx.core.multiply — MLX 0.0.5 documentation
+ mlx.core.multiply — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.negative.html b/docs/build/html/python/_autosummary/mlx.core.negative.html
index fb26b0e32..fef0c497f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.negative.html
+++ b/docs/build/html/python/_autosummary/mlx.core.negative.html
@@ -9,7 +9,7 @@
- mlx.core.negative — MLX 0.0.5 documentation
+ mlx.core.negative — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.new_stream.html b/docs/build/html/python/_autosummary/mlx.core.new_stream.html
index 6f3e5a681..ec2c8ad79 100644
--- a/docs/build/html/python/_autosummary/mlx.core.new_stream.html
+++ b/docs/build/html/python/_autosummary/mlx.core.new_stream.html
@@ -9,7 +9,7 @@
- mlx.core.new_stream — MLX 0.0.5 documentation
+ mlx.core.new_stream — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.ones.html b/docs/build/html/python/_autosummary/mlx.core.ones.html
index 744bac4c0..f365e7b8e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.ones.html
+++ b/docs/build/html/python/_autosummary/mlx.core.ones.html
@@ -9,7 +9,7 @@
- mlx.core.ones — MLX 0.0.5 documentation
+ mlx.core.ones — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.ones_like.html b/docs/build/html/python/_autosummary/mlx.core.ones_like.html
index 405198b60..445a29663 100644
--- a/docs/build/html/python/_autosummary/mlx.core.ones_like.html
+++ b/docs/build/html/python/_autosummary/mlx.core.ones_like.html
@@ -9,7 +9,7 @@
- mlx.core.ones_like — MLX 0.0.5 documentation
+ mlx.core.ones_like — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.pad.html b/docs/build/html/python/_autosummary/mlx.core.pad.html
index 195adbf77..bbd8276c0 100644
--- a/docs/build/html/python/_autosummary/mlx.core.pad.html
+++ b/docs/build/html/python/_autosummary/mlx.core.pad.html
@@ -9,7 +9,7 @@
- mlx.core.pad — MLX 0.0.5 documentation
+ mlx.core.pad — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.partition.html b/docs/build/html/python/_autosummary/mlx.core.partition.html
index 39f1db136..e3ebf437c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.partition.html
+++ b/docs/build/html/python/_autosummary/mlx.core.partition.html
@@ -9,7 +9,7 @@
- mlx.core.partition — MLX 0.0.5 documentation
+ mlx.core.partition — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.prod.html b/docs/build/html/python/_autosummary/mlx.core.prod.html
index 7fd275486..6ae0387aa 100644
--- a/docs/build/html/python/_autosummary/mlx.core.prod.html
+++ b/docs/build/html/python/_autosummary/mlx.core.prod.html
@@ -9,7 +9,7 @@
- mlx.core.prod — MLX 0.0.5 documentation
+ mlx.core.prod — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -629,11 +641,11 @@ singleton dimensions, defaults to False .
next
-
mlx.core.reciprocal
+
mlx.core.quantize
diff --git a/docs/build/html/python/_autosummary/mlx.core.quantize.html b/docs/build/html/python/_autosummary/mlx.core.quantize.html
new file mode 100644
index 000000000..5cef242cc
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.quantize.html
@@ -0,0 +1,742 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.quantize — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.quantize
+
+
+
+
+
+
+
+
+
+
+mlx.core.quantize
+
+
+mlx.core. quantize ( w : array , / , group_size : int = 64 , bits : int = 4 , * , stream : Union [ None , Stream , Device ] = None ) → Tuple [ array , array , array ]
+Quantize the matrix w
using bits
bits per element.
+Note, every group_size
elements in a row of w
are quantized
+together. Hence, number of columns of w
should be divisible by
+group_size
. In particular, the rows of w
are divided into groups of
+size group_size
which are quantized together.
+
+
Warning
+
quantize
currently only supports 2D inputs with dimensions which are multiples of 32
+
+Formally, for a group of \(g\) consecutive elements \(w_1\) to
+\(w_g\) in a row of w
we compute the quantized representation
+of each element \(\hat{w_i}\) as follows
+
+\[\begin{split}\begin{aligned}
+ \alpha &= \max_i w_i \\
+ \beta &= \min_i w_i \\
+ s &= \frac{\alpha - \beta}{2^b - 1} \\
+ \hat{w_i} &= \textrm{round}\left( \frac{w_i - \beta}{s}\right).
+\end{aligned}\end{split}\]
+After the above computation, \(\hat{w_i}\) fits in \(b\) bits
+and is packed in an unsigned 32-bit integer from the lower to upper
+bits. For instance, for 4-bit quantization we fit 8 elements in an
+unsigned 32 bit integer where the 1st element occupies the 4 least
+significant bits, the 2nd bits 4-7 etc.
+In order to be able to dequantize the elements of w
we also need to
+save \(s\) and \(\beta\) which are the returned scales
and
+biases
respectively.
+
+Parameters:
+
+w (array ) – Matrix to be quantized
+group_size (int , optional ) – The size of the group in w
that shares a
+scale and bias. (default: 64)
+bits (int , optional ) – The number of bits occupied by each element of
+w
in the returned quantized matrix. (default: 4)
+
+
+Returns:
+
A tuple containing
+
+
+w_q (array): The quantized version of w
+scales (array): The scale to multiply each element with, namely \(s\)
+biases (array): The biases to add to each element, namely \(\beta\)
+
+
+
+
+Return type:
+(tuple )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html b/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html
new file mode 100644
index 000000000..2e3081bc2
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html
@@ -0,0 +1,713 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.quantized_matmul — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.core.quantized_matmul
+
+
+
+
+
+
+
+
+
+
+mlx.core.quantized_matmul
+
+
+mlx.core. quantized_matmul ( x : array , w : array , scales : array , biases : array , / , group_size : int = 64 , bits : int = 4 , * , stream : Union [ None , Stream , Device ] = None ) → array
+Perform the matrix multiplication with the quantized matrix w
. The
+quantization uses one floating point scale and bias per group_size
of
+elements. Each element in w
takes bits
bits and is packed in an
+unsigned 32 bit integer.
+
+Parameters:
+
+x (array ) – Input array
+w (array ) – Quantized matrix packed in unsigned integers
+scales (array ) – The scales to use per group_size
elements of w
+biases (array ) – The biases to use per group_size
elements of w
+group_size (int , optional ) – The size of the group in w
that
+shares a scale and bias. (default: 64)
+bits (int , optional ) – The number of bits occupied by each element in
+w
. (default: 4)
+
+
+Returns:
+The result of the multiplication of x
with w
.
+
+Return type:
+result (array )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html b/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html
index 66805581a..38e6fb15e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html
@@ -9,7 +9,7 @@
- mlx.core.random.bernoulli — MLX 0.0.5 documentation
+ mlx.core.random.bernoulli — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.categorical.html b/docs/build/html/python/_autosummary/mlx.core.random.categorical.html
index 999c96485..cabc2b721 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.categorical.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.categorical.html
@@ -9,7 +9,7 @@
- mlx.core.random.categorical — MLX 0.0.5 documentation
+ mlx.core.random.categorical — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html b/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html
index 9bb60151e..552a6f40d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html
@@ -9,7 +9,7 @@
- mlx.core.random.gumbel — MLX 0.0.5 documentation
+ mlx.core.random.gumbel — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.key.html b/docs/build/html/python/_autosummary/mlx.core.random.key.html
index eb1759453..58eed485b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.key.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.key.html
@@ -9,7 +9,7 @@
- mlx.core.random.key — MLX 0.0.5 documentation
+ mlx.core.random.key — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.normal.html b/docs/build/html/python/_autosummary/mlx.core.random.normal.html
index 57ab6d226..5a949134d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.normal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.normal.html
@@ -9,7 +9,7 @@
- mlx.core.random.normal — MLX 0.0.5 documentation
+ mlx.core.random.normal — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.randint.html b/docs/build/html/python/_autosummary/mlx.core.random.randint.html
index 7dac1ca47..2f273b9cb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.randint.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.randint.html
@@ -9,7 +9,7 @@
- mlx.core.random.randint — MLX 0.0.5 documentation
+ mlx.core.random.randint — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.seed.html b/docs/build/html/python/_autosummary/mlx.core.random.seed.html
index bf23aace0..ea2082a1f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.seed.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.seed.html
@@ -9,7 +9,7 @@
- mlx.core.random.seed — MLX 0.0.5 documentation
+ mlx.core.random.seed — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.split.html b/docs/build/html/python/_autosummary/mlx.core.random.split.html
index 64a803672..fe77ce2c2 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.split.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.split.html
@@ -9,7 +9,7 @@
- mlx.core.random.split — MLX 0.0.5 documentation
+ mlx.core.random.split — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html b/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html
index d6d1be7d9..f84d11d5b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html
@@ -9,7 +9,7 @@
- mlx.core.random.truncated_normal — MLX 0.0.5 documentation
+ mlx.core.random.truncated_normal — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.random.uniform.html b/docs/build/html/python/_autosummary/mlx.core.random.uniform.html
index 38633b612..1962f1e2d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.random.uniform.html
+++ b/docs/build/html/python/_autosummary/mlx.core.random.uniform.html
@@ -9,7 +9,7 @@
- mlx.core.random.uniform — MLX 0.0.5 documentation
+ mlx.core.random.uniform — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.reciprocal.html b/docs/build/html/python/_autosummary/mlx.core.reciprocal.html
index 510bd444a..2460d3526 100644
--- a/docs/build/html/python/_autosummary/mlx.core.reciprocal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.reciprocal.html
@@ -9,7 +9,7 @@
- mlx.core.reciprocal — MLX 0.0.5 documentation
+ mlx.core.reciprocal — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -613,12 +625,12 @@ document.write(`
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -627,11 +639,11 @@ in which case the default stream of the default device is used.
next
-
mlx.core.rsqrt
+
mlx.core.round
diff --git a/docs/build/html/python/_autosummary/mlx.core.round.html b/docs/build/html/python/_autosummary/mlx.core.round.html
new file mode 100644
index 000000000..0589ee9ea
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.core.round.html
@@ -0,0 +1,709 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.core.round — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+mlx.core.round
+
+
+mlx.core. round ( a : array , / , decimals : int = 0 , stream : Union [ None , Stream , Device ] = None ) → array
+Round to the given number of decimals.
+Basically performs:
+s = 10 ** decimals
+x = round ( x * s ) / s
+
+
+
+Parameters:
+
+
+Returns:
+An array of the same type as a
rounded to the given number of decimals.
+
+Return type:
+result (array )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.core.rsqrt.html b/docs/build/html/python/_autosummary/mlx.core.rsqrt.html
index e9ee1732c..395dfab67 100644
--- a/docs/build/html/python/_autosummary/mlx.core.rsqrt.html
+++ b/docs/build/html/python/_autosummary/mlx.core.rsqrt.html
@@ -9,7 +9,7 @@
- mlx.core.rsqrt — MLX 0.0.5 documentation
+ mlx.core.rsqrt — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -613,12 +625,12 @@ document.write(`
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -584,15 +596,17 @@ document.write(`
mlx.core.save
-mlx.core. save ( file : str , arr : array , / , retain_graph : bool = True )
+mlx.core. save ( file : str , arr : array , / , retain_graph : Optional [ bool ] = None )
Save the array to a binary file in .npy
format.
Parameters:
file (str ) – File to which the array is saved
arr (array ) – Array to be saved.
-retain_graph (bool ) – Optional argument to retain graph
-during array evaluation before saving. Default: True
+retain_graph (bool , optional ) – Optional argument to retain graph
+during array evaluation before saving. If not provided the graph
+is retained if we are during a function transformation. Default:
+None
diff --git a/docs/build/html/python/_autosummary/mlx.core.savez.html b/docs/build/html/python/_autosummary/mlx.core.savez.html
index bfe649b7d..5a7459fa7 100644
--- a/docs/build/html/python/_autosummary/mlx.core.savez.html
+++ b/docs/build/html/python/_autosummary/mlx.core.savez.html
@@ -9,7 +9,7 @@
- mlx.core.savez — MLX 0.0.5 documentation
+ mlx.core.savez — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html b/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html
index 2f2c5d9f2..f5556f3a8 100644
--- a/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html
+++ b/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html
@@ -9,7 +9,7 @@
- mlx.core.savez_compressed — MLX 0.0.5 documentation
+ mlx.core.savez_compressed — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.set_default_device.html b/docs/build/html/python/_autosummary/mlx.core.set_default_device.html
index 1ff17c00d..5b56c1dbe 100644
--- a/docs/build/html/python/_autosummary/mlx.core.set_default_device.html
+++ b/docs/build/html/python/_autosummary/mlx.core.set_default_device.html
@@ -9,7 +9,7 @@
- mlx.core.set_default_device — MLX 0.0.5 documentation
+ mlx.core.set_default_device — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.set_default_stream.html b/docs/build/html/python/_autosummary/mlx.core.set_default_stream.html
index 40351d1ff..9f119419f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.set_default_stream.html
+++ b/docs/build/html/python/_autosummary/mlx.core.set_default_stream.html
@@ -9,7 +9,7 @@
- mlx.core.set_default_stream — MLX 0.0.5 documentation
+ mlx.core.set_default_stream — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sigmoid.html b/docs/build/html/python/_autosummary/mlx.core.sigmoid.html
index 74e37e74e..96adbaaf5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sigmoid.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sigmoid.html
@@ -9,7 +9,7 @@
- mlx.core.sigmoid — MLX 0.0.5 documentation
+ mlx.core.sigmoid — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sign.html b/docs/build/html/python/_autosummary/mlx.core.sign.html
index e7f1ce2be..46a14409c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sign.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sign.html
@@ -9,7 +9,7 @@
- mlx.core.sign — MLX 0.0.5 documentation
+ mlx.core.sign — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.simplify.html b/docs/build/html/python/_autosummary/mlx.core.simplify.html
index 38d805543..dddc6dd90 100644
--- a/docs/build/html/python/_autosummary/mlx.core.simplify.html
+++ b/docs/build/html/python/_autosummary/mlx.core.simplify.html
@@ -9,7 +9,7 @@
- mlx.core.simplify — MLX 0.0.5 documentation
+ mlx.core.simplify — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sin.html b/docs/build/html/python/_autosummary/mlx.core.sin.html
index 8c5c3c09c..bf47d2590 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sin.html
@@ -9,7 +9,7 @@
- mlx.core.sin — MLX 0.0.5 documentation
+ mlx.core.sin — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sinh.html b/docs/build/html/python/_autosummary/mlx.core.sinh.html
index 530df2956..8bfc66364 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sinh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sinh.html
@@ -9,7 +9,7 @@
- mlx.core.sinh — MLX 0.0.5 documentation
+ mlx.core.sinh — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.softmax.html b/docs/build/html/python/_autosummary/mlx.core.softmax.html
index fa6f7d798..95528f448 100644
--- a/docs/build/html/python/_autosummary/mlx.core.softmax.html
+++ b/docs/build/html/python/_autosummary/mlx.core.softmax.html
@@ -9,7 +9,7 @@
- mlx.core.softmax — MLX 0.0.5 documentation
+ mlx.core.softmax — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sort.html b/docs/build/html/python/_autosummary/mlx.core.sort.html
index 4d7766ee2..b8e3c5991 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sort.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sort.html
@@ -9,7 +9,7 @@
- mlx.core.sort — MLX 0.0.5 documentation
+ mlx.core.sort — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.split.html b/docs/build/html/python/_autosummary/mlx.core.split.html
index 5ead4bcf9..13fab313f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.split.html
+++ b/docs/build/html/python/_autosummary/mlx.core.split.html
@@ -9,7 +9,7 @@
- mlx.core.split — MLX 0.0.5 documentation
+ mlx.core.split — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sqrt.html b/docs/build/html/python/_autosummary/mlx.core.sqrt.html
index b8f0c49ed..ca169dca3 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sqrt.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sqrt.html
@@ -9,7 +9,7 @@
- mlx.core.sqrt — MLX 0.0.5 documentation
+ mlx.core.sqrt — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.square.html b/docs/build/html/python/_autosummary/mlx.core.square.html
index e745574d0..9fe097b7a 100644
--- a/docs/build/html/python/_autosummary/mlx.core.square.html
+++ b/docs/build/html/python/_autosummary/mlx.core.square.html
@@ -9,7 +9,7 @@
- mlx.core.square — MLX 0.0.5 documentation
+ mlx.core.square — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.squeeze.html b/docs/build/html/python/_autosummary/mlx.core.squeeze.html
index 471ad7078..aa6e0f21c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.squeeze.html
+++ b/docs/build/html/python/_autosummary/mlx.core.squeeze.html
@@ -9,7 +9,7 @@
- mlx.core.squeeze — MLX 0.0.5 documentation
+ mlx.core.squeeze — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.stack.html b/docs/build/html/python/_autosummary/mlx.core.stack.html
index a3a1109c7..faad2eb62 100644
--- a/docs/build/html/python/_autosummary/mlx.core.stack.html
+++ b/docs/build/html/python/_autosummary/mlx.core.stack.html
@@ -9,7 +9,7 @@
- mlx.core.stack — MLX 0.0.5 documentation
+ mlx.core.stack — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.stop_gradient.html b/docs/build/html/python/_autosummary/mlx.core.stop_gradient.html
index f303678de..fdd40ca00 100644
--- a/docs/build/html/python/_autosummary/mlx.core.stop_gradient.html
+++ b/docs/build/html/python/_autosummary/mlx.core.stop_gradient.html
@@ -9,7 +9,7 @@
- mlx.core.stop_gradient — MLX 0.0.5 documentation
+ mlx.core.stop_gradient — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.subtract.html b/docs/build/html/python/_autosummary/mlx.core.subtract.html
index 16ca99156..36b03df8e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.subtract.html
+++ b/docs/build/html/python/_autosummary/mlx.core.subtract.html
@@ -9,7 +9,7 @@
- mlx.core.subtract — MLX 0.0.5 documentation
+ mlx.core.subtract — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.sum.html b/docs/build/html/python/_autosummary/mlx.core.sum.html
index 671d7688c..73f7fc04c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.sum.html
+++ b/docs/build/html/python/_autosummary/mlx.core.sum.html
@@ -9,7 +9,7 @@
- mlx.core.sum — MLX 0.0.5 documentation
+ mlx.core.sum — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.swapaxes.html b/docs/build/html/python/_autosummary/mlx.core.swapaxes.html
index b73a2b88a..feda35fb7 100644
--- a/docs/build/html/python/_autosummary/mlx.core.swapaxes.html
+++ b/docs/build/html/python/_autosummary/mlx.core.swapaxes.html
@@ -9,7 +9,7 @@
- mlx.core.swapaxes — MLX 0.0.5 documentation
+ mlx.core.swapaxes — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.take.html b/docs/build/html/python/_autosummary/mlx.core.take.html
index 43858b936..ecc4cf15f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.take.html
+++ b/docs/build/html/python/_autosummary/mlx.core.take.html
@@ -9,7 +9,7 @@
- mlx.core.take — MLX 0.0.5 documentation
+ mlx.core.take — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.take_along_axis.html b/docs/build/html/python/_autosummary/mlx.core.take_along_axis.html
index 781130702..9d836598d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.take_along_axis.html
+++ b/docs/build/html/python/_autosummary/mlx.core.take_along_axis.html
@@ -9,7 +9,7 @@
- mlx.core.take_along_axis — MLX 0.0.5 documentation
+ mlx.core.take_along_axis — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.tan.html b/docs/build/html/python/_autosummary/mlx.core.tan.html
index 1673c26d4..7904c8a94 100644
--- a/docs/build/html/python/_autosummary/mlx.core.tan.html
+++ b/docs/build/html/python/_autosummary/mlx.core.tan.html
@@ -9,7 +9,7 @@
- mlx.core.tan — MLX 0.0.5 documentation
+ mlx.core.tan — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.tanh.html b/docs/build/html/python/_autosummary/mlx.core.tanh.html
index 22106b44a..259452926 100644
--- a/docs/build/html/python/_autosummary/mlx.core.tanh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.tanh.html
@@ -9,7 +9,7 @@
- mlx.core.tanh — MLX 0.0.5 documentation
+ mlx.core.tanh — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.transpose.html b/docs/build/html/python/_autosummary/mlx.core.transpose.html
index e0864d22d..6dff76000 100644
--- a/docs/build/html/python/_autosummary/mlx.core.transpose.html
+++ b/docs/build/html/python/_autosummary/mlx.core.transpose.html
@@ -9,7 +9,7 @@
- mlx.core.transpose — MLX 0.0.5 documentation
+ mlx.core.transpose — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.tri.html b/docs/build/html/python/_autosummary/mlx.core.tri.html
index 70f36cb92..632363f62 100644
--- a/docs/build/html/python/_autosummary/mlx.core.tri.html
+++ b/docs/build/html/python/_autosummary/mlx.core.tri.html
@@ -9,7 +9,7 @@
- mlx.core.tri — MLX 0.0.5 documentation
+ mlx.core.tri — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.tril.html b/docs/build/html/python/_autosummary/mlx.core.tril.html
index 3729a1b40..7ee2c2b29 100644
--- a/docs/build/html/python/_autosummary/mlx.core.tril.html
+++ b/docs/build/html/python/_autosummary/mlx.core.tril.html
@@ -9,7 +9,7 @@
- mlx.core.tril — MLX 0.0.5 documentation
+ mlx.core.tril — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.triu.html b/docs/build/html/python/_autosummary/mlx.core.triu.html
index a2394ab7f..798b86de1 100644
--- a/docs/build/html/python/_autosummary/mlx.core.triu.html
+++ b/docs/build/html/python/_autosummary/mlx.core.triu.html
@@ -9,7 +9,7 @@
- mlx.core.triu — MLX 0.0.5 documentation
+ mlx.core.triu — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.value_and_grad.html b/docs/build/html/python/_autosummary/mlx.core.value_and_grad.html
index 6f0a8fdd3..2e8fbdbf4 100644
--- a/docs/build/html/python/_autosummary/mlx.core.value_and_grad.html
+++ b/docs/build/html/python/_autosummary/mlx.core.value_and_grad.html
@@ -9,7 +9,7 @@
- mlx.core.value_and_grad — MLX 0.0.5 documentation
+ mlx.core.value_and_grad — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.var.html b/docs/build/html/python/_autosummary/mlx.core.var.html
index d0b279851..c851671bc 100644
--- a/docs/build/html/python/_autosummary/mlx.core.var.html
+++ b/docs/build/html/python/_autosummary/mlx.core.var.html
@@ -9,7 +9,7 @@
- mlx.core.var — MLX 0.0.5 documentation
+ mlx.core.var — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.vjp.html b/docs/build/html/python/_autosummary/mlx.core.vjp.html
index adbb7f37f..2908da343 100644
--- a/docs/build/html/python/_autosummary/mlx.core.vjp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.vjp.html
@@ -9,7 +9,7 @@
- mlx.core.vjp — MLX 0.0.5 documentation
+ mlx.core.vjp — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.vmap.html b/docs/build/html/python/_autosummary/mlx.core.vmap.html
index 193b96f51..7bf3fd512 100644
--- a/docs/build/html/python/_autosummary/mlx.core.vmap.html
+++ b/docs/build/html/python/_autosummary/mlx.core.vmap.html
@@ -9,7 +9,7 @@
- mlx.core.vmap — MLX 0.0.5 documentation
+ mlx.core.vmap — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.where.html b/docs/build/html/python/_autosummary/mlx.core.where.html
index fa6f6bb63..fc6e994fe 100644
--- a/docs/build/html/python/_autosummary/mlx.core.where.html
+++ b/docs/build/html/python/_autosummary/mlx.core.where.html
@@ -9,7 +9,7 @@
- mlx.core.where — MLX 0.0.5 documentation
+ mlx.core.where — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.zeros.html b/docs/build/html/python/_autosummary/mlx.core.zeros.html
index 099d86342..deb7ad72f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.zeros.html
+++ b/docs/build/html/python/_autosummary/mlx.core.zeros.html
@@ -9,7 +9,7 @@
- mlx.core.zeros — MLX 0.0.5 documentation
+ mlx.core.zeros — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.core.zeros_like.html b/docs/build/html/python/_autosummary/mlx.core.zeros_like.html
index a4ac5f296..1cb6a9774 100644
--- a/docs/build/html/python/_autosummary/mlx.core.zeros_like.html
+++ b/docs/build/html/python/_autosummary/mlx.core.zeros_like.html
@@ -9,7 +9,7 @@
- mlx.core.zeros_like — MLX 0.0.5 documentation
+ mlx.core.zeros_like — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.nn.Module.html b/docs/build/html/python/_autosummary/mlx.nn.Module.html
index 8b9e6aba0..e45a1926f 100644
--- a/docs/build/html/python/_autosummary/mlx.nn.Module.html
+++ b/docs/build/html/python/_autosummary/mlx.nn.Module.html
@@ -9,7 +9,7 @@
- mlx.nn.Module — MLX 0.0.5 documentation
+ mlx.nn.Module — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
-valid_child_filter
(module, key, value)
+update_modules
(modules)
+Replace the child modules of this Module
instance with the provided ones in the dict of dicts and lists.
+
+valid_child_filter
(module, key, value)
-valid_parameter_filter
(module, key, value)
+valid_parameter_filter
(module, key, value)
-values
()
+values
()
diff --git a/docs/build/html/python/_autosummary/mlx.nn.value_and_grad.html b/docs/build/html/python/_autosummary/mlx.nn.value_and_grad.html
index 37798613f..59b3764b7 100644
--- a/docs/build/html/python/_autosummary/mlx.nn.value_and_grad.html
+++ b/docs/build/html/python/_autosummary/mlx.nn.value_and_grad.html
@@ -9,7 +9,7 @@
- mlx.nn.value_and_grad — MLX 0.0.5 documentation
+ mlx.nn.value_and_grad — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.AdaDelta.html b/docs/build/html/python/_autosummary/mlx.optimizers.AdaDelta.html
index d9a3b8812..fece67dc4 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.AdaDelta.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.AdaDelta.html
@@ -9,7 +9,7 @@
- mlx.optimizers.AdaDelta — MLX 0.0.5 documentation
+ mlx.optimizers.AdaDelta — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.Adagrad.html b/docs/build/html/python/_autosummary/mlx.optimizers.Adagrad.html
index 1940208e5..6db90cb4f 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.Adagrad.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.Adagrad.html
@@ -9,7 +9,7 @@
- mlx.optimizers.Adagrad — MLX 0.0.5 documentation
+ mlx.optimizers.Adagrad — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.Adam.html b/docs/build/html/python/_autosummary/mlx.optimizers.Adam.html
index b4af6ca3a..cf63abf57 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.Adam.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.Adam.html
@@ -9,7 +9,7 @@
- mlx.optimizers.Adam — MLX 0.0.5 documentation
+ mlx.optimizers.Adam — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.AdamW.html b/docs/build/html/python/_autosummary/mlx.optimizers.AdamW.html
index c98ed46a0..1902dff25 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.AdamW.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.AdamW.html
@@ -9,7 +9,7 @@
- mlx.optimizers.AdamW — MLX 0.0.5 documentation
+ mlx.optimizers.AdamW — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.Adamax.html b/docs/build/html/python/_autosummary/mlx.optimizers.Adamax.html
index bf6e05707..d8549d932 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.Adamax.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.Adamax.html
@@ -9,7 +9,7 @@
- mlx.optimizers.Adamax — MLX 0.0.5 documentation
+ mlx.optimizers.Adamax — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -644,11 +656,11 @@ denominator to improve numerical stability. Default:
next
-
Tree Utils
+
mlx.optimizers.Lion
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.Lion.html b/docs/build/html/python/_autosummary/mlx.optimizers.Lion.html
new file mode 100644
index 000000000..28b0f4e8d
--- /dev/null
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.Lion.html
@@ -0,0 +1,725 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.optimizers.Lion — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.optimizers.Lion
+
+
+
+
+
+
+
+
+
+
+mlx.optimizers.Lion
+
+
+class mlx.optimizers. Lion ( learning_rate : float , betas : List [ float ] = [0.9, 0.99] , weight_decay : float = 0.0 )
+Implementation of the Lion optimizer [1].
+Since updates are computed through the sign operation, they tend to
+have larger norm than for other optimizers such as SGD and Adam.
+We recommend a learning rate that is 3-10x smaller than AdamW and a
+weight decay 3-10x larger than AdamW to maintain the strength
+(lr * wd). Our Lion implementation follows the original paper. In
+detail,
+[1]: Chen, X. Symbolic Discovery of Optimization Algorithms. arXiv
+preprint arXiv:2302.06675.
+
+\[c_{t + 1} &= \beta_1 m_t + (1 - \beta_1) g_t
+m_{t + 1} &= \beta_2 m_t + (1 - \beta_2) g_t
+w_{t + 1} &= w_t - \eta (\text{sign}(c_t) + \lambda w_t)\]
+
+Parameters:
+
+learning_rate (float ) – The learning rate \(\eta\) .
+betas (Tuple [ float , float ] , optional ) – The coefficients
+\((\beta_1, \beta_2)\) used for computing the gradient
+momentum and update direction. Default: (0.9, 0.99)
+weight_decay (float , optional ) – The weight decay \(\lambda\) . Default: 0.0
+
+
+
+Methods
+
+
+__init__
(learning_rate[, betas, weight_decay])
+
+
+apply_single
(gradient, parameter, state)
+Performs the Lion parameter update and stores \(m\) in the optimizer state.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.Optimizer.html b/docs/build/html/python/_autosummary/mlx.optimizers.Optimizer.html
index a97e1206a..d114a555f 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.Optimizer.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.Optimizer.html
@@ -9,7 +9,7 @@
- mlx.optimizers.Optimizer — MLX 0.0.5 documentation
+ mlx.optimizers.Optimizer — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.OptimizerState.html b/docs/build/html/python/_autosummary/mlx.optimizers.OptimizerState.html
index 7df5c04b2..ee2420fbf 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.OptimizerState.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.OptimizerState.html
@@ -9,7 +9,7 @@
- mlx.optimizers.OptimizerState — MLX 0.0.5 documentation
+ mlx.optimizers.OptimizerState — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.RMSprop.html b/docs/build/html/python/_autosummary/mlx.optimizers.RMSprop.html
index 94e9ff062..dc865d89c 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.RMSprop.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.RMSprop.html
@@ -9,7 +9,7 @@
- mlx.optimizers.RMSprop — MLX 0.0.5 documentation
+ mlx.optimizers.RMSprop — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.optimizers.SGD.html b/docs/build/html/python/_autosummary/mlx.optimizers.SGD.html
index 02e8f62a5..9fce4fc1a 100644
--- a/docs/build/html/python/_autosummary/mlx.optimizers.SGD.html
+++ b/docs/build/html/python/_autosummary/mlx.optimizers.SGD.html
@@ -9,7 +9,7 @@
- mlx.optimizers.SGD — MLX 0.0.5 documentation
+ mlx.optimizers.SGD — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.utils.tree_flatten.html b/docs/build/html/python/_autosummary/mlx.utils.tree_flatten.html
index 39c547272..8654500ea 100644
--- a/docs/build/html/python/_autosummary/mlx.utils.tree_flatten.html
+++ b/docs/build/html/python/_autosummary/mlx.utils.tree_flatten.html
@@ -9,7 +9,7 @@
- mlx.utils.tree_flatten — MLX 0.0.5 documentation
+ mlx.utils.tree_flatten — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary/mlx.utils.tree_map.html b/docs/build/html/python/_autosummary/mlx.utils.tree_map.html
index 1ac6c22b3..9d78c8b1f 100644
--- a/docs/build/html/python/_autosummary/mlx.utils.tree_map.html
+++ b/docs/build/html/python/_autosummary/mlx.utils.tree_map.html
@@ -9,7 +9,7 @@
- mlx.utils.tree_map — MLX 0.0.5 documentation
+ mlx.utils.tree_map — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -584,13 +596,15 @@ document.write(`
mlx.utils.tree_map
-mlx.utils. tree_map ( fn , tree , * rest )
+mlx.utils. tree_map ( fn , tree , * rest , is_leaf = None )
Applies fn
to the leaves of the python tree tree
and
returns a new collection with the results.
If rest
is provided, every item is assumed to be a superset of tree
and the corresponding leaves are provided as extra positional arguments to
fn
. In that respect, tree_map()
is closer to itertools.starmap()
than to map()
.
+The keyword argument is_leaf
decides what constitutes a leaf from
+tree
similar to tree_flatten()
.
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+
mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+
mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+
mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-
mlx.core.floor
mlx.core.flatten
+
mlx.core.floor
+
mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+
mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+
mlx.core.quantize
+
mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+
mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+
mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html b/docs/build/html/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html
new file mode 100644
index 000000000..61ec33975
--- /dev/null
+++ b/docs/build/html/python/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html
@@ -0,0 +1,690 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.nn.losses.smooth_l1_loss — MLX 0.0.5 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.nn.losses.smooth_l1_loss
+
+
+
+
+
+
+
+
+
+
+mlx.nn.losses.smooth_l1_loss
+
+
+class mlx.nn.losses. smooth_l1_loss ( predictions : array , targets : array , beta : float = 1.0 , reduction : str = 'mean' )
+Computes the smooth L1 loss.
+The smooth L1 loss is a variant of the L1 loss which replaces the absolute
+difference with a squared difference when the absolute difference is less
+than beta
.
+The formula for the smooth L1 Loss is:
+
+\[\begin{split}l =
+ \begin{cases}
+ 0.5 (x - y)^2, & \text{ if } & (x - y) < \beta \\
+ |x - y| - 0.5 \beta, & & \text{otherwise}
+ \end{cases}\end{split}\]
+
+Parameters:
+
+predictions (array ) – Predicted values.
+targets (array ) – Ground truth values.
+beta (float , optional ) – The threshold after which the loss changes
+from the squared to the absolute difference. Default: 1.0
.
+reduction (str , optional ) – Specifies the reduction to apply to the output:
+'none'
| 'mean'
| 'sum'
. Default: 'mean'
.
+
+
+Returns:
+The computed smooth L1 loss.
+
+Return type:
+array
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/array.html b/docs/build/html/python/array.html
index a534518e7..9496f9ffa 100644
--- a/docs/build/html/python/array.html
+++ b/docs/build/html/python/array.html
@@ -9,7 +9,7 @@
- Array — MLX 0.0.5 documentation
+ Array — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -662,31 +674,34 @@ document.write(`
array.reshape
(self, *args[, stream])
Equivalent to reshape()
but the shape can be passed either as a tuple or as separate arguments.
-array.rsqrt
(self, *[, stream])
+array.round
(self, /[, decimals, stream])
+See round()
.
+
+array.rsqrt
(self, *[, stream])
See rsqrt()
.
-array.sin
(self, *[, stream])
+array.sin
(self, *[, stream])
See sin()
.
-array.split
(self, indices_or_sections[, ...])
+array.split
(self, indices_or_sections[, ...])
See split()
.
-array.sqrt
(self, *[, stream])
+array.sqrt
(self, *[, stream])
See sqrt()
.
-array.square
(self, *[, stream])
+array.square
(self, *[, stream])
See square()
.
-array.sum
(self[, axis, keepdims, stream])
+array.sum
(self[, axis, keepdims, stream])
See sum()
.
-array.transpose
(self, *args[, stream])
+array.transpose
(self, *args[, stream])
Equivalent to transpose()
but the axes can be passed either as a tuple or as separate arguments.
-array.T
+array.T
Equivalent to calling self.transpose()
with no arguments.
-array.var
(self[, axis, keepdims, ddof, stream])
+array.var
(self[, axis, keepdims, ddof, stream])
See var()
.
diff --git a/docs/build/html/python/data_types.html b/docs/build/html/python/data_types.html
index 33f7a3b9b..894be9bab 100644
--- a/docs/build/html/python/data_types.html
+++ b/docs/build/html/python/data_types.html
@@ -9,7 +9,7 @@
- Data Types — MLX 0.0.5 documentation
+ Data Types — MLX 0.0.6 documentation
@@ -132,8 +132,8 @@
-
-
+
+
@@ -184,6 +184,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -225,12 +226,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -238,14 +241,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -268,8 +273,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -362,6 +370,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -397,6 +408,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/devices_and_streams.html b/docs/build/html/python/devices_and_streams.html
index 624fe322b..b6ba836b3 100644
--- a/docs/build/html/python/devices_and_streams.html
+++ b/docs/build/html/python/devices_and_streams.html
@@ -9,7 +9,7 @@
- Devices and Streams — MLX 0.0.5 documentation
+ Devices and Streams — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/fft.html b/docs/build/html/python/fft.html
index 85865b81e..7ea345510 100644
--- a/docs/build/html/python/fft.html
+++ b/docs/build/html/python/fft.html
@@ -9,7 +9,7 @@
- FFT — MLX 0.0.5 documentation
+ FFT — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn.html b/docs/build/html/python/nn.html
index bc53fd292..1686b94de 100644
--- a/docs/build/html/python/nn.html
+++ b/docs/build/html/python/nn.html
@@ -9,7 +9,7 @@
- Neural Networks — MLX 0.0.5 documentation
+ Neural Networks — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
Functions
@@ -784,12 +797,14 @@ parameters as the first argument to the function returned by
Loss Functions
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Conv1d.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Conv1d.html
index 821579149..ab43288a1 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Conv1d.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Conv1d.html
@@ -9,7 +9,7 @@
- mlx.nn.Conv1d — MLX 0.0.5 documentation
+ mlx.nn.Conv1d — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Conv2d.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Conv2d.html
index e839b19ea..1d6b54f77 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Conv2d.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Conv2d.html
@@ -9,7 +9,7 @@
- mlx.nn.Conv2d — MLX 0.0.5 documentation
+ mlx.nn.Conv2d — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Embedding.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Embedding.html
index 2191ec525..6c6e55f73 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Embedding.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Embedding.html
@@ -9,7 +9,7 @@
- mlx.nn.Embedding — MLX 0.0.5 documentation
+ mlx.nn.Embedding — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.GELU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.GELU.html
index ebc04b5b3..9f66a97eb 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.GELU.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.GELU.html
@@ -9,7 +9,7 @@
- mlx.nn.GELU — MLX 0.0.5 documentation
+ mlx.nn.GELU — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.GroupNorm.html b/docs/build/html/python/nn/_autosummary/mlx.nn.GroupNorm.html
index 1faa5bfc2..c1881e3ed 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.GroupNorm.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.GroupNorm.html
@@ -9,7 +9,7 @@
- mlx.nn.GroupNorm — MLX 0.0.5 documentation
+ mlx.nn.GroupNorm — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.LayerNorm.html b/docs/build/html/python/nn/_autosummary/mlx.nn.LayerNorm.html
index 2cda282e8..bfdb181ef 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.LayerNorm.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.LayerNorm.html
@@ -9,7 +9,7 @@
- mlx.nn.LayerNorm — MLX 0.0.5 documentation
+ mlx.nn.LayerNorm — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Linear.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Linear.html
index 3959e8949..a60f09c14 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Linear.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Linear.html
@@ -9,7 +9,7 @@
- mlx.nn.Linear — MLX 0.0.5 documentation
+ mlx.nn.Linear — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Mish.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Mish.html
index 41b12e010..4c26708bd 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Mish.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Mish.html
@@ -9,7 +9,7 @@
- mlx.nn.Mish — MLX 0.0.5 documentation
+ mlx.nn.Mish — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.MultiHeadAttention.html b/docs/build/html/python/nn/_autosummary/mlx.nn.MultiHeadAttention.html
index 4db16918d..377053e05 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.MultiHeadAttention.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.MultiHeadAttention.html
@@ -9,7 +9,7 @@
- mlx.nn.MultiHeadAttention — MLX 0.0.5 documentation
+ mlx.nn.MultiHeadAttention — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.PReLU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.PReLU.html
index 104315b0c..03fe0c053 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.PReLU.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.PReLU.html
@@ -9,7 +9,7 @@
- mlx.nn.PReLU — MLX 0.0.5 documentation
+ mlx.nn.PReLU — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.QuantizedLinear.html b/docs/build/html/python/nn/_autosummary/mlx.nn.QuantizedLinear.html
new file mode 100644
index 000000000..1750cf62c
--- /dev/null
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.QuantizedLinear.html
@@ -0,0 +1,715 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.nn.QuantizedLinear — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.nn.QuantizedLinear
+
+
+
+
+
+
+
+
+
+
+mlx.nn.QuantizedLinear
+
+
+class mlx.nn. QuantizedLinear ( input_dims : int , output_dims : int , bias : bool = True , group_size : int = 64 , bits : int = 4 )
+Applies an affine transformation to the input using a quantized weight matrix.
+It is the quantized equivalent of mlx.nn.Linear
. For now its
+parameters are frozen and will not be included in any gradient computation
+but this will probably change in the future.
+QuantizedLinear also provides two useful classmethods to convert linear
+layers to QuantizedLinear layers.
+
+from_linear()
returns a QuantizedLinear layer that applies the same
+linear transformation up to the quantization error.
+quantize_module()
swaps all the linear layers of the passed module
+with QuantizedLinear ones.
+
+
+Parameters:
+
+input_dims (int ) – The dimensionality of the input features
+output_dims (int ) – The dimensionality of the output features
+bias (bool , optional ) – If set to False
then the layer will not use
+a bias. (default: True).
+group_size (int , optional ) – The group size to use for the quantized
+weight. See quantize()
. (default: 64)
+bits (int , optional ) – The bit width to use for the quantized weight.
+See quantize()
. (default: 4)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html b/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html
index 798a53684..e27690a0c 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html
@@ -9,7 +9,7 @@
- mlx.nn.RMSNorm — MLX 0.0.5 documentation
+ mlx.nn.RMSNorm — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html
index af157cd32..ca774bd72 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html
@@ -9,7 +9,7 @@
- mlx.nn.ReLU — MLX 0.0.5 documentation
+ mlx.nn.ReLU — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html b/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html
index 2cffe2970..919aa744a 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html
@@ -9,7 +9,7 @@
- mlx.nn.RoPE — MLX 0.0.5 documentation
+ mlx.nn.RoPE — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html
index f7fc86b8e..a8905b077 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html
@@ -9,7 +9,7 @@
- mlx.nn.SELU — MLX 0.0.5 documentation
+ mlx.nn.SELU — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html
index 3bcaaacfa..a44c2ef8a 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html
@@ -9,7 +9,7 @@
- mlx.nn.Sequential — MLX 0.0.5 documentation
+ mlx.nn.Sequential — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -619,11 +631,11 @@ our functions have learnable parameters they should be implemented as
next
-
Functions
+
mlx.nn.QuantizedLinear
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html
index 519f76680..564795b2a 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html
@@ -9,7 +9,7 @@
- mlx.nn.SiLU — MLX 0.0.5 documentation
+ mlx.nn.SiLU — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Step.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Step.html
index ac18c8af5..a12a2f4b6 100644
--- a/docs/build/html/python/nn/_autosummary/mlx.nn.Step.html
+++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Step.html
@@ -9,7 +9,7 @@
- mlx.nn.Step — MLX 0.0.5 documentation
+ mlx.nn.Step — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu.html
index dab2433f9..9f80fad3f 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu.html
@@ -9,7 +9,7 @@
- mlx.nn.gelu — MLX 0.0.5 documentation
+ mlx.nn.gelu — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_approx.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_approx.html
index 475a989f4..864757dc4 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_approx.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_approx.html
@@ -9,7 +9,7 @@
- mlx.nn.gelu_approx — MLX 0.0.5 documentation
+ mlx.nn.gelu_approx — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.html
index 9844b1e4a..897dfd042 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.html
@@ -9,7 +9,7 @@
- mlx.nn.gelu_fast_approx — MLX 0.0.5 documentation
+ mlx.nn.gelu_fast_approx — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html
index c487041c2..23d771f89 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html
@@ -9,7 +9,7 @@
- mlx.nn.losses.binary_cross_entropy — MLX 0.0.5 documentation
+ mlx.nn.losses.binary_cross_entropy — MLX 0.0.6 documentation
@@ -46,8 +46,8 @@
-
-
+
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -584,13 +596,13 @@ document.write(`
mlx.nn.losses.binary_cross_entropy
-class mlx.nn.losses. binary_cross_entropy ( inputs : array , targets : array , reduction : str = 'none' )
-Computes the binary cross entropy loss between inputs and targets.
+class mlx.nn.losses. binary_cross_entropy ( logits : array , targets : array , reduction : str = 'none' )
+Computes the binary cross entropy loss.
Parameters:
-inputs (mx.array ) – The predicted inputs (post-sigmoid probabilities).
-targets (mx.array ) – The target values (binary labels).
+logits (array ) – The unnormalized (pre-sigmoid) predicted logits.
+targets (array ) – The binary target values in {0, 1}.
reduction (str , optional ) – Specifies the reduction to apply to the output:
'none'
| 'mean'
| 'sum'
. Default: 'none'
.
@@ -599,17 +611,17 @@ document.write(`
The computed binary cross entropy loss.
Return type:
-mx.array
+array
Examples
>>> import mlx.core as mx
>>> import mlx.nn as nn
->>> inputs = mx . array ([ 0.1 , 0.2 , 0.3 , 0.4 ])
+>>> inputs = mx . array ([ 0.105361 , 0.223144 , 1.20397 , 0.916291 ])
>>> targets = mx . array ([ 0 , 0 , 1 , 1 ])
->>> loss = nn . losses . binary_cross_entropy ( inputs , targets )
+>>> loss = nn . losses . binary_cross_entropy ( inputs , targets , "mean" )
>>> loss
-array([0.612192])
+array([0.612192], dtype=float32)
@@ -628,20 +640,20 @@ document.write(`
previous
-
mlx.nn.losses.cross_entropy
+
Loss Functions
next
-
mlx.nn.losses.l1_loss
+
mlx.nn.losses.cross_entropy
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.html
index 4c3ec3ed1..680f2638b 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.html
@@ -9,7 +9,7 @@
-
mlx.nn.losses.cross_entropy — MLX 0.0.5 documentation
+
mlx.nn.losses.cross_entropy — MLX 0.0.6 documentation
@@ -46,8 +46,8 @@
-
-
+
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -584,14 +596,16 @@ document.write(`
mlx.nn.losses.cross_entropy
-class mlx.nn.losses. cross_entropy ( logits : array , targets : array , axis : int = - 1 , reduction : str = 'none' )
-Computes the cross entropy loss between logits and targets.
+class mlx.nn.losses. cross_entropy ( logits : array , targets : array , weights : Optional [ array ] = None , axis : int = - 1 , label_smoothing : float = 0.0 , reduction : str = 'none' )
+Computes the cross entropy loss.
Parameters:
-logits (mx.array ) – The predicted logits.
-targets (mx.array ) – The target values.
+logits (array ) – The unnormalized predicted logits.
+targets (array ) – The target values, as class indices.
+weights (array , optional ) – Weights for each target. Default: None
.
axis (int , optional ) – The axis over which to compute softmax. Default: -1
.
+label_smoothing (float , optional ) – Label smoothing factor. Default: 0
.
reduction (str , optional ) – Specifies the reduction to apply to the output:
'none'
| 'mean'
| 'sum'
. Default: 'none'
.
@@ -600,7 +614,7 @@ document.write(`
The computed cross entropy loss.
Return type:
-mx.array
+array
@@ -619,20 +633,20 @@ document.write(`
previous
-
Loss Functions
+
mlx.nn.losses.binary_cross_entropy
next
-
mlx.nn.losses.binary_cross_entropy
+
mlx.nn.losses.kl_div_loss
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.html
index 477e31d2f..50698543b 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.html
@@ -9,7 +9,7 @@
-
mlx.nn.losses.kl_div_loss — MLX 0.0.5 documentation
+
mlx.nn.losses.kl_div_loss — MLX 0.0.6 documentation
@@ -46,8 +46,8 @@
-
-
+
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -585,8 +597,7 @@ document.write(`
class mlx.nn.losses. kl_div_loss ( inputs : array , targets : array , axis : int = - 1 , reduction : str = 'none' )
-Computes the Kullback-Leibler divergence loss between targets and the
-inputs.
+Computes the Kullback-Leibler divergence loss.
Computes the following when reduction == 'none'
:
mx . exp ( targets ) * ( targets - inputs ) . sum ( axis )
@@ -594,8 +605,8 @@ inputs.
Parameters:
-inputs (mx.array ) – Log probabilities for the predicted distribution.
-targets (mx.array ) – Log probabilities for the target distribution.
+inputs (array ) – Log probabilities for the predicted distribution.
+targets (array ) – Log probabilities for the target distribution.
axis (int , optional ) – The distribution axis. Default: -1
.
reduction (str , optional ) – Specifies the reduction to apply to the output:
'none'
| 'mean'
| 'sum'
. Default: 'none'
.
@@ -605,7 +616,7 @@ inputs.
The computed Kullback-Leibler divergence loss.
Return type:
-mx.array
+array
@@ -624,20 +635,20 @@ inputs.
previous
-
mlx.nn.losses.nll_loss
+
mlx.nn.losses.cross_entropy
next
-
Optimizers
+
mlx.nn.losses.l1_loss
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.html
index bca46781f..fe7efe408 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.html
@@ -9,7 +9,7 @@
-
mlx.nn.losses.l1_loss — MLX 0.0.5 documentation
+
mlx.nn.losses.l1_loss — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+
mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+
mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+
mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-
mlx.core.floor
mlx.core.flatten
+
mlx.core.floor
+
mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+
mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+
mlx.core.quantize
+
mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+
mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+
mlx.optimizers.Lion
Tree Utils
@@ -584,22 +596,22 @@ document.write(`
mlx.nn.losses.l1_loss
-class mlx.nn.losses. l1_loss ( predictions : array , targets : array , reduction : str = 'none' )
-Computes the L1 loss between predictions and targets.
+class mlx.nn.losses. l1_loss ( predictions : array , targets : array , reduction : str = 'mean' )
+Computes the L1 loss.
Parameters:
-predictions (mx.array ) – The predicted values.
-targets (mx.array ) – The target values.
+predictions (array ) – The predicted values.
+targets (array ) – The target values.
reduction (str , optional ) – Specifies the reduction to apply to the output:
-'none'
| 'mean'
| 'sum'
. Default: 'none'
.
+'none'
| 'mean'
| 'sum'
. Default: 'mean'
.
Returns:
The computed L1 loss.
Return type:
-
mx.array
+array
@@ -618,12 +630,12 @@ document.write(`
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+
mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+
mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+
mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-
mlx.core.floor
mlx.core.flatten
+
mlx.core.floor
+
mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+
mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+
mlx.core.quantize
+
mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+
mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+
mlx.optimizers.Lion
Tree Utils
@@ -584,22 +596,22 @@ document.write(`
mlx.nn.losses.mse_loss
-class mlx.nn.losses. mse_loss ( predictions : array , targets : array , reduction : str = 'none' )
-Computes the mean squared error loss between predictions and targets.
+class mlx.nn.losses. mse_loss ( predictions : array , targets : array , reduction : str = 'mean' )
+Computes the mean squared error loss.
Parameters:
-predictions (mx.array ) – The predicted values.
-targets (mx.array ) – The target values.
+predictions (array ) – The predicted values.
+targets (array ) – The target values.
reduction (str , optional ) – Specifies the reduction to apply to the output:
-'none'
| 'mean'
| 'sum'
. Default: 'none'
.
+'none'
| 'mean'
| 'sum'
. Default: 'mean'
.
Returns:
The computed mean squared error loss.
Return type:
-
mx.array
+array
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.html
index 310355961..51852c15f 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.html
@@ -9,7 +9,7 @@
-
mlx.nn.losses.nll_loss — MLX 0.0.5 documentation
+
mlx.nn.losses.nll_loss — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -585,12 +597,12 @@ document.write(`
class mlx.nn.losses. nll_loss ( inputs : array , targets : array , axis : int = - 1 , reduction : str = 'none' )
-Computes the negative log likelihood loss between inputs and targets.
+Computes the negative log likelihood loss.
Parameters:
-inputs (mx.array ) – The predicted distribution in log space.
-targets (mx.array ) – The target values.
+inputs (array ) – The predicted distribution in log space.
+targets (array ) – The target values.
axis (int , optional ) – The distribution axis. Default: -1
.
reduction (str , optional ) – Specifies the reduction to apply to the output:
'none'
| 'mean'
| 'sum'
. Default: 'none'
.
@@ -600,7 +612,7 @@ document.write(`
The computed NLL loss.
Return type:
-mx.array
+array
@@ -628,11 +640,11 @@ document.write(`
next
-
mlx.nn.losses.kl_div_loss
+
mlx.nn.losses.smooth_l1_loss
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html
new file mode 100644
index 000000000..c37cbf930
--- /dev/null
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html
@@ -0,0 +1,719 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.nn.losses.smooth_l1_loss — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.nn.losses.smooth_l1_loss
+
+
+
+
+
+
+
+
+
+
+mlx.nn.losses.smooth_l1_loss
+
+
+class mlx.nn.losses. smooth_l1_loss ( predictions : array , targets : array , beta : float = 1.0 , reduction : str = 'mean' )
+Computes the smooth L1 loss.
+The smooth L1 loss is a variant of the L1 loss which replaces the absolute
+difference with a squared difference when the absolute difference is less
+than beta
.
+The formula for the smooth L1 Loss is:
+
+\[\begin{split}l =
+ \begin{cases}
+ 0.5 (x - y)^2, & \text{ if } & (x - y) < \beta \\
+ |x - y| - 0.5 \beta, & & \text{otherwise}
+ \end{cases}\end{split}\]
+
+Parameters:
+
+predictions (array ) – Predicted values.
+targets (array ) – Ground truth values.
+beta (float , optional ) – The threshold after which the loss changes
+from the squared to the absolute difference. Default: 1.0
.
+reduction (str , optional ) – Specifies the reduction to apply to the output:
+'none'
| 'mean'
| 'sum'
. Default: 'mean'
.
+
+
+Returns:
+The computed smooth L1 loss.
+
+Return type:
+array
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.html
new file mode 100644
index 000000000..204b2129e
--- /dev/null
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.html
@@ -0,0 +1,719 @@
+
+
+
+
+
+
+
+
+
+
+
+ mlx.nn.losses.triplet_loss — MLX 0.0.6 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Skip to main content
+
+
+
+
+
+
+ Back to top
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
mlx.nn.losses.triplet_loss
+
+
+
+
+
+
+
+
+
+
+mlx.nn.losses.triplet_loss
+
+
+class mlx.nn.losses. triplet_loss ( anchors : array , positives : array , negatives : array , axis : int = - 1 , p : int = 2 , margin : float = 1.0 , eps : float = 1e-06 , reduction : str = 'none' )
+Computes the triplet loss for a set of anchor, positive, and negative samples.
+Margin is represented with alpha in the math section.
+
+\[L_{\text{triplet}} = \max\left(\|A - P\|_p - \|A - N\|_p + \alpha, 0\right)\]
+
+Parameters:
+
+anchors (array ) – The anchor samples.
+positives (array ) – The positive samples.
+negatives (array ) – The negative samples.
+axis (int , optional ) – The distribution axis. Default: -1
.
+p (int , optional ) – The norm degree for pairwise distance. Default: 2
.
+margin (float , optional ) – Margin for the triplet loss. Defaults to 1.0
.
+eps (float , optional ) – Small positive constant to prevent numerical instability. Defaults to 1e-6
.
+reduction (str , optional ) – Specifies the reduction to apply to the output:
+'none'
| 'mean'
| 'sum'
. Default: 'none'
.
+
+
+Returns:
+
+Computed triplet loss. If reduction is “none”, returns a tensor of the same shape as input; if reduction is “mean” or “sum”, returns a scalar tensor.
+
+
+
+
+Return type:
+array
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.mish.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.mish.html
index 5309b9fa8..e5bcd7e84 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.mish.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.mish.html
@@ -9,7 +9,7 @@
- mlx.nn.mish — MLX 0.0.5 documentation
+ mlx.nn.mish — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.prelu.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.prelu.html
index 095f6b839..80ca9828a 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.prelu.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.prelu.html
@@ -9,7 +9,7 @@
- mlx.nn.prelu — MLX 0.0.5 documentation
+ mlx.nn.prelu — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.relu.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.relu.html
index 329a4c888..481938607 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.relu.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.relu.html
@@ -9,7 +9,7 @@
- mlx.nn.relu — MLX 0.0.5 documentation
+ mlx.nn.relu — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.selu.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.selu.html
index cc8ecc54b..cdbf30057 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.selu.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.selu.html
@@ -9,7 +9,7 @@
- mlx.nn.selu — MLX 0.0.5 documentation
+ mlx.nn.selu — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.silu.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.silu.html
index e557af695..5b9d54e16 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.silu.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.silu.html
@@ -9,7 +9,7 @@
- mlx.nn.silu — MLX 0.0.5 documentation
+ mlx.nn.silu — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.step.html b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.step.html
index 931b5fc83..695b14a6c 100644
--- a/docs/build/html/python/nn/_autosummary_functions/mlx.nn.step.html
+++ b/docs/build/html/python/nn/_autosummary_functions/mlx.nn.step.html
@@ -9,7 +9,7 @@
- mlx.nn.step — MLX 0.0.5 documentation
+ mlx.nn.step — MLX 0.0.6 documentation
@@ -135,8 +135,8 @@
-
-
+
+
@@ -187,6 +187,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -228,12 +229,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -241,14 +244,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -271,8 +276,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -365,6 +373,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -400,6 +411,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/nn/functions.html b/docs/build/html/python/nn/functions.html
index 3cf95ad6d..7ee6dac08 100644
--- a/docs/build/html/python/nn/functions.html
+++ b/docs/build/html/python/nn/functions.html
@@ -9,7 +9,7 @@
- Functions — MLX 0.0.5 documentation
+ Functions — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -629,12 +641,12 @@ simple functions.
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -635,6 +647,9 @@ document.write(`
Sequential
(*modules)
A layer that calls the passed callables in order.
+QuantizedLinear
(input_dims, output_dims[, ...])
+Applies an affine transformation to the input using a quantized weight matrix.
+
diff --git a/docs/build/html/python/nn/losses.html b/docs/build/html/python/nn/losses.html
index b96187b94..9bc73bb23 100644
--- a/docs/build/html/python/nn/losses.html
+++ b/docs/build/html/python/nn/losses.html
@@ -9,7 +9,7 @@
- Loss Functions — MLX 0.0.5 documentation
+ Loss Functions — MLX 0.0.6 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -584,23 +596,29 @@ document.write(`
Loss Functions
-cross_entropy
(logits, targets[, axis, reduction])
-Computes the cross entropy loss between logits and targets.
+binary_cross_entropy
(logits, targets[, ...])
+Computes the binary cross entropy loss.
-binary_cross_entropy
(inputs, targets[, ...])
-Computes the binary cross entropy loss between inputs and targets.
+cross_entropy
(logits, targets[, weights, ...])
+Computes the cross entropy loss.
-l1_loss
(predictions, targets[, reduction])
-Computes the L1 loss between predictions and targets.
+kl_div_loss
(inputs, targets[, axis, reduction])
+Computes the Kullback-Leibler divergence loss.
-mse_loss
(predictions, targets[, reduction])
-Computes the mean squared error loss between predictions and targets.
+l1_loss
(predictions, targets[, reduction])
+Computes the L1 loss.
-nll_loss
(inputs, targets[, axis, reduction])
-Computes the negative log likelihood loss between inputs and targets.
+mse_loss
(predictions, targets[, reduction])
+Computes the mean squared error loss.
-kl_div_loss
(inputs, targets[, axis, reduction])
-Computes the Kullback-Leibler divergence loss between targets and the inputs.
+nll_loss
(inputs, targets[, axis, reduction])
+Computes the negative log likelihood loss.
+
+smooth_l1_loss
(predictions, targets[, beta, ...])
+Computes the smooth L1 loss.
+
+triplet_loss
(anchors, positives, negatives)
+Computes the triplet loss for a set of anchor, positive, and negative samples.
@@ -627,11 +645,11 @@ document.write(`
next
-
mlx.nn.losses.cross_entropy
+
mlx.nn.losses.binary_cross_entropy
diff --git a/docs/build/html/python/ops.html b/docs/build/html/python/ops.html
index 169e07228..673527fec 100644
--- a/docs/build/html/python/ops.html
+++ b/docs/build/html/python/ops.html
@@ -9,7 +9,7 @@
- Operations — MLX 0.0.5 documentation
+ Operations — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -641,24 +653,30 @@ document.write(`
ceil
(a, /, *[, stream])
Element-wise ceil.
-concatenate
(arrays[, axis, stream])
+clip
(a, /, a_min, a_max, *[, stream])
+Clip the values of the array between the given minimum and maximum.
+
+concatenate
(arrays[, axis, stream])
Concatenate the arrays along the given axis.
-convolve
(a, v, /[, mode, stream])
+convolve
(a, v, /[, mode, stream])
The discrete convolution of 1D arrays.
-conv1d
(input, weight, /[, stride, padding, ...])
+conv1d
(input, weight, /[, stride, padding, ...])
1D convolution over an input with several channels
-conv2d
(input, weight, /[, stride, padding, ...])
+conv2d
(input, weight, /[, stride, padding, ...])
2D convolution over an input with several channels
-cos
(a, /, *[, stream])
+cos
(a, /, *[, stream])
Element-wise cosine.
-cosh
(a, /, *[, stream])
+cosh
(a, /, *[, stream])
Element-wise hyperbolic cosine.
+dequantize
(w, /, scales, biases[, ...])
+Dequantize the matrix w
using the provided scales
and biases
and the group_size
and bits
configuration.
+
divide
(a, b[, stream])
Element-wise division.
@@ -680,30 +698,36 @@ document.write(`
eye
(n[, m, k, dtype, stream])
Create an identity matrix or a general diagonal matrix.
-floor
(a, /, *[, stream])
-Element-wise floor.
-
-flatten
(a, /[, start_axis, end_axis, stream])
+flatten
(a, /[, start_axis, end_axis, stream])
Flatten an array.
-full
(shape, vals[, dtype, stream])
+floor
(a, /, *[, stream])
+Element-wise floor.
+
+floor_divide
(a, b[, stream])
+Element-wise integer division.
+
+full
(shape, vals[, dtype, stream])
Construct an array with the given value.
-greater
(a, b[, stream])
+greater
(a, b[, stream])
Element-wise greater than.
-greater_equal
(a, b[, stream])
+greater_equal
(a, b[, stream])
Element-wise greater or equal.
-identity
(n[, dtype, stream])
+identity
(n[, dtype, stream])
Create a square identity matrix.
-less
(a, b[, stream])
+less
(a, b[, stream])
Element-wise less than.
-less_equal
(a, b[, stream])
+less_equal
(a, b[, stream])
Element-wise less than or equal.
+linspace
(start, stop[, num, dtype, stream])
+Generate num
evenly spaced numbers over interval [start, stop]
.
+
load
(file, /, *[, stream])
Load array(s) from a binary file in .npy
or .npz
format.
@@ -770,103 +794,112 @@ document.write(`
prod
(a, /[, axis, keepdims, stream])
An product reduction over the given axes.
+quantize
(w, /[, group_size, bits, stream])
+Quantize the matrix w
using bits
bits per element.
+
+quantized_matmul
(x, w, scales, biases, /[, ...])
+Perform the matrix multiplication with the quantized matrix w
.
+
reciprocal
(a, /, *[, stream])
Element-wise reciprocal.
reshape
(a, /, shape, *[, stream])
Reshape an array while preserving the size.
-rsqrt
(a, /, *[, stream])
+round
(a, /[, decimals, stream])
+Round to the given number of decimals.
+
+rsqrt
(a, /, *[, stream])
Element-wise reciprocal and square root.
-save
(file, arr, /[, retain_graph])
+save
(file, arr, /[, retain_graph])
Save the array to a binary file in .npy
format.
-savez
(file, *args, **kwargs)
+savez
(file, *args, **kwargs)
Save several arrays to a binary file in uncompressed .npz
format.
-savez_compressed
(file, *args, **kwargs)
+savez_compressed
(file, *args, **kwargs)
Save several arrays to a binary file in compressed .npz
format.
-sigmoid
(a, /, *[, stream])
+sigmoid
(a, /, *[, stream])
Element-wise logistic sigmoid.
-sign
(a, /, *[, stream])
+sign
(a, /, *[, stream])
Element-wise sign.
-sin
(a, /, *[, stream])
+sin
(a, /, *[, stream])
Element-wise sine.
-sinh
(a, /, *[, stream])
+sinh
(a, /, *[, stream])
Element-wise hyperbolic sine.
-softmax
(a, /[, axis, stream])
+softmax
(a, /[, axis, stream])
Perform the softmax along the given axis.
-sort
(a, /[, axis, stream])
+sort
(a, /[, axis, stream])
Returns a sorted copy of the array.
-split
(a, /, indices_or_sections[, axis, stream])
+split
(a, /, indices_or_sections[, axis, stream])
Split an array along a given axis.
-sqrt
(a, /, *[, stream])
+sqrt
(a, /, *[, stream])
Element-wise square root.
-square
(a, /, *[, stream])
+square
(a, /, *[, stream])
Element-wise square.
-squeeze
(a, /[, axis, stream])
+squeeze
(a, /[, axis, stream])
Remove length one axes from an array.
-stack
(arrays[, axis, stream])
+stack
(arrays[, axis, stream])
Stacks the arrays along a new axis.
-stop_gradient
(a, /, *[, stream])
+stop_gradient
(a, /, *[, stream])
Stop gradients from being computed.
-subtract
(a, b[, stream])
+subtract
(a, b[, stream])
Element-wise subtraction.
-sum
(a, /[, axis, keepdims, stream])
+sum
(a, /[, axis, keepdims, stream])
Sum reduce the array over the given axes.
-swapaxes
(a, /, axis1, axis2, *[, stream])
+swapaxes
(a, /, axis1, axis2, *[, stream])
Swap two axes of an array.
-take
(a, /, indices[, axis, stream])
+take
(a, /, indices[, axis, stream])
Take elements along an axis.
-take_along_axis
(a, /, indices[, axis, stream])
+take_along_axis
(a, /, indices[, axis, stream])
Take values along an axis at the specified indices.
-tan
(a, /, *[, stream])
+tan
(a, /, *[, stream])
Element-wise tangent.
-tanh
(a, /, *[, stream])
+tanh
(a, /, *[, stream])
Element-wise hyperbolic tangent.
-transpose
(a, /[, axes, stream])
+transpose
(a, /[, axes, stream])
Transpose the dimensions of the array.
-tri
(n, m, k[, dtype, stream])
+tri
(n, m, k[, dtype, stream])
An array with ones at and below the given diagonal and zeros elsewhere.
-tril
(x, k, *[, stream])
+tril
(x, k, *[, stream])
Zeros the array above the given diagonal.
-triu
(x, k, *[, stream])
+triu
(x, k, *[, stream])
Zeros the array below the given diagonal.
-var
(a, /[, axis, keepdims, ddof, stream])
+var
(a, /[, axis, keepdims, ddof, stream])
Compute the variance(s) over the given axes.
-where
(condition, x, y, /, *[, stream])
+where
(condition, x, y, /, *[, stream])
Select from x
or y
according to condition
.
-zeros
(shape[, dtype, stream])
+zeros
(shape[, dtype, stream])
Construct an array of zeros.
-zeros_like
(a, /, *[, stream])
+zeros_like
(a, /, *[, stream])
An array of zeros like the input.
diff --git a/docs/build/html/python/optimizers.html b/docs/build/html/python/optimizers.html
index 42c2cc150..bd3b0fb27 100644
--- a/docs/build/html/python/optimizers.html
+++ b/docs/build/html/python/optimizers.html
@@ -9,7 +9,7 @@
- Optimizers — MLX 0.0.5 documentation
+ Optimizers — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
@@ -635,6 +647,9 @@ model’s parameters and the optimizer state .
Adamax
(learning_rate[, betas, eps])
Implementation of the Adamax optimizer.
+Lion
(learning_rate[, betas, weight_decay])
+Implementation of the Lion optimizer [1].
+
@@ -651,12 +666,12 @@ model’s parameters and the optimizer state .
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/transforms.html b/docs/build/html/python/transforms.html
index d2bdcddd9..4793f9abe 100644
--- a/docs/build/html/python/transforms.html
+++ b/docs/build/html/python/transforms.html
@@ -9,7 +9,7 @@
- Transforms — MLX 0.0.5 documentation
+ Transforms — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/python/tree_utils.html b/docs/build/html/python/tree_utils.html
index 084d30121..b243ef34c 100644
--- a/docs/build/html/python/tree_utils.html
+++ b/docs/build/html/python/tree_utils.html
@@ -9,7 +9,7 @@
- Tree Utils — MLX 0.0.5 documentation
+ Tree Utils — MLX 0.0.6 documentation
@@ -47,7 +47,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/search.html b/docs/build/html/search.html
index 3d5de657a..e0a8685ec 100644
--- a/docs/build/html/search.html
+++ b/docs/build/html/search.html
@@ -7,7 +7,7 @@
- Search - MLX 0.0.5 documentation
+ Search - MLX 0.0.6 documentation
@@ -133,8 +133,8 @@
-
-
+
+
@@ -185,6 +185,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -226,12 +227,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -239,14 +242,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -269,8 +274,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -363,6 +371,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -398,6 +409,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/searchindex.js b/docs/build/html/searchindex.js
index d8d3b9570..2f2a46c5f 100644
--- a/docs/build/html/searchindex.js
+++ b/docs/build/html/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.Stream", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", 
"python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", "python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", 
"python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.simplify", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", 
"python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.Module", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.optimizers.AdaDelta", "python/_autosummary/mlx.optimizers.Adagrad", "python/_autosummary/mlx.optimizers.Adam", "python/_autosummary/mlx.optimizers.AdamW", "python/_autosummary/mlx.optimizers.Adamax", "python/_autosummary/mlx.optimizers.Optimizer", "python/_autosummary/mlx.optimizers.OptimizerState", "python/_autosummary/mlx.optimizers.RMSprop", "python/_autosummary/mlx.optimizers.SGD", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", "python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/nn", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", 
"python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/functions", "python/nn/layers", "python/nn/losses", "python/ops", "python/optimizers", "python/random", "python/transforms", "python/tree_utils", "quick_start", "unified_memory", "using_streams"], "filenames": ["cpp/ops.rst", "dev/extensions.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.Stream.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", 
"python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", 
"python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", 
"python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.simplify.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", 
"python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.Module.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.optimizers.AdaDelta.rst", "python/_autosummary/mlx.optimizers.Adagrad.rst", "python/_autosummary/mlx.optimizers.Adam.rst", "python/_autosummary/mlx.optimizers.AdamW.rst", "python/_autosummary/mlx.optimizers.Adamax.rst", "python/_autosummary/mlx.optimizers.Optimizer.rst", "python/_autosummary/mlx.optimizers.OptimizerState.rst", "python/_autosummary/mlx.optimizers.RMSprop.rst", "python/_autosummary/mlx.optimizers.SGD.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", 
"python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/functions.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/ops.rst", "python/optimizers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "quick_start.rst", "unified_memory.rst", "using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", 
"mlx.core.Device", "mlx.core.Dtype", "mlx.core.Stream", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.divide", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.load", 
"mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_not", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.reshape", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.simplify", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.Module", "mlx.nn.value_and_grad", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Optimizer", "mlx.optimizers.OptimizerState", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_unflatten", "Array", "Data Types", "Devices and Streams", "FFT", "Neural Networks", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GroupNorm", 
"mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.Mish", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.RMSNorm", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.Step", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.selu", "mlx.nn.silu", "mlx.nn.step", "Functions", "Layers", "Loss Functions", "Operations", "Optimizers", "Random", "Transforms", "Tree Utils", "Quick Start Guide", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 190, 227, 228, 230, 231, 232], "provid": [1, 3, 93, 165, 172, 184, 190, 199, 223, 233], "open": [1, 15, 128, 132], "flexibl": [1, 5], "which": [1, 3, 4, 5, 6, 15, 33, 74, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 93, 97, 100, 124, 125, 134, 136, 137, 138, 151, 152, 157, 165, 167, 168, 194, 203, 212, 228, 232, 233], "user": [1, 3, 190], "mai": 1, "add": [1, 3, 76, 105, 120, 191, 192, 232], "special": 1, "without": [1, 3, 5, 153, 199, 223, 230, 232], "much": [1, 3], "hassl": 1, "while": [1, 3, 6, 134, 203], "librari": [1, 6, 190], "suppli": 1, "effici": [1, 3, 5, 203, 231], "can": [1, 3, 5, 6, 11, 15, 47, 57, 70, 71, 74, 94, 95, 98, 99, 105, 110, 113, 115, 123, 124, 128, 131, 132, 154, 165, 172, 190, 193, 205, 227, 228, 230, 231, 232, 233], "compos": [1, 5, 190, 231], "ani": [1, 3, 5, 15, 143, 183, 184, 185, 190, 194, 230, 231, 232], "number": [1, 15, 51, 64, 77, 93, 96, 97, 120, 124, 127, 130, 132, 143, 162, 165, 167, 168, 190, 191, 192, 195, 199, 228, 233], "applic": [1, 6], "aris": 1, "case": [1, 3, 80, 83, 84, 86, 87, 88, 89, 90, 108, 134, 151, 204, 207, 220, 222, 231, 232, 233], "where": [1, 4, 77, 165, 168, 191, 192, 194, 195, 196, 197, 201, 204, 206, 207, 208, 209, 210, 220, 221, 222], "new": [1, 4, 60, 114, 134, 152, 161, 
172, 184, 199, 227], "function": [1, 2, 3, 4, 5, 13, 72, 73, 74, 93, 97, 108, 141, 143, 165, 167, 168, 173, 184, 190, 194, 198, 205, 207, 208, 209, 210, 217, 218, 222, 227, 228, 230], "highli": [1, 6], "optim": [1, 2, 4, 5], "ar": [1, 2, 3, 4, 5, 6, 13, 15, 59, 60, 65, 77, 79, 80, 82, 83, 85, 86, 88, 89, 93, 97, 108, 120, 121, 123, 124, 125, 128, 131, 132, 137, 138, 151, 152, 157, 165, 167, 168, 172, 183, 184, 191, 192, 195, 196, 199, 223, 230, 231, 232], "need": [1, 3, 4, 5, 59, 190, 228, 231, 232], "For": [1, 3, 6, 185, 190, 228, 231, 232], "you": [1, 3, 4, 5, 6, 190, 228, 232], "design": [1, 2, 5, 228, 232], "your": [1, 3, 6, 172], "own": [1, 6], "link": [1, 6], "top": [1, 197], "core": [1, 2, 3, 4, 172, 190, 211, 227, 231], "we": [1, 2, 3, 4, 177, 190, 193, 205, 228, 230, 232], "inner": 1, "work": [1, 3, 6], "go": [1, 3], "over": [1, 3, 4, 12, 14, 22, 23, 24, 25, 63, 64, 80, 83, 86, 89, 107, 109, 111, 112, 121, 122, 135, 146, 147, 155, 166, 191, 192, 195, 196, 201, 212], "simpl": [1, 3, 4, 190, 193, 223], "learn": [1, 2, 4, 5, 174, 175, 176, 177, 178, 181, 182, 195, 196, 201], "step": [1, 3, 4, 15, 190], "involv": [1, 227], "ad": [1, 2, 6, 172, 174, 175, 176, 177, 178, 181], "let": [1, 2, 3], "s": [1, 2, 3, 4, 35, 44, 79, 80, 82, 83, 85, 86, 88, 89, 93, 100, 111, 124, 165, 166, 168, 173, 179, 190, 227, 228, 231, 232], "sai": [1, 3], "would": [1, 3, 232], "like": [1, 3, 5, 119, 171, 231, 232], "an": [1, 3, 4, 6, 8, 12, 14, 26, 60, 63, 64, 74, 77, 90, 92, 96, 109, 112, 114, 118, 119, 120, 122, 134, 148, 151, 156, 157, 158, 162, 168, 170, 171, 174, 179, 180, 183, 190, 195, 196, 197, 199, 209, 218, 228, 230, 231, 232, 233], "take": [1, 3, 4, 93, 97, 110, 113, 119, 158, 165, 167, 168, 171, 228, 232, 233], "two": [1, 11, 13, 59, 70, 71, 79, 82, 88, 94, 95, 98, 99, 105, 108, 110, 113, 115, 156, 232], "arrai": [1, 3, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 
81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 130, 131, 132, 133, 134, 135, 136, 137, 138, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 190, 211, 212, 213, 214, 215, 216, 217, 218, 222, 231, 232], "x": [1, 2, 3, 4, 72, 96, 125, 137, 141, 143, 163, 164, 169, 172, 184, 190, 194, 195, 196, 197, 198, 201, 202, 204, 206, 207, 208, 209, 210, 217, 218, 219, 220, 221, 222, 227, 231, 232], "y": [1, 2, 3, 4, 143, 169, 175, 190, 195, 196, 197, 201, 227], "scale": [1, 3, 199, 204, 220], "them": [1, 3, 190, 232], "both": [1, 11, 70, 71, 94, 95, 98, 99, 105, 110, 113, 115, 124, 154, 227, 231, 232], "some": [1, 2, 3, 4], "coeffic": 1, "alpha": [1, 177, 181, 204, 218, 220], "beta": [1, 176, 177, 178, 195, 196], "respect": [1, 2, 4, 93, 165, 172, 184, 190, 194, 195, 196, 231], "togeth": [1, 4, 184], "get": [1, 2, 4, 64, 126, 180, 190, 232], "z": [1, 143], "well": [1, 3, 190, 199], "veri": [1, 3, 199, 232], "easili": 1, "do": [1, 3, 6, 172, 177, 190], "just": [1, 4], "write": [1, 3, 190], "out": [1, 6], "follow": [1, 3, 4, 5, 6, 15, 65, 174, 175, 176, 177, 178, 182, 190, 209, 210, 213, 228, 232], "import": [1, 2, 3, 4, 6, 137, 143, 165, 172, 183, 184, 185, 190, 211, 231], "mx": [1, 2, 3, 4, 137, 143, 165, 172, 190, 202, 211, 212, 213, 214, 215, 216, 219, 227, 228, 231, 232, 233], "def": [1, 2, 3, 4, 143, 165, 172, 190, 232], "simple_axpbi": 1, "float": [1, 13, 15, 56, 92, 123, 128, 131, 132, 174, 175, 176, 177, 178, 181, 182, 187, 195, 196, 201, 203, 207, 222], "return": [1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 37, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 
98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 130, 131, 132, 133, 134, 135, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 183, 184, 185, 190, 211, 212, 213, 214, 215, 216, 230, 232], "thi": [1, 3, 4, 6, 12, 13, 14, 15, 22, 23, 24, 25, 74, 97, 105, 107, 108, 109, 111, 112, 121, 122, 124, 143, 146, 147, 148, 155, 157, 166, 172, 190, 207, 209, 210, 222, 230], "perform": [1, 3, 5, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 108, 146, 157, 190, 195, 232], "leav": [1, 184], "differenti": [1, 5], "howev": [1, 190, 194, 195, 228], "vector": [1, 2, 5, 97, 157, 167, 168, 193, 231], "math": [1, 3], "often": 1, "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 180, 183], "same": [1, 3, 6, 59, 60, 64, 65, 84, 87, 88, 89, 93, 97, 120, 124, 167, 169, 172, 190, 195, 228, 232], "realli": 1, "part": 1, "doe": [1, 3, 6, 190], "fast": [1, 143, 194, 210, 232], "so": [1, 3, 6, 93, 143, 165, 227, 232], "decid": 1, "want": [1, 3, 232], "reli": 1, "acceler": 1, "framework": [1, 5], "continu": 1, "impos": 1, "our": [1, 3, 4, 174, 175, 176, 178, 205], "assumpt": 1, "also": [1, 3, 4, 5, 11, 70, 71, 80, 83, 86, 89, 94, 95, 98, 99, 105, 110, 113, 115, 154, 173, 180, 190, 199, 204, 206, 208, 220, 221, 223, 227, 231, 233], "assum": [1, 3, 184, 190, 195], "how": [1, 3, 4, 190, 191, 192, 193, 199, 232], "gradient": [1, 2, 4, 93, 153, 165, 172, 173, 174, 176, 177, 178, 182, 190, 227, 231], "ins": 1, "what": [1, 3], "coincid": 1, "right": [1, 194, 209, 210], "place": [1, 3], "cours": 1, "The": [1, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 35, 44, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 
106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 130, 131, 132, 133, 134, 141, 142, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 187, 191, 192, 193, 195, 196, 197, 199, 201, 203, 205, 207, 211, 212, 213, 214, 215, 216, 222, 227, 231, 232, 233], "structur": [1, 74], "from": [1, 3, 4, 5, 85, 86, 88, 89, 92, 100, 108, 119, 123, 124, 125, 126, 128, 131, 137, 151, 153, 154, 157, 158, 169, 171, 183, 184, 185, 190, 199, 230, 231, 232], "frontend": 1, "api": 1, "redirect": 1, "when": [1, 3, 5, 6, 172, 191, 192, 213, 228, 232], "appropri": 1, "fallback": 1, "metal": 1, "vjp": [1, 231], "jvp": [1, 231], "In": [1, 3, 4, 108, 172, 174, 175, 176, 178, 184, 190, 195, 230, 232], "one": [1, 3, 6, 56, 64, 76, 77, 103, 108, 124, 151, 154, 232], "sentenc": 1, "comput": [1, 2, 3, 4, 5, 6, 93, 97, 105, 111, 143, 146, 153, 165, 166, 167, 173, 174, 176, 177, 178, 190, 195, 196, 201, 203, 209, 210, 211, 212, 213, 214, 215, 216, 227, 231, 232], "graph": [1, 3, 4, 5, 74, 136, 143], "rule": 1, "evalu": [1, 3, 4, 74, 97, 136, 167, 172, 190, 227, 231], "said": [1, 3], "start": [1, 2, 3, 5, 6, 15, 148, 232], "discuss": 1, "more": [1, 4, 8, 56, 108, 190, 228, 232], "detail": [1, 8, 174, 175, 176, 178, 190], "thei": [1, 2, 3, 65, 172, 205, 230, 231, 232], "c": [1, 3, 187, 191, 192, 231, 232], "scalar": [1, 11, 13, 26, 37, 56, 59, 60, 70, 71, 92, 93, 94, 95, 98, 99, 105, 106, 108, 110, 113, 115, 120, 128, 131, 132, 154, 165, 169, 173, 231], "sum": [1, 2, 11, 107, 146, 190, 211, 212, 213, 214, 215, 216], "elementwis": 1, "numpi": [1, 3, 4, 5, 11, 13, 15, 60, 70, 71, 94, 95, 98, 99, 105, 108, 110, 113, 115, 154, 231], "style": [1, 11, 13, 70, 71, 94, 95, 98, 99, 105, 108, 110, 113, 115, 154], "broadcast": [1, 11, 13, 60, 70, 71, 92, 94, 95, 98, 99, 105, 108, 110, 113, 115, 123, 124, 
131, 132, 154, 158, 169, 199], "between": [1, 5, 211, 212, 213, 214, 215, 216, 232], "input": [1, 2, 3, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 95, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 119, 120, 121, 122, 130, 133, 134, 135, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 163, 164, 165, 166, 168, 169, 171, 191, 192, 193, 195, 196, 197, 199, 201, 203, 207, 211, 213, 216, 222, 231], "upcast": 1, "const": 1, "factor": 1, "streamordevic": 1, "stream": [1, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 69, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 130, 131, 132, 133, 134, 135, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 166, 169, 170, 171, 232], "schedul": [1, 232], "itself": 1, "call": [1, 3, 4, 27, 172, 190, 193, 205, 227], "other": [1, 3, 172, 190, 199, 231], "within": [1, 24], "simplest": [1, 190], "wai": [1, 3, 6, 190], "about": [1, 3, 4, 232], "term": [1, 174, 175, 176, 177, 178, 181], "exist": [1, 3], "auto": [1, 6], "ax": [1, 12, 14, 22, 23, 57, 76, 79, 80, 82, 83, 85, 86, 88, 89, 107, 109, 111, 112, 120, 122, 146, 151, 155, 156, 161, 166], "multipli": 1, "earlier": 1, "goal": 1, "themselv": 1, "contain": [1, 3, 49, 74, 84, 85, 86, 106, 148, 169, 172, 190], "act": 1, "data": [1, 4, 5, 8, 15, 77, 87, 88, 92, 96, 118, 131, 162, 170], "nor": [1, 93, 165], "rather": [1, 232], "easi": [1, 
190], "interfac": 1, "block": [1, 3], "A": [1, 3, 5, 6, 49, 59, 93, 97, 107, 108, 123, 124, 125, 127, 128, 131, 132, 148, 152, 165, 167, 168, 172, 173, 176, 178, 183, 184, 185, 190, 195, 196, 198, 201, 205, 210, 217, 227], "It": [1, 3, 6, 93, 165, 178, 179, 190], "creat": [1, 3, 6, 77, 96, 172, 190, 227], "output": [1, 3, 6, 12, 13, 14, 15, 22, 23, 24, 60, 77, 84, 87, 88, 89, 92, 93, 96, 107, 109, 111, 112, 118, 119, 121, 122, 123, 124, 125, 127, 128, 131, 132, 137, 138, 146, 151, 155, 158, 162, 165, 166, 167, 168, 169, 170, 171, 191, 192, 197, 199, 207, 211, 212, 213, 214, 215, 216, 222, 231, 232], "given": [1, 12, 14, 24, 60, 62, 74, 76, 78, 79, 80, 81, 82, 83, 87, 88, 89, 92, 107, 109, 111, 112, 122, 128, 146, 148, 155, 162, 163, 164, 166, 199], "set": [1, 3, 4, 6, 172, 180, 194, 197, 203, 207, 222, 228], "further": [1, 6], "class": [1, 3, 4, 7, 8, 9, 26, 172, 174, 175, 176, 177, 178, 179, 180, 181, 182, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222], "under": 1, "These": [1, 158, 232], "word": 1, "bit": [1, 187], "abstract": 1, "back": [1, 3], "give": [1, 3, 4, 24], "ourselv": 1, "concret": [1, 197, 232], "imag": [1, 192], "public": [1, 190], "explicit": [1, 228], "alpha_": 1, "beta_": 1, "must": [1, 6, 74, 92, 123, 124, 128, 131, 132, 169], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 190, 231], "avoid": 1, "unnecessari": [1, 3], "alloc": [1, 172], "respons": 1, "space": [1, 216], "void": 1, "eval_cpu": 1, "std": 1, "overrid": 1, "eval_gpu": 1, "jacobian": [1, 97, 167, 231], "product": [1, 97, 108, 122, 167, 199, 231], "primal": [1, 97, 167], "tangent": [1, 20, 21, 97, 159, 160], "int": [1, 3, 4, 7, 9, 12, 14, 15, 22, 23, 24, 25, 29, 30, 31, 32, 40, 41, 42, 43, 45, 49, 52, 55, 56, 58, 60, 62, 63, 64, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 96, 107, 109, 111, 112, 114, 118, 120, 121, 122, 123, 124, 125, 126, 127, 128, 
129, 130, 131, 132, 134, 146, 147, 148, 151, 152, 155, 156, 157, 158, 161, 162, 163, 164, 165, 166, 168, 170, 172, 190, 191, 192, 193, 195, 196, 197, 199, 201, 203, 212, 213, 216], "argnum": [1, 93, 165], "cotan": 1, "across": [1, 195], "pair": [1, 120, 203], "repres": [1, 3], "axi": [1, 3, 4, 12, 14, 22, 23, 24, 25, 29, 30, 31, 32, 40, 41, 42, 43, 45, 52, 55, 58, 62, 76, 78, 81, 84, 85, 86, 87, 88, 89, 107, 109, 111, 112, 114, 120, 121, 122, 124, 146, 147, 148, 151, 152, 155, 156, 157, 158, 161, 166, 168, 212, 213, 216], "correspond": [1, 12, 14, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 107, 109, 112, 122, 155, 168, 184], "dimens": [1, 3, 12, 14, 22, 23, 44, 49, 56, 64, 76, 85, 86, 88, 89, 90, 107, 108, 109, 111, 112, 122, 124, 130, 155, 158, 161, 166, 191, 192, 195, 196, 199, 201, 203], "vmap": [1, 231], "print": [1, 2, 3, 4, 6, 183, 184, 185, 190, 228, 231], "ostream": 1, "os": [1, 6], "equival": [1, 27, 47, 57, 194], "check": [1, 6, 59], "bool": [1, 12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 55, 56, 58, 59, 74, 107, 109, 111, 112, 122, 123, 128, 131, 132, 136, 155, 166, 182, 191, 192, 195, 196, 197, 199, 203], "is_equival": 1, "privat": 1, "fall": 1, "eval": [1, 2, 3, 4, 143, 172, 190, 227, 231], "deriv": 1, "base": [1, 74, 102, 104, 172, 178, 179, 203, 227, 228], "abov": [1, 3, 6, 163, 177, 190, 232], "demonstr": 1, "treat": [1, 59, 85, 86, 88, 89, 157], "paramet": [1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 
173, 174, 175, 176, 177, 178, 179, 181, 182, 183, 184, 185, 191, 192, 193, 194, 195, 196, 197, 199, 201, 203, 205, 207, 211, 212, 213, 214, 215, 216, 222, 223, 227], "produc": [1, 199], "through": [1, 153], "construct": [1, 4, 92, 118, 170], "its": [1, 6, 108, 121, 130, 143, 162, 173, 176, 177, 178, 185, 190, 232], "type": [1, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 130, 131, 132, 133, 134, 135, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 179, 183, 190, 211, 212, 213, 214, 215, 216], "shape": [1, 3, 4, 47, 59, 60, 63, 64, 78, 81, 84, 87, 88, 89, 92, 97, 108, 118, 119, 123, 124, 125, 127, 128, 131, 132, 134, 158, 167, 169, 170, 171, 190, 191, 192, 197, 227, 231, 232], "pass": [1, 3, 4, 47, 57, 120, 165, 173, 183, 190, 205], "re": [1, 4], "now": [1, 3], "promot": 1, "dtype": [1, 3, 15, 26, 33, 56, 77, 92, 96, 118, 125, 127, 128, 131, 132, 162, 170, 187, 231], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 15, 77, 96, 118, 125, 127, 131, 132, 162, 170, 187, 231], "non": [1, 6, 172, 198, 217], "point": [1, 2, 3, 6, 187], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 33, 87, 88, 89], "up": [1, 3], "determin": 1, "x_cast": 1, "astyp": [1, 3], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 15, 52, 58, 62, 63, 64, 77, 90, 93, 120, 123, 132, 148, 152, 162, 163, 164, 165, 166, 168, 172, 174, 176, 177, 178, 181, 182, 183, 190, 191, 192, 194, 195, 196, 200, 202, 204, 207, 209, 210, 211, 218, 219, 220, 222, 228, 231], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 
190], "resolv": 1, "No": [1, 3], "happen": [1, 3, 227], "alon": 1, "effect": 1, "onli": [1, 3, 5, 6, 59, 63, 64, 172, 187, 190, 232], "execut": [1, 6, 232], "depend": [1, 2, 56, 232], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 130, 131, 132, 133, 134, 135, 139, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 166, 169, 170, 171, 232, 233], "specifi": [1, 15, 33, 64, 85, 86, 92, 93, 114, 118, 124, 156, 157, 158, 161, 165, 168, 170, 207, 211, 212, 213, 214, 215, 216, 222, 232], "memori": [1, 5, 143, 172], "ha": [1, 3, 4, 5, 56, 84, 85, 87, 88, 89, 93, 124, 172, 197, 227, 231, 232], "been": [1, 3], "try": [1, 6], "naiv": 1, "gener": [1, 2, 15, 77, 85, 86, 123, 127, 128, 131, 132, 228, 233], "version": [1, 6, 105, 107, 146, 168, 228], "declar": 1, "member": [1, 190], "method": [1, 3, 7, 8, 9, 26, 172, 174, 175, 176, 177, 178, 179, 180, 181, 182, 190], "each": [1, 49, 74, 108, 120, 124, 137, 138, 148, 161, 168, 169, 193, 195, 203, 228], "element": [1, 10, 11, 16, 17, 18, 19, 20, 21, 24, 61, 66, 67, 70, 71, 72, 73, 75, 77, 91, 94, 95, 98, 99, 101, 102, 103, 104, 105, 106, 110, 113, 115, 116, 121, 133, 135, 141, 142, 144, 145, 149, 150, 154, 157, 159, 160, 165, 169, 198, 203, 206, 217, 218, 221], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 190], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 72, 165, 174, 175, 176, 177, 178, 181, 182, 190, 232], "readi": 1, "fill": [1, 92, 119, 162, 171], "malloc_or_wait": 1, "synchron": 1, "avail": [1, 2, 3, 4, 6, 8, 187, 232], 
"There": [1, 190], "wait": [1, 3], "here": [1, 3, 218, 232], "request": 1, "pressur": 1, "condit": [1, 169, 232], "set_data": 1, "nbyte": 1, "collect": [1, 180, 184, 230], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 49, 64, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 92, 96, 124, 134, 148, 151, 190, 191, 192, 193], "map": [1, 4, 100, 184, 193], "linear": [1, 3, 4, 5, 172, 184, 190, 194, 202, 204, 206, 208, 209, 210, 219, 220, 221], "indic": [1, 13, 22, 23, 24, 25, 74, 93, 148, 157, 158, 165], "offset": [1, 3], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 63, 64, 191, 192, 203], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 12, 14, 15, 22, 23, 24, 25, 59, 62, 63, 64, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 93, 96, 107, 109, 111, 112, 118, 121, 122, 123, 124, 125, 127, 128, 130, 131, 132, 134, 136, 147, 148, 151, 152, 155, 161, 162, 163, 164, 165, 166, 168, 170, 172, 174, 175, 176, 177, 178, 180, 181, 182, 187, 191, 192, 197, 199, 203, 211, 212, 213, 214, 215, 216, 228, 230, 233], "row": [1, 77, 96, 162], "major": 1, "henc": 1, "doesn": [1, 190], "additon": 1, "abl": 1, "all": [1, 4, 6, 13, 24, 64, 74, 77, 80, 83, 86, 89, 108, 120, 121, 151, 172, 179, 190, 199, 228, 231, 233], "incom": 1, "accordingli": 1, "dispatch": 1, "float16": [1, 187], "bfloat16": 1, "complex64": 1, "throw": 1, "error": [1, 6, 72, 73, 148, 194, 208, 209, 210, 215], "encount": 1, "unexpect": [1, 15], "regist": [1, 4], "op": 1, "contruct": 1, "assert": 1, "2": [1, 2, 3, 4, 64, 72, 79, 82, 84, 85, 86, 87, 88, 89, 104, 108, 130, 162, 163, 164, 172, 174, 175, 176, 177, 181, 187, 190, 192, 194, 201, 209, 211, 231, 232], "1": [1, 3, 4, 15, 24, 25, 63, 64, 78, 79, 81, 82, 84, 85, 86, 87, 88, 89, 90, 108, 121, 124, 132, 141, 147, 157, 165, 172, 174, 175, 176, 177, 178, 181, 182, 187, 190, 191, 192, 194, 195, 196, 200, 201, 203, 204, 207, 209, 210, 211, 212, 213, 216, 220, 222, 
227, 231, 232], "correct": [1, 176, 177, 178], "els": [1, 3, 190], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 13, 63, 64, 108], "have": [1, 3, 6, 59, 85, 86, 88, 89, 108, 124, 183, 199, 205, 230, 232], "rememb": 1, "3": [1, 3, 6, 211, 228, 231], "complic": 1, "keep": [1, 12, 14, 22, 23, 107, 109, 111, 112, 122, 155, 166, 190], "mind": [1, 3], "half": [1, 15, 128, 132, 203], "precis": [1, 3, 190, 194], "direct": [1, 3, 232], "fix": [1, 3, 6], "possibl": [1, 3, 108, 148, 193, 232], "due": 1, "transpos": [1, 3, 27], "aren": 1, "guarante": 1, "fit": [1, 232], "requir": [1, 3, 190], "column": [1, 77, 96], "inplac": 1, "expect": [1, 3, 191, 192, 199], "answer": 1, "copi": [1, 3, 5, 121, 147], "simpli": [1, 3, 6, 172, 202, 219], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 90, 93, 108, 121, 130, 156, 165, 176, 177, 178, 183, 190, 195, 232], "mode": [1, 65], "i": [1, 3, 97, 177, 190, 191, 192], "e": [1, 4, 6, 72, 97, 141, 175, 191, 192, 195, 196, 201, 223, 227, 233], "match": [1, 6, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89], "transposit": 1, "data_s": 1, "items": 1, "flag": 1, "copy_inplac": 1, "copytyp": 1, "n": [1, 3, 26, 63, 64, 77, 78, 80, 81, 83, 84, 87, 89, 96, 162, 166, 191, 192], "incx": 1, "inci": 1, "great": 1, "But": [1, 232], "criteria": 1, "luckili": 1, "alwai": [1, 183], "With": 1, "final": [1, 2, 3, 4], "singl": [1, 4, 74, 97, 120, 167], "row_contigu": 1, "col_contigu": 1, "common": 1, "hit": 1, "mileston": 1, "enough": 1, "run": [1, 3, 4, 5, 6, 143, 174, 176, 177, 178, 232, 233], "If": [1, 3, 6, 12, 14, 15, 22, 23, 24, 25, 56, 59, 62, 65, 74, 87, 88, 89, 92, 93, 107, 108, 109, 111, 112, 118, 120, 121, 122, 124, 146, 147, 148, 155, 157, 158, 165, 166, 168, 170, 184, 191, 192, 195, 196, 197, 199, 203, 205, 232, 233], "plan": 1, "stop": [1, 3, 15, 153], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 232], "silicon": [1, 3, 5, 6, 232], "address": 1, "shade": 1, "languag": [1, 187], 
"kernel": [1, 63, 64], "written": 1, "help": [1, 3, 232], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6], "cpp": 1, "algorithm": 1, "launch": 1, "exactli": [1, 3], "mani": [1, 148, 191, 192, 193, 199], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 177, 182, 184, 227], "assign": [1, 172], "axpby_gener": 1, "buffer": 1, "constant": [1, 3, 6, 120, 181, 195, 196, 201], "4": [1, 3, 137, 187, 211, 231, 232], "5": [1, 2, 3, 6, 123, 181], "x_stride": 1, "6": [1, 3, 137, 181, 209, 210, 231], "y_stride": 1, "7": [1, 3], "ndim": 1, "8": [1, 3, 6, 174, 175, 176, 177, 178, 181, 187, 231, 232], "uint": 1, "index": [1, 7, 9, 24, 76, 77, 93, 121, 157, 158, 165], "thread_position_in_grid": 1, "convert": [1, 56, 231], "instanti": [1, 4], "uniqu": [1, 228], "host": 1, "name": [1, 100, 137, 138, 180, 190, 195], "identifi": [1, 183, 230], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "bflot16": 1, "compil": [1, 6], "mlx_ext": 1, "metallib": [1, 6], "see": [1, 3, 4, 6, 8, 28, 29, 30, 31, 32, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 190, 194, 204, 208, 209, 210, 220, 232], "later": [1, 6], "co": 1, "locat": [1, 232], "share": [1, 5], "register_librari": 1, "potenti": 1, "path": [1, 6, 137, 138], "tri": 1, "load": [1, 4], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 26, 37, 56, 123, 128, 131, 132, 168, 183, 230], "why": [1, 3], "packag": [1, 2, 4], "process": [1, 3, 65, 184, 193, 230], "logic": [1, 106], "grid": 1, "shown": 1, "below": [1, 162, 164, 187], "prepar": [1, 3], "carri": 1, "should": [1, 2, 3, 4, 6, 74, 97, 143, 158, 165, 167, 172, 183, 190, 191, 192, 199, 205, 230, 233], "d": [1, 3, 108, 157, 162, 163, 164, 174, 176, 178, 185, 232], "ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 108, 190, 231, 232], "sure": [1, 3, 6, 190], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 65, 93, 100, 136, 
137, 138, 165, 183, 185, 211, 212, 213, 214, 215, 216], "encod": [1, 203], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 190], "decelar": 1, "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": 1, "than": [1, 3, 56, 65, 94, 95, 98, 99, 108, 184, 203, 207, 222, 232], "max": [1, 110, 178, 218, 232], "allow": [1, 172, 179, 190, 231], "tgp_size": 1, "min": [1, 113, 218], "maxtotalthreadsperthreadgroup": 1, "3d": 1, "mtl": 1, "group_dim": 1, "grid_dim": 1, "divd": 1, "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 143, 231], "thing": [1, 3], "note": [1, 3, 6, 13, 63, 64, 85, 86, 124, 190], "befor": [1, 3, 6, 24, 121, 136], "move": [1, 114, 232], "track": [1, 190], "activ": [1, 6, 198, 207, 217, 222, 223], "command": [1, 6], "instead": [1, 190], "end_encod": 1, "end": [1, 204, 207, 220, 222], "until": [1, 231], "limit": 1, "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 137, 138], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3], "far": [1, 227], "built": [1, 6], "includ": [1, 231, 233], "forward": [1, 165], "diff": 1, "push": 1, "along": [1, 22, 23, 62, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 146, 148, 152, 157, 158, 190], "primtiv": 1, "similarli": [1, 6, 108], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 161], "arg": [1, 3, 8, 47, 57, 74, 137, 138, 143], "push_back": 1, "fulli": [1, 5, 232], "primitv": 1, "overal": 1, "directori": [1, 3, 6], "extens": [1, 187], "h": [1, 63, 64, 192], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 9, 26, 172, 190], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6], "strucutr": 1, "hold": [1, 3, 8, 179], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 172, 232], "compon": [1, 3], "etc": [1, 190], "becom": 1, "pybind11_modul": 1, "m": [1, 6, 77, 162, 174], "doc": [1, 4], "sampl": [1, 2, 3, 123, 124, 125, 128, 131, 132, 228], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 
7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 168, 169, 170, 171, 183, 194, 199, 211, 212, 213, 214, 215, 216], "r": [1, 3, 165], "pbdoc": 1, "most": [1, 124, 190], "complex": [1, 85, 86, 87, 88, 89, 123, 128, 131, 132, 183, 190], "addit": [1, 3, 11, 172, 195, 196, 199, 201], "bell": 1, "whistl": 1, "liter": 1, "string": 1, "modul": [1, 3, 4, 173, 205, 230], "ensur": 1, "caster": 1, "find_packag": 1, "config": 1, "add_librari": 1, "sourc": [1, 114, 161], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, "target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 165, 211, 212, 213, 214, 215, 216], "destin": [1, 114], "automat": [1, 5, 231, 232], "practic": 1, "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 143], "describ": 1, "util": [1, 3, 5, 6, 137, 190], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 187], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, "package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 
55, 58, 59, 74, 107, 109, 111, 112, 122, 155, 166, 169, 182, 183, 187, 195, 197, 199, 203], "python_requir": 1, "even": [1, 3], "though": [1, 3], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 24, 121, 195, 196, 199, 232], "plai": [1, 3], "ones": [1, 3, 119, 137, 143, 162], "b": [1, 3, 11, 13, 59, 70, 71, 94, 95, 98, 99, 105, 108, 110, 113, 115, 154, 165, 197, 231, 232], "f": [1, 2, 4, 177, 190], "item": [1, 2, 3, 4, 184, 231], "true": [1, 2, 3, 59, 136, 146, 169, 183, 187, 190, 191, 192, 195, 196, 197, 203], "quick": [1, 5], "benchmark": 1, "compar": [1, 59], "time": [1, 3, 6, 143, 190, 232], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 232], "random": [1, 2, 3, 4, 5, 232, 233], "normal": [1, 2, 3, 131, 180, 195, 196, 201, 232], "bench": 1, "warm": 1, "rang": [1, 2, 3, 4, 6, 15, 209, 210, 227, 228, 232], "100": [1, 2, 3, 232], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4], "custom": 1, "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 174, 175, 176, 177, 178, 181], "awai": [1, 3], "good": [1, 6, 232], "nn": [1, 3, 4, 137, 184, 190, 227], "grad": [1, 2, 4, 165, 227, 231], "simplifi": 1, "full": [1, 4, 47, 57, 65, 146], "implement": [2, 4, 174, 175, 176, 177, 178, 179, 180, 181, 193, 199, 203, 205, 207, 222], "basic": 2, "model": [2, 4, 5, 137, 172, 173, 184, 190, 199, 227], "problem": [2, 4, 190], "metadata": 2, "num_featur": 2, "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 184, 228], "sgd": [2, 4, 227], "lr": 2, "01": [2, 177], "rate": [2, 174, 175, 176, 177, 178, 181, 182], "ll": [2, 4], "synthet": 2, "dataset": 2, "matrix": [2, 77, 96, 108], "ground": [2, 3], "truth": 2, "w_star": 2, "valu": [2, 3, 10, 15, 22, 23, 37, 56, 59, 74, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 92, 120, 123, 124, 125, 127, 128, 131, 132, 157, 158, 165, 168, 172, 173, 177, 180, 183, 184, 187, 199, 207, 211, 212, 214, 215, 216, 222], "gaussian": [2, 194, 208, 209, 210], "nois": 2, 
"exampl": [2, 3, 4, 15, 157, 190, 211, 227, 228, 231], "noisi": 2, "label": [2, 211], "ep": [2, 174, 175, 176, 177, 178, 181, 195, 196, 201], "1e": [2, 4, 13, 174, 175, 176, 177, 178, 181, 195, 196, 201], "us": [2, 3, 4, 5, 6, 15, 90, 108, 134, 172, 174, 176, 177, 178, 179, 183, 190, 193, 194, 197, 199, 203, 209, 210, 227, 228, 230, 231, 232], "weight": [2, 63, 64, 172, 177, 182, 184, 190], "squar": [2, 3, 96, 135, 149, 165, 174, 176, 177, 178, 184, 190, 201, 215], "loss": [2, 4, 165, 190, 227], "loss_fn": [2, 4, 227], "w": [2, 64, 165, 182, 192, 197], "mean": [2, 3, 4, 165, 190, 195, 201, 211, 212, 213, 214, 215, 216], "grad_fn": 2, "initi": [2, 3, 172, 190, 195, 196, 201], "randomli": [2, 3], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 190, 228, 232], "verifi": 2, "close": [2, 5, 13], "error_norm": 2, "5f": 2, "someth": [2, 3], "00005": 2, "00364": 2, "complet": [2, 3, 6, 232], "logist": [2, 141, 206, 209, 210, 221], "github": [2, 4, 6], "repo": [2, 4, 6], "enabl": [3, 6, 74, 182], "larg": [3, 190], "ish": 3, "transform": [3, 5, 74, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 173, 190, 195, 196, 197], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 24, 99, 121, 203], "200": 3, "line": 3, "python": [3, 37, 49, 56, 74, 172, 183, 184, 185, 230], "neural": [3, 5, 172, 181, 193, 198, 217], "network": [3, 5, 172, 181, 193], "build": [3, 5, 172], "concis": 3, "architectur": [3, 190, 232], "notabl": 3, "rope": [3, 190], "posit": [3, 24, 93, 114, 121, 165, 184, 190, 191, 192, 199, 203], "option": [3, 12, 14, 15, 22, 23, 24, 25, 26, 31, 32, 62, 63, 64, 65, 74, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 96, 107, 109, 111, 112, 118, 120, 121, 122, 123, 124, 125, 127, 128, 130, 131, 132, 134, 136, 146, 147, 148, 151, 152, 155, 157, 158, 161, 162, 163, 164, 165, 166, 168, 170, 174, 175, 176, 177, 178, 181, 182, 183, 191, 192, 197, 199, 203, 211, 212, 213, 214, 215, 216, 228, 233], "kei": [3, 123, 124, 125, 127, 128, 130, 131, 132, 180, 
183, 184, 199, 228, 230], "cach": 3, "concaten": 3, "project": [3, 199], "llamaattent": 3, "self": [3, 4, 7, 9, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 50, 52, 53, 54, 55, 56, 57, 58, 172, 190, 198, 217], "dim": [3, 193, 195, 196, 199, 201, 203], "num_head": [3, 199], "super": [3, 4, 172, 190], "tradit": [3, 203], "query_proj": 3, "bia": [3, 176, 177, 178, 184, 190, 191, 192, 197, 199], "key_proj": 3, "value_proj": 3, "out_proj": [3, 172], "__call__": [3, 4, 172, 190], "queri": [3, 199], "mask": [3, 199], "extract": [3, 172, 190], "l": [3, 4, 190, 191], "reshap": 3, "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 72, 174, 175, 176, 177, 181, 195, 196, 201], "score": 3, "softmax": [3, 212], "values_hat": 3, "rm": 3, "swiglu": 3, "rmsnorm": [3, 190], "llamaencoderlay": 3, "mlp_dim": 3, "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 206, 209, 210, 211, 221], "instanc": [3, 172, 185, 190, 205], "embed": [3, 190], "emb": [3, 193], "token": [3, 193], "num_lay": [3, 4, 227], "vocab_s": 3, "norm": [3, 178, 195], "multiheadattent": [3, 190], "create_additive_causal_mask": 3, "list": [3, 8, 12, 14, 26, 29, 30, 40, 41, 42, 43, 45, 49, 52, 55, 56, 58, 60, 62, 74, 76, 79, 80, 82, 83, 85, 86, 88, 89, 92, 93, 97, 107, 109, 111, 112, 118, 120, 122, 123, 124, 125, 127, 128, 131, 132, 134, 146, 148, 151, 152, 155, 161, 165, 166, 167, 170, 172, 176, 177, 178, 183, 185, 190, 230], "still": [3, 6], "consid": [3, 13, 59, 183, 195, 230], "train": [3, 4], "ignor": 3, "whatsoev": 3, "rest": [3, 184, 203], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 228], "temp": 3, "causal": 3, "save": [3, 100, 137, 138], "append": [3, 108], "store": 3, "per": [3, 4, 179, 195, 196, 201], "care": 3, "last": [3, 25, 56, 80, 83, 85, 86, 88, 89, 90, 108, 124, 147, 191, 192, 195], "logit": [3, 124, 212], "next": [3, 4], "categor": 3, "lazili": [3, 190], "noth": [3, 190], "yet": [3, 172, 190, 231], "forc": [3, 
4, 190, 231], "choos": [3, 203], "pars": 3, "feed": 3, "loop": [3, 4], "unsqueez": 3, "sequenc": [3, 191, 228, 232], "length": [3, 151, 191], "len": [3, 80, 83, 86, 89], "overwrit": 3, "discard": [3, 183], "old": 3, "moment": [3, 176, 177, 178], "anymor": 3, "everyth": 3, "small": [3, 143, 195, 196, 201, 232], "10": [3, 4, 102, 137, 143, 184, 190], "12": 3, "8192": 3, "1024": 3, "actual": [3, 15, 172], "materi": [3, 5], "could": [3, 190], "20_000": 3, "machin": [3, 5, 6, 181], "8gb": 3, "ram": 3, "32": [3, 4, 187], "44": 3, "doubl": 3, "bracket": 3, "becaus": [3, 190], "batch": [3, 108, 191, 192, 199], "zip": [3, 4], "haven": 3, "anyth": [3, 165], "result": [3, 15, 56, 100, 108, 152, 169, 184], "similar": [3, 199], "runtim": 3, "section": [3, 148], "access": [3, 37, 172, 190, 232], "origin": [3, 174, 175, 176, 178], "sentencepiec": 3, "pytorch": [3, 5, 195], "compat": [3, 124], "npz": [3, 100, 137, 138], "file": [3, 6, 100, 136, 137, 138], "directli": 3, "argpars": 3, "itertool": [3, 184], "starmap": [3, 184], "np": [3, 4, 231], "torch": 3, "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": 3, "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 47, 57, 195], "submodul": [3, 4, 190], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 179, 180, 190, 227, 228], "savez": 3, "k": [3, 77, 162, 163, 164], "v": [3, 65, 190], "left": [3, 194, 203, 209, 210], "disk": 3, "text": [3, 198, 204, 207, 217, 218, 220, 222], "format": [3, 100, 136, 137, 138], "oper": [3, 5, 33, 143, 146, 153, 158, 190, 231, 232, 233], "dictionari": [3, 179, 180, 183, 190, 230], "represent": [3, 183, 185], "tree_unflatten": 3, "helper": 3, "weight_fil": 3, "incur": 3, "sever": [3, 63, 64, 137, 138], "futur": 3, "pth": 3, "current": [3, 5, 6, 63, 64, 190], "around": 3, "m1": [3, 232], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, 
"ago": 3, "never": 3, "long": 3, "info": 3, "247": 3, "press": 3, "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], "purs": 3, "greater": [3, 24, 95, 121, 207, 222], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": 3, "off": [3, 6], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 184], "hi": 3, "bundl": 3, "hard": 3, "wet": 3, "he": 3, "were": [3, 232], "cry": 3, "watch": 3, "him": 3, "observ": 3, "numer": [3, 105, 107, 146, 174, 175, 176, 177, 178, 181, 195, 196, 201], "crowd": 3, "wa": [3, 180], "hurri": 3, "437": 3, "330": 3, "second": [3, 108, 156, 165, 176, 177, 178, 232], "spent": 3, "amount": 3, "39": 3, "ms": 3, "By": 3, "bigger": 3, "remain": [3, 165], "almost": 3, "nobodi": 3, "took": 3, "least": 3, "notic": 3, "distanc": 3, "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 148], "ey": 3, "speak": 3, "resum": 3, "postur": 3, "stood": 3, "feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": 3, "quarter": 3, "hour": 3, "made": 3, "immedi": 3, "repli": 3, "again": [3, 190], "hand": 3, "did": 3, "accustom": 3, "thu": [3, 190], "question": 3, "reason": 3, "tell": 3, "understand": 3, "579": 3, "690": 3, "num": [3, 130], "500": [3, 232], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 190], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": 3, "kind": 3, "longer": [3, 65], "soon": 3, "unless": [3, 172], "unlik": [3, 13], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": 3, "happi": 3, "ask": 3, "Is": 3, "shop": 3, "bui": 3, "food": 3, "633": 3, "21": 3, "475": 3, "su": 3, "j": [3, 6, 175, 176, 178], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": 3, "enhanc": 3, "rotari": [3, 203], "arxiv": [3, 174, 195, 196, 198, 201, 203, 217], "preprint": [3, 174], "2104": [3, 203], "09864": [3, 203], "zhang": 3, "sennrich": 3, 
"2019": [3, 177], "root": [3, 135, 149, 201], "advanc": 3, "inform": [3, 4, 190, 194, 199, 232], "system": 3, "shazeer": 3, "2020": 3, "glu": 3, "variant": [3, 178], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 157, 190], "mlp": [4, 190, 227], "inherit": [4, 230], "standard": [4, 37, 56, 108, 125, 231], "idiom": 4, "input_dim": [4, 190, 197], "hidden_dim": [4, 172, 227], "output_dim": [4, 190, 197], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 22, 172, 190, 202, 209, 210, 219], "cross": [4, 211, 212], "entropi": [4, 211, 212], "sub": [4, 130], "commonli": 4, "cross_entropi": [4, 190], "accuraci": 4, "valid": [4, 65, 168, 183, 230], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 227], "batch_siz": [4, 227], "num_epoch": [4, 227], "learning_r": [4, 174, 175, 176, 177, 178, 181, 182, 227], "train_imag": [4, 227], "train_label": [4, 227], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 227], "perm": 4, "permut": 4, "id": [4, 6], "put": 4, "trainabl": [4, 172, 173, 190], "loss_and_grad_fn": [4, 227], "value_and_grad": [4, 172, 190, 227, 231], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 77, 84, 85, 87, 88, 89, 195], "featur": [5, 63, 64, 195, 196, 197, 201, 203], "main": [5, 77, 184, 190], "differ": [5, 154], "lazi": [5, 172, 231], "multi": [5, 191, 192], "cpu": [5, 232], "gpu": [5, 232], "inspir": 5, "jax": [5, 228], "arrayfir": 5, "noteabl": 5, "unifi": 5, "live": [5, 232], "guid": 5, "regress": 5, "layer": [5, 172, 190, 195, 196, 197, 205, 223], "perceptron": 5, "llm": 5, "infer": [5, 92], "fft": 5, "tree": [5, 74, 93, 143, 165, 168, 179, 183, 184, 185], "develop": [5, 6], "document": [5, 47, 57], "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": 6, "14": 6, "sonoma": 6, "distribut": [6, 123, 124, 125, 127, 131, 132, 213, 216], "probabl": [6, 128, 211, 213, 232], "platform": 6, "processor": 6, "arm": [6, 187], "i386": 
6, "switch": 6, "conda": 6, "17": 6, "g": [6, 181, 182, 223, 233], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": 6, "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 129, 228], "env": 6, "cmake_build_parallel_level": 6, "edit": 6, "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "p": [6, 123, 176, 178, 190], "either": [6, 11, 47, 56, 57, 70, 71, 94, 95, 98, 99, 105, 108, 110, 113, 115, 154, 165, 205], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 108, 115, 199], "wish": 6, "environ": 6, "variabl": [6, 93, 97, 165, 167, 168], "export": 6, "developer_dir": 6, "app": 6, "content": 6, "sdk": 6, "xcrun": 6, "macosx": 6, "show": [6, 187], "unabl": 6, "tool": 6, "select": [6, 169], "sudo": 6, "devicetyp": 7, "attribut": [7, 8, 9, 26, 172], "kwarg": [8, 137, 138, 233], "union": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 130, 131, 132, 133, 134, 135, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 169, 170, 171, 192], "wise": [10, 11, 16, 17, 18, 19, 20, 21, 61, 66, 67, 70, 71, 72, 73, 75, 91, 94, 95, 98, 99, 101, 102, 103, 104, 105, 106, 110, 113, 115, 116, 133, 135, 141, 142, 144, 145, 149, 150, 154, 159, 160, 198, 206, 217, 218, 221], "absolut": [10, 13, 209, 210], "semant": [11, 60, 70, 71, 94, 95, 98, 99, 105, 108, 110, 113, 115, 154, 232], "keepdim": [12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 
43, 45, 55, 58, 107, 109, 111, 112, 122, 146, 155, 166], "reduct": [12, 14, 107, 109, 112, 122, 211, 212, 213, 214, 215, 216], "reduc": [12, 14, 22, 23, 107, 109, 111, 112, 122, 143, 155, 166], "unspecifi": [12, 14, 15, 22, 23, 24, 25, 62, 92, 107, 109, 111, 112, 118, 121, 122, 146, 147, 155, 157, 166, 170, 233], "entir": [12, 14, 22, 23, 107, 109, 111, 112, 122, 155, 166], "singleton": [12, 14, 22, 23, 107, 108, 109, 111, 112, 122, 155, 166], "rtol": 13, "05": [13, 195, 196, 201], "atol": 13, "08": [13, 175, 176, 177, 178, 181], "approxim": [13, 143, 194, 208, 209, 210], "comparison": [13, 71, 94, 95, 98, 99], "equal": [13, 24, 59, 77, 95, 99, 121, 128, 148], "ab": [13, 165, 195, 196, 198, 201, 203, 217], "array_equ": 13, "rel": 13, "toler": 13, "boolean": [13, 59, 106, 187], "interv": [15, 128, 132], "increment": 15, "otherwis": [15, 183, 207, 222], "int32": [15, 128, 187, 231], "convent": [15, 65, 177], "lead": 15, "fraction": 15, "integr": [15, 157], "invers": [16, 17, 18, 19, 20, 21, 73, 81, 82, 83, 84, 85, 86], "cosin": [16, 17, 66, 67], "hyperbol": [17, 19, 21, 67, 145, 160], "sine": [18, 19, 144, 145], "minimum": [22, 23], "kth": [24, 121], "partit": 24, "order": [24, 121, 190, 195, 205], "undefin": [24, 121], "sort": [24, 25, 121], "partiton": 24, "flatten": [24, 25, 121, 147, 157, 158, 183], "dimension": [26, 78, 79, 80, 81, 82, 83, 87, 88, 89, 191, 192, 193, 197], "val": [26, 92], "tupl": [26, 47, 57, 62, 64, 74, 76, 97, 120, 134, 151, 165, 167, 176, 177, 178, 183, 184, 185, 192, 205, 230], "ndarrai": [26, 231], "properti": [27, 35, 44, 49, 51], "argument": [27, 47, 57, 74, 93, 136, 165, 184, 190, 228, 232, 233], "elment": 51, "indices_or_sect": [52, 148], "nest": [56, 172, 190, 230], "correpsond": 56, "ddof": [58, 166], "equal_nan": 59, "nan": 59, "pad": [63, 64, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 191, 192], "dilat": [63, 64], "group": [63, 64, 195], "1d": [63, 65, 158], "convolut": [63, 64, 65, 191, 192], "channel": [63, 64, 191, 192], 
"c_in": [63, 64], "c_out": [63, 64], "convolv": [63, 64], "2d": 64, "spatial": [64, 195], "symmetr": 64, "discret": [65, 78, 79, 80, 81, 82, 83, 87, 88, 89, 193], "swap": [65, 156], "conv": 65, "filter": [65, 191, 192], "flip": 65, "signal": 65, "divis": 70, "quotient": 70, "mathrm": [72, 141], "frac": [72, 141, 174, 175, 176, 177, 178, 181, 195, 196, 201], "pi": 72, "int_0": 72, "dx": 72, "erf": 73, "retain_graph": [74, 136], "node": [74, 143, 168], "dict": [74, 100, 137, 172, 230], "leaf": [74, 183], "preserv": [74, 134], "intend": 74, "control": [74, 228], "flow": [74, 153], "exponenti": [75, 204, 220], "insert": [76, 232], "ident": [77, 153], "diagon": [77, 162, 163, 164], "zero": [77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 162, 163, 164, 171, 190], "th": 77, "whose": [77, 173], "One": [78, 81, 87, 135], "fourier": [78, 79, 80, 81, 82, 83, 87, 88, 89], "truncat": [78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 131], "dft": [78, 79, 80, 81, 82, 83, 87, 88, 89], "rfft": 84, "real": [84, 85, 86, 87, 88, 89], "rfft2": 85, "rfftn": 86, "silent": [87, 88, 89], "start_axi": 90, "end_axi": 90, "fun": [93, 97, 165, 167, 168, 232], "argnam": [93, 165], "cpp_function": [93, 165, 168], "neither": [93, 165], "keyword": [93, 137, 138, 165, 190, 228, 233], "strict": [94, 98], "binari": [100, 136, 137, 138, 207, 211, 222], "npy": [100, 136], "natur": [101, 103], "logarithm": [101, 102, 103, 104], "log": [103, 105, 107, 213, 216], "plu": 103, "exp": [105, 107, 125, 146, 204, 213, 220, 232], "stabl": [105, 107, 146], "prepend": 108, "remov": [108, 124, 151], "anoth": [108, 154, 169, 190, 232], "negat": 116, "pad_with": 120, "constant_valu": 120, "pad_width": 120, "edg": 120, "before_1": 120, "after_1": 120, "before_2": 120, "after_2": 120, "before_n": 120, "after_n": 120, "integ": [120, 123, 128, 148, 168, 187, 193], "before_i": 120, "after_i": 120, "extend": 120, "side": 120, "smaller": 121, "prng": [123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 228], "num_sampl": 
124, "unnorm": 124, "draw": 124, "uint32": [124, 187], "cdf": [125, 194, 208], "accord": [125, 169, 199], "seed": 126, "low": [128, 132], "high": [128, 132, 190, 193], "lower": [128, 131, 132, 162], "upper": [128, 131, 132], "bound": [128, 131, 132, 194, 232], "roadcast": 128, "domain": 131, "optino": 131, "uniformli": 132, "reciproc": 135, "arr": 136, "retain": 136, "dure": 136, "uncompress": 137, "my_path": 137, "tree_flatten": [137, 185, 190], "transformerencod": 137, "128": [137, 190], "flat_param": 137, "compress": 138, "simplif": 143, "reus": 143, "consumpt": 143, "meant": 143, "everi": [143, 184], "overhead": [143, 232], "1m": 143, "thousand": 143, "foo": 143, "matmul": [143, 232], "twice": [143, 232], "subarrai": 148, "being": [153, 190], "prevent": 153, "unchang": [153, 203], "axis1": 156, "axis2": 156, "taken": 157, "prior": [157, 158], "equial": 157, "exclud": 158, "elsewher": 162, "col": 162, "triangl": 162, "mse": 165, "param": [165, 190], "lvalu": 165, "dlvalu": 165, "dparam": 165, "lasso": 165, "l1": [165, 214], "varianc": [166, 195], "divisor": 166, "cotang": 167, "in_ax": 168, "out_ax": 168, "prefix": [168, 183], "subclass": 172, "arbitrari": [172, 183], "recurs": [172, 180, 190], "concept": 172, "frozen": [172, 190], "freez": [172, 190], "mymlp": 172, "in_dim": [172, 190], "out_dim": [172, 190], "16": [172, 187], "in_proj": 172, "fn": [173, 184, 231], "callabl": [173, 183, 184, 205], "wrt": 173, "rho": 174, "9": [174, 176, 177, 178], "06": 174, "paper": [174, 175, 176, 178], "zeiler": 174, "2012": [174, 181], "adapt": [174, 175], "1212": 174, "5701": 174, "v_": [174, 175, 176, 177, 178, 181, 182], "v_t": [174, 175, 176, 177, 178, 181, 182], "g_t": [174, 175, 176, 177, 178, 181, 182], "delta": 174, "w_": [174, 175, 176, 177, 178, 181, 182], "u_t": 174, "epsilon": [174, 175, 176, 177, 178, 181, 195, 196, 201], "u_": 174, "w_t": [174, 175, 176, 177, 178, 181, 182], "lambda": [174, 175, 176, 177, 178, 181, 182, 184, 190, 204, 220], "coeffici": [174, 
176, 177, 178], "averag": [174, 176, 177, 178], "denomin": [174, 175, 176, 177, 178, 181], "stabil": [174, 175, 176, 177, 178, 181, 195, 196, 201], "ddefault": 174, "duchi": 175, "hazan": 175, "singer": 175, "2011": 175, "subgradi": 175, "onlin": 175, "stochast": [175, 176, 178, 182], "jmlr": 175, "999": [176, 177, 178], "omit": [176, 178], "estim": [176, 178], "kingma": [176, 178], "ba": [176, 178], "2015": [176, 178], "iclr": [176, 177, 178], "m_": [176, 177, 178], "beta_1": [176, 177, 178], "m_t": [176, 177, 178], "beta_2": [176, 177, 178], "weight_decai": [177, 182], "contrast": [177, 180], "loshchilov": 177, "hutter": 177, "decoupl": 177, "decai": [177, 182], "regular": [177, 198, 217], "adam": 178, "infin": 178, "basi": 179, "appli": [179, 184, 191, 192, 194, 195, 196, 197, 198, 201, 202, 204, 206, 207, 208, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222], "optimizerst": 179, "defaultdict": 180, "miss": 180, "present": 180, "99": 181, "tieleman": 181, "hinton": 181, "lectur": 181, "coursera": 181, "smooth": 181, "momentum": 182, "dampen": 182, "nesterov": 182, "descent": 182, "mu": 182, "tau": 182, "strength": 182, "l2": 182, "penalti": 182, "is_leaf": 183, "dot": [183, 199], "notat": 183, "depth": 183, "hello": [183, 185], "charact": 183, "flat": [183, 185], "superset": 184, "extra": 184, "closer": 184, "dict_kei": 184, "recreat": 185, "world": 185, "42": 185, "tabl": [187, 193], "byte": 187, "bool_": 187, "uint8": 187, "unsign": 187, "uint16": 187, "int8": 187, "sign": 187, "int16": 187, "int64": 187, "64": 187, "arbitrarili": [190, 230, 231], "done": 190, "manual": 190, "explicitli": [190, 228], "solv": 190, "intuit": 190, "finetun": 190, "enumer": 190, "caus": 190, "local": 190, "scope": 190, "l2_loss": 190, "y_hat": 190, "trainable_paramet": 190, "loss_and_grad": 190, "workhors": 190, "Its": 190, "individu": 190, "subset": 190, "action": 190, "displai": 190, "tree_map": 190, "count": 190, "num_param": 190, "preclud": 190, "pure": [190, 227], 
"pattern": 190, "achiev": 190, "other_input": 190, "necessari": 190, "wrap": 190, "relu": 190, "prelu": 190, "gelu": [190, 209, 210], "silu": 190, "selu": 190, "mish": 190, "conv1d": 190, "conv2d": 190, "layernorm": 190, "groupnorm": 190, "sequenti": 190, "gelu_approx": [190, 194, 208], "gelu_fast_approx": [190, 194, 208], "binary_cross_entropi": 190, "l1_loss": 190, "mse_loss": 190, "nll_loss": 190, "kl_div_loss": 190, "in_channel": [191, 192], "out_channel": [191, 192], "kernel_s": [191, 192], "nlc": 191, "learnabl": [191, 192, 205], "nhwc": 192, "height": 192, "width": 192, "num_embed": 193, "lookup": 193, "typic": [193, 227], "usual": [193, 230], "vocabulari": 193, "approx": 194, "unit": [194, 202, 204, 206, 208, 209, 210, 219, 220, 221], "textrm": [194, 208], "phi": [194, 208], "geluapprox": 194, "sigma": [194, 206, 209, 210, 221], "60033": [194, 209], "0433603": [194, 209], "gelufast": 194, "773": [194, 210], "regard": 194, "num_group": 195, "affin": [195, 196, 197], "pytorch_compat": 195, "var": [195, 196], "gamma": [195, 196, 201], "particular": 195, "split": 195, "preced": 195, "http": [195, 196, 198, 201, 203, 217], "org": [195, 196, 198, 201, 203, 217], "1803": 195, "08494": 195, "1607": 196, "06450": 196, "monoton": [198, 217], "refer": [198, 217], "1908": [198, 217], "08681": [198, 217], "tanh": [198, 217], "softplu": [198, 217], "query_input_dim": 199, "key_input_dim": 199, "value_input_dim": 199, "value_dim": 199, "value_output_dim": 199, "attent": 199, "head": 199, "aggreg": 199, "linearli": 199, "bias": 199, "inf": 199, "neg": [199, 216], "attend": 199, "num_paramet": 200, "init": 200, "25": 200, "1910": 201, "07467": 201, "rectifi": [202, 219], "10000": 203, "rotat": 203, "consecut": 203, "larger": 203, "slightli": [203, 232], "angular": 203, "frequenc": 203, "begin": [204, 207, 220, 222], "leq": [204, 220], "0507": [204, 220], "67326": [204, 220], "elu": [204, 220], "plain": 205, "known": [206, 221], "swish": [206, 221], "cdot": [206, 209, 210, 
221], "threshold": [207, 222], "geq": [207, 222], "faster": 208, "exact": [209, 210], "0003": 209, "015": 210, "predict": [211, 212, 213, 214, 215, 216], "post": 211, "612192": 211, "kullback": 213, "leibler": 213, "diverg": 213, "likelihood": 216, "nll": 216, "subsequ": 227, "implicit": 228, "fine": 228, "grain": 228, "manag": [228, 232], "uniform": [228, 232], "pseudo": 228, "altern": 228, "splittabl": 228, "threefri": 228, "counter": 228, "cycl": 230, "inspect": 231, "composit": 231, "sin": 231, "pool": 232, "advantag": 232, "don": 232, "parallel": 232, "race": 232, "interest": 232, "albeit": 232, "contriv": 232, "suppos": 232, "d1": 232, "d2": 232, "4096": 232, "dens": 232, "better": 232, "millisecond": 232, "measur": 232, "default_stream": 233, "default_devic": 233, "my_devic": 233}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [9, 0, 1, "", "Stream"], [10, 2, 1, "", "abs"], [11, 2, 1, "", "add"], [12, 2, 1, "", "all"], [13, 2, 1, "", "allclose"], [14, 2, 1, "", "any"], [15, 2, 1, "", "arange"], [16, 2, 1, "", "arccos"], [17, 2, 1, "", "arccosh"], [18, 2, 1, "", "arcsin"], [19, 2, 1, "", "arcsinh"], [20, 2, 1, "", "arctan"], [21, 2, 1, "", "arctanh"], [22, 2, 1, "", "argmax"], [23, 2, 1, "", "argmin"], [24, 2, 1, "", "argpartition"], [25, 2, 1, "", "argsort"], [26, 0, 1, "", "array"], [59, 2, 1, "", "array_equal"], [60, 2, 1, "", "broadcast_to"], [61, 2, 1, "", "ceil"], [62, 2, 1, "", "concatenate"], [63, 2, 1, "", "conv1d"], [64, 2, 1, "", "conv2d"], [65, 2, 1, "", "convolve"], [66, 2, 1, "", "cos"], [67, 2, 1, "", "cosh"], [68, 2, 1, "", "default_device"], [69, 2, 1, "", "default_stream"], [70, 2, 1, "", "divide"], [71, 2, 1, "", "equal"], [72, 2, 1, "", "erf"], [73, 2, 1, "", "erfinv"], [74, 2, 1, "", "eval"], [75, 2, 1, "", "exp"], [76, 2, 1, "", "expand_dims"], [77, 2, 1, "", "eye"], [90, 2, 1, "", "flatten"], [91, 2, 1, "", "floor"], [92, 2, 1, "", "full"], [93, 2, 1, "", "grad"], [94, 2, 1, "", "greater"], [95, 2, 1, "", 
"greater_equal"], [96, 2, 1, "", "identity"], [97, 2, 1, "", "jvp"], [98, 2, 1, "", "less"], [99, 2, 1, "", "less_equal"], [100, 2, 1, "", "load"], [101, 2, 1, "", "log"], [102, 2, 1, "", "log10"], [103, 2, 1, "", "log1p"], [104, 2, 1, "", "log2"], [105, 2, 1, "", "logaddexp"], [106, 2, 1, "", "logical_not"], [107, 2, 1, "", "logsumexp"], [108, 2, 1, "", "matmul"], [109, 2, 1, "", "max"], [110, 2, 1, "", "maximum"], [111, 2, 1, "", "mean"], [112, 2, 1, "", "min"], [113, 2, 1, "", "minimum"], [114, 2, 1, "", "moveaxis"], [115, 2, 1, "", "multiply"], [116, 2, 1, "", "negative"], [117, 2, 1, "", "new_stream"], [118, 2, 1, "", "ones"], [119, 2, 1, "", "ones_like"], [120, 2, 1, "", "pad"], [121, 2, 1, "", "partition"], [122, 2, 1, "", "prod"], [133, 2, 1, "", "reciprocal"], [134, 2, 1, "", "reshape"], [135, 2, 1, "", "rsqrt"], [136, 2, 1, "", "save"], [137, 2, 1, "", "savez"], [138, 2, 1, "", "savez_compressed"], [139, 2, 1, "", "set_default_device"], [140, 2, 1, "", "set_default_stream"], [141, 2, 1, "", "sigmoid"], [142, 2, 1, "", "sign"], [143, 2, 1, "", "simplify"], [144, 2, 1, "", "sin"], [145, 2, 1, "", "sinh"], [146, 2, 1, "", "softmax"], [147, 2, 1, "", "sort"], [148, 2, 1, "", "split"], [149, 2, 1, "", "sqrt"], [150, 2, 1, "", "square"], [151, 2, 1, "", "squeeze"], [152, 2, 1, "", "stack"], [153, 2, 1, "", "stop_gradient"], [154, 2, 1, "", "subtract"], [155, 2, 1, "", "sum"], [156, 2, 1, "", "swapaxes"], [157, 2, 1, "", "take"], [158, 2, 1, "", "take_along_axis"], [159, 2, 1, "", "tan"], [160, 2, 1, "", "tanh"], [161, 2, 1, "", "transpose"], [162, 2, 1, "", "tri"], [163, 2, 1, "", "tril"], [164, 2, 1, "", "triu"], [165, 2, 1, "", "value_and_grad"], [166, 2, 1, "", "var"], [167, 2, 1, "", "vjp"], [168, 2, 1, "", "vmap"], [169, 2, 1, "", "where"], [170, 2, 1, "", "zeros"], [171, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[9, 1, 1, "", "__init__"]], "mlx.core.array": 
[[27, 3, 1, "", "T"], [26, 1, 1, "", "__init__"], [28, 1, 1, "", "abs"], [29, 1, 1, "", "all"], [30, 1, 1, "", "any"], [31, 1, 1, "", "argmax"], [32, 1, 1, "", "argmin"], [33, 1, 1, "", "astype"], [34, 1, 1, "", "cos"], [35, 3, 1, "", "dtype"], [36, 1, 1, "", "exp"], [37, 1, 1, "", "item"], [38, 1, 1, "", "log"], [39, 1, 1, "", "log1p"], [40, 1, 1, "", "logsumexp"], [41, 1, 1, "", "max"], [42, 1, 1, "", "mean"], [43, 1, 1, "", "min"], [44, 3, 1, "", "ndim"], [45, 1, 1, "", "prod"], [46, 1, 1, "", "reciprocal"], [47, 1, 1, "", "reshape"], [48, 1, 1, "", "rsqrt"], [49, 3, 1, "", "shape"], [50, 1, 1, "", "sin"], [51, 3, 1, "", "size"], [52, 1, 1, "", "split"], [53, 1, 1, "", "sqrt"], [54, 1, 1, "", "square"], [55, 1, 1, "", "sum"], [56, 1, 1, "", "tolist"], [57, 1, 1, "", "transpose"], [58, 1, 1, "", "var"]], "mlx.core.fft": [[78, 2, 1, "", "fft"], [79, 2, 1, "", "fft2"], [80, 2, 1, "", "fftn"], [81, 2, 1, "", "ifft"], [82, 2, 1, "", "ifft2"], [83, 2, 1, "", "ifftn"], [84, 2, 1, "", "irfft"], [85, 2, 1, "", "irfft2"], [86, 2, 1, "", "irfftn"], [87, 2, 1, "", "rfft"], [88, 2, 1, "", "rfft2"], [89, 2, 1, "", "rfftn"]], "mlx.core.random": [[123, 2, 1, "", "bernoulli"], [124, 2, 1, "", "categorical"], [125, 2, 1, "", "gumbel"], [126, 2, 1, "", "key"], [127, 2, 1, "", "normal"], [128, 2, 1, "", "randint"], [129, 2, 1, "", "seed"], [130, 2, 1, "", "split"], [131, 2, 1, "", "truncated_normal"], [132, 2, 1, "", "uniform"]], "mlx.nn": [[191, 0, 1, "", "Conv1d"], [192, 0, 1, "", "Conv2d"], [193, 0, 1, "", "Embedding"], [194, 0, 1, "", "GELU"], [195, 0, 1, "", "GroupNorm"], [196, 0, 1, "", "LayerNorm"], [197, 0, 1, "", "Linear"], [198, 0, 1, "", "Mish"], [172, 0, 1, "", "Module"], [199, 0, 1, "", "MultiHeadAttention"], [200, 0, 1, "", "PReLU"], [201, 0, 1, "", "RMSNorm"], [202, 0, 1, "", "ReLU"], [203, 0, 1, "", "RoPE"], [204, 0, 1, "", "SELU"], [205, 0, 1, "", "Sequential"], [206, 0, 1, "", "SiLU"], [207, 0, 1, "", "Step"], [208, 0, 1, "", "gelu"], [209, 0, 1, "", 
"gelu_approx"], [210, 0, 1, "", "gelu_fast_approx"], [217, 0, 1, "", "mish"], [218, 0, 1, "", "prelu"], [219, 0, 1, "", "relu"], [220, 0, 1, "", "selu"], [221, 0, 1, "", "silu"], [222, 0, 1, "", "step"], [173, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[172, 1, 1, "", "__init__"]], "mlx.nn.losses": [[211, 0, 1, "", "binary_cross_entropy"], [212, 0, 1, "", "cross_entropy"], [213, 0, 1, "", "kl_div_loss"], [214, 0, 1, "", "l1_loss"], [215, 0, 1, "", "mse_loss"], [216, 0, 1, "", "nll_loss"]], "mlx.optimizers": [[174, 0, 1, "", "AdaDelta"], [175, 0, 1, "", "Adagrad"], [176, 0, 1, "", "Adam"], [177, 0, 1, "", "AdamW"], [178, 0, 1, "", "Adamax"], [179, 0, 1, "", "Optimizer"], [180, 0, 1, "", "OptimizerState"], [181, 0, 1, "", "RMSprop"], [182, 0, 1, "", "SGD"]], "mlx.optimizers.Optimizer": [[179, 4, 1, "", "state"]], "mlx.utils": [[183, 2, 1, "", "tree_flatten"], [184, 2, 1, "", "tree_map"], [185, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property", "4": "py:attribute"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"], "4": ["py", "attribute", "Python attribute"]}, "titleterms": {"oper": [0, 1, 226], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 232], "primit": 1, "us": [1, 233], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 229, 231], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 197], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 224], "encod": 3, "full": [3, 92], "gener": 3, "put": 3, "all": [3, 12, 29], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 100], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 
22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "from": 6, "pypi": 6, "troubleshoot": 6, "sourc": 6, "requir": 6, "option": 6, "metal": 6, "found": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171], "devic": [7, 188], "dtype": [8, 35], "stream": [9, 188, 233], "ab": [10, 28], "add": 11, "allclos": 13, "ani": [14, 30], "arang": 15, "arcco": 16, "arccosh": 17, "arcsin": 
18, "arcsinh": 19, "arctan": 20, "arctanh": 21, "argmax": [22, 31], "argmin": [23, 32], "argpartit": 24, "argsort": 25, "arrai": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 186], "t": 27, "astyp": 33, "co": [34, 66], "exp": [36, 75], "item": 37, "log": [38, 101], "log1p": [39, 103], "logsumexp": [40, 107], "max": [41, 109], "mean": [42, 111], "min": [43, 112], "ndim": 44, "prod": [45, 122], "reciproc": [46, 133], "reshap": [47, 134], "rsqrt": [48, 135], "shape": 49, "sin": [50, 144], "size": 51, "split": [52, 130, 148], "sqrt": [53, 149], "squar": [54, 150], "sum": [55, 155], "tolist": 56, "transpos": [57, 161], "var": [58, 166], "array_equ": 59, "broadcast_to": 60, "ceil": 61, "concaten": 62, "conv1d": [63, 191], "conv2d": [64, 192], "convolv": 65, "cosh": 67, "default_devic": 68, "default_stream": 69, "divid": 70, "equal": 71, "erf": 72, "erfinv": 73, "eval": 74, "expand_dim": 76, "ey": 77, "fft": [78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 189], "fft2": 79, "fftn": 80, "ifft": 81, "ifft2": 82, "ifftn": 83, "irfft": 84, "irfft2": 85, "irfftn": 86, "rfft": 87, "rfft2": 88, "rfftn": 89, "flatten": 90, "floor": 91, "grad": [93, 190], "greater": 94, "greater_equ": 95, "ident": 96, "jvp": 97, "less": 98, "less_equ": 99, "log10": 102, "log2": 104, "logaddexp": 105, "logical_not": 106, "matmul": 108, "maximum": 110, "minimum": 113, "moveaxi": 114, "multipli": 115, "neg": 116, "new_stream": 117, "ones": 118, "ones_lik": 119, "pad": 120, "partit": 121, "random": [123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 228], "bernoulli": 123, "categor": 124, "gumbel": 125, "kei": 126, "normal": 127, "randint": 128, "seed": 129, "truncated_norm": 131, "uniform": 132, "save": 136, "savez": 137, "savez_compress": 138, "set_default_devic": 139, "set_default_stream": 140, "sigmoid": 141, "sign": 142, "simplifi": 143, "sinh": 145, "softmax": 146, "sort": 147, "squeez": 151, "stack": 152, 
"stop_gradi": 153, "subtract": 154, "swapax": 156, "take": 157, "take_along_axi": 158, "tan": 159, "tanh": 160, "tri": 162, "tril": 163, "triu": 164, "value_and_grad": [165, 173], "vjp": 167, "vmap": 168, "where": 169, "zero": 170, "zeros_lik": 171, "nn": [172, 173, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222], "modul": [172, 190], "optim": [174, 175, 176, 177, 178, 179, 180, 181, 182, 227], "adadelta": 174, "adagrad": 175, "adam": 176, "adamw": 177, "adamax": 178, "optimizerst": 180, "rmsprop": 181, "sgd": 182, "util": [183, 184, 185, 230], "tree_flatten": 183, "tree_map": 184, "tree_unflatten": 185, "data": 187, "type": 187, "support": 187, "neural": 190, "network": 190, "quick": [190, 231], "start": [190, 231], "The": 190, "class": 190, "paramet": 190, "updat": 190, "inspect": 190, "valu": 190, "embed": 193, "gelu": [194, 208], "groupnorm": 195, "layernorm": 196, "mish": [198, 217], "multiheadattent": 199, "prelu": [200, 218], "rmsnorm": 201, "relu": [202, 219], "rope": 203, "selu": [204, 220], "sequenti": 205, "silu": [206, 221], "step": [207, 222], "gelu_approx": 209, "gelu_fast_approx": 210, "loss": [211, 212, 213, 214, 215, 216, 225], "binary_cross_entropi": 211, "cross_entropi": 212, "kl_div_loss": 213, "l1_loss": 214, "mse_loss": 215, "nll_loss": 216, "function": [223, 225, 231], "tree": 230, "guid": 231, "basic": 231, "graph": 231, "unifi": 232, "memori": 232, "A": 232, "simpl": 232, "specifi": 233}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}})
\ No newline at end of file
+Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.Stream", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", 
"python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", "python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", 
"python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", 
"python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.simplify", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.Module", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.optimizers.AdaDelta", "python/_autosummary/mlx.optimizers.Adagrad", "python/_autosummary/mlx.optimizers.Adam", "python/_autosummary/mlx.optimizers.AdamW", "python/_autosummary/mlx.optimizers.Adamax", "python/_autosummary/mlx.optimizers.Lion", "python/_autosummary/mlx.optimizers.Optimizer", "python/_autosummary/mlx.optimizers.OptimizerState", "python/_autosummary/mlx.optimizers.RMSprop", "python/_autosummary/mlx.optimizers.SGD", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", 
"python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/nn", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/functions", "python/nn/layers", "python/nn/losses", "python/ops", "python/optimizers", "python/random", "python/transforms", "python/tree_utils", "quick_start", "unified_memory", "using_streams"], 
"filenames": ["cpp/ops.rst", "dev/extensions.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.Stream.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", 
"python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", 
"python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", 
"python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.simplify.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", 
"python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.Module.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.optimizers.AdaDelta.rst", "python/_autosummary/mlx.optimizers.Adagrad.rst", "python/_autosummary/mlx.optimizers.Adam.rst", "python/_autosummary/mlx.optimizers.AdamW.rst", "python/_autosummary/mlx.optimizers.Adamax.rst", "python/_autosummary/mlx.optimizers.Lion.rst", "python/_autosummary/mlx.optimizers.Optimizer.rst", "python/_autosummary/mlx.optimizers.OptimizerState.rst", "python/_autosummary/mlx.optimizers.RMSprop.rst", "python/_autosummary/mlx.optimizers.SGD.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", 
"python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/functions.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/ops.rst", "python/optimizers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "quick_start.rst", "unified_memory.rst", "using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.Stream", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", 
"mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clip", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.dequantize", "mlx.core.divide", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_not", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", 
"mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.reshape", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.simplify", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.Module", "mlx.nn.value_and_grad", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.Optimizer", "mlx.optimizers.OptimizerState", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_unflatten", "Array", "Data Types", "Devices and Streams", "FFT", "Neural Networks", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GroupNorm", "mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.Mish", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.Step", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.selu", 
"mlx.nn.silu", "mlx.nn.step", "Functions", "Layers", "Loss Functions", "Operations", "Optimizers", "Random", "Transforms", "Tree Utils", "Quick Start Guide", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 199, 239, 240, 242, 243, 244], "provid": [1, 3, 72, 97, 144, 173, 180, 193, 199, 208, 210, 235, 245], "open": [1, 15, 135, 139], "flexibl": [1, 5], "which": [1, 3, 4, 5, 6, 15, 33, 77, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 97, 101, 105, 128, 131, 132, 141, 144, 145, 146, 159, 160, 165, 173, 175, 176, 203, 213, 222, 227, 240, 244, 245], "user": [1, 3, 199], "mai": 1, "add": [1, 3, 79, 110, 125, 128, 200, 201, 244], "special": 1, "without": [1, 3, 5, 161, 208, 235, 242, 244], "much": [1, 3], "hassl": 1, "while": [1, 3, 6, 141, 213], "librari": [1, 6, 199], "suppli": 1, "effici": [1, 3, 5, 213, 243], "can": [1, 3, 5, 6, 11, 15, 47, 58, 73, 74, 77, 98, 99, 102, 103, 110, 115, 118, 120, 130, 131, 135, 138, 139, 162, 173, 180, 199, 202, 215, 239, 240, 242, 243, 244, 245], "compos": [1, 5, 199, 243], "ani": [1, 3, 5, 15, 151, 192, 193, 194, 199, 203, 210, 242, 243, 244], "number": [1, 15, 52, 66, 72, 80, 97, 100, 101, 104, 125, 128, 129, 131, 134, 137, 139, 142, 151, 170, 173, 175, 176, 199, 200, 201, 204, 208, 240, 245], "applic": [1, 6], "aris": 1, "case": [1, 3, 83, 86, 87, 89, 90, 91, 92, 93, 113, 141, 159, 214, 217, 227, 232, 234, 243, 244, 245], "where": [1, 4, 80, 128, 173, 176, 200, 201, 203, 204, 205, 206, 211, 214, 216, 217, 218, 219, 220, 232, 233, 234], "new": [1, 4, 61, 119, 141, 160, 169, 180, 193, 208, 239], "function": [1, 2, 3, 4, 5, 13, 75, 76, 77, 97, 101, 113, 144, 149, 151, 173, 175, 176, 181, 193, 199, 203, 207, 215, 217, 218, 219, 220, 229, 230, 234, 239, 240, 242], "highli": [1, 6], "optim": [1, 2, 4, 5], "ar": [1, 2, 3, 4, 5, 6, 13, 15, 60, 61, 63, 67, 80, 82, 83, 85, 86, 88, 89, 91, 92, 97, 101, 113, 125, 126, 128, 130, 131, 132, 135, 138, 139, 144, 145, 146, 159, 160, 165, 173, 175, 176, 180, 187, 192, 193, 
200, 201, 204, 205, 208, 210, 235, 242, 243, 244], "need": [1, 3, 4, 5, 60, 128, 199, 240, 243, 244], "For": [1, 3, 6, 128, 194, 199, 210, 240, 243, 244], "you": [1, 3, 4, 5, 6, 199, 240, 244], "design": [1, 2, 5, 240, 244], "your": [1, 3, 6, 180], "own": [1, 6], "link": [1, 6], "top": [1, 206], "core": [1, 2, 3, 4, 180, 199, 221, 239, 243], "we": [1, 2, 3, 4, 72, 128, 144, 185, 187, 199, 202, 215, 240, 242, 244], "inner": 1, "work": [1, 3, 6], "go": [1, 3], "over": [1, 3, 4, 12, 14, 22, 23, 24, 25, 65, 66, 83, 86, 89, 92, 104, 112, 114, 116, 117, 126, 127, 143, 154, 155, 163, 174, 200, 201, 204, 205, 211, 222], "simpl": [1, 3, 4, 199, 202, 235], "learn": [1, 2, 4, 5, 182, 183, 184, 185, 186, 187, 190, 191, 204, 205, 211], "step": [1, 3, 4, 15, 199], "involv": [1, 239], "ad": [1, 2, 6, 180, 182, 183, 184, 185, 186, 190], "let": [1, 2, 3], "s": [1, 2, 3, 4, 35, 44, 72, 82, 83, 85, 86, 88, 89, 91, 92, 97, 105, 116, 128, 131, 142, 173, 174, 176, 181, 188, 199, 239, 240, 243, 244], "sai": [1, 3], "would": [1, 3, 244], "like": [1, 3, 5, 124, 179, 243, 244], "an": [1, 3, 4, 6, 8, 12, 14, 26, 61, 65, 66, 77, 80, 93, 96, 100, 114, 117, 119, 123, 124, 125, 127, 128, 129, 141, 142, 156, 159, 164, 165, 166, 170, 176, 178, 179, 182, 188, 189, 192, 193, 199, 204, 205, 206, 208, 210, 219, 230, 240, 242, 243, 244, 245], "take": [1, 3, 4, 97, 101, 115, 118, 124, 129, 166, 173, 175, 176, 179, 240, 244, 245], "two": [1, 11, 13, 60, 73, 74, 82, 85, 91, 98, 99, 102, 103, 110, 113, 115, 118, 120, 164, 210, 244], "arrai": [1, 3, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 149, 150, 151, 
152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 199, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 234, 243, 244], "x": [1, 2, 3, 4, 75, 100, 129, 132, 142, 145, 149, 151, 171, 172, 177, 180, 187, 193, 199, 203, 204, 205, 206, 207, 211, 212, 214, 216, 217, 218, 219, 220, 227, 229, 230, 231, 232, 233, 234, 239, 243, 244], "y": [1, 2, 3, 4, 151, 177, 183, 199, 204, 205, 206, 211, 227, 239], "scale": [1, 3, 72, 128, 129, 208, 214, 232], "them": [1, 3, 199, 244], "both": [1, 11, 73, 74, 98, 99, 102, 103, 110, 115, 118, 120, 131, 162, 239, 243, 244], "some": [1, 2, 3, 4], "coeffic": 1, "alpha": [1, 128, 185, 190, 214, 228, 230, 232], "beta": [1, 72, 128, 184, 185, 186, 187, 204, 205, 227], "respect": [1, 2, 4, 97, 128, 173, 180, 193, 199, 203, 204, 205, 243], "togeth": [1, 4, 128, 193], "get": [1, 2, 4, 66, 133, 189, 199, 244], "z": [1, 151], "well": [1, 3, 199, 208], "veri": [1, 3, 208, 244], "easili": 1, "do": [1, 3, 6, 180, 185, 199], "just": [1, 4], "write": [1, 3, 199], "out": [1, 6], "follow": [1, 3, 4, 5, 6, 15, 67, 72, 128, 182, 183, 184, 185, 186, 187, 191, 199, 219, 220, 223, 240, 244], "import": [1, 2, 3, 4, 6, 145, 151, 173, 180, 192, 193, 194, 199, 221, 243], "mx": [1, 2, 3, 4, 145, 151, 173, 180, 199, 212, 221, 223, 231, 239, 240, 243, 244, 245], "def": [1, 2, 3, 4, 151, 173, 180, 199, 244], "simple_axpbi": 1, "float": [1, 13, 15, 57, 95, 96, 129, 130, 135, 138, 139, 182, 183, 184, 185, 186, 187, 190, 191, 196, 204, 205, 211, 213, 217, 222, 227, 228, 234], "return": [1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 37, 50, 57, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 
132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 192, 193, 194, 199, 210, 221, 222, 223, 224, 225, 226, 227, 228, 242, 244], "thi": [1, 3, 4, 6, 12, 13, 14, 15, 22, 23, 24, 25, 77, 101, 110, 112, 113, 114, 116, 117, 126, 127, 131, 151, 154, 155, 156, 163, 165, 174, 180, 199, 210, 217, 219, 220, 234, 242], "perform": [1, 3, 5, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 113, 129, 142, 154, 165, 199, 204, 244], "leav": [1, 193], "differenti": [1, 5], "howev": [1, 199, 203, 204, 240], "vector": [1, 2, 5, 101, 165, 175, 176, 202, 243], "math": [1, 3, 228], "often": 1, "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 189, 192], "same": [1, 3, 6, 60, 61, 66, 67, 87, 90, 91, 92, 97, 101, 125, 131, 142, 175, 177, 180, 199, 204, 210, 228, 240, 244], "realli": 1, "part": 1, "doe": [1, 3, 6, 199], "fast": [1, 151, 203, 220, 244], "so": [1, 3, 6, 97, 151, 173, 239, 244], "decid": [1, 193], "want": [1, 3, 244], "reli": 1, "acceler": 1, "framework": [1, 5], "continu": 1, "impos": 1, "our": [1, 3, 4, 182, 183, 184, 186, 187, 215], "assumpt": 1, "also": [1, 3, 4, 5, 11, 73, 74, 83, 86, 89, 92, 98, 99, 102, 103, 110, 115, 118, 120, 128, 162, 181, 189, 199, 208, 210, 214, 216, 218, 232, 233, 235, 239, 243, 245], "assum": [1, 3, 193, 199, 204], "how": [1, 3, 4, 199, 200, 201, 202, 208, 244], "gradient": [1, 2, 4, 97, 161, 173, 180, 181, 182, 184, 185, 186, 187, 191, 199, 210, 239, 243], "ins": 1, "what": [1, 3, 193], "coincid": 1, "right": [1, 128, 203, 219, 220, 228], "place": [1, 3, 142], "cours": 1, "The": [1, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 35, 44, 50, 57, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 
110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 149, 150, 152, 153, 154, 155, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 196, 200, 201, 202, 204, 205, 206, 208, 210, 211, 213, 215, 217, 221, 222, 223, 224, 225, 226, 227, 228, 234, 239, 243, 244, 245], "structur": [1, 77], "from": [1, 3, 4, 5, 72, 88, 89, 91, 92, 96, 105, 113, 124, 128, 130, 131, 132, 133, 135, 138, 145, 159, 161, 162, 165, 166, 177, 179, 192, 193, 194, 199, 208, 227, 242, 243, 244], "frontend": 1, "api": 1, "redirect": 1, "when": [1, 3, 5, 6, 180, 200, 201, 223, 227, 240, 244], "appropri": 1, "fallback": 1, "metal": 1, "vjp": [1, 243], "jvp": [1, 243], "In": [1, 3, 4, 113, 128, 180, 182, 183, 184, 186, 187, 193, 199, 204, 242, 244], "one": [1, 3, 6, 57, 63, 66, 79, 80, 108, 113, 129, 131, 159, 162, 244], "sentenc": 1, "comput": [1, 2, 3, 4, 5, 6, 72, 97, 101, 110, 116, 128, 151, 154, 161, 173, 174, 175, 181, 182, 184, 185, 186, 187, 199, 204, 205, 210, 211, 213, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 239, 243, 244], "graph": [1, 3, 4, 5, 77, 144, 151], "rule": 1, "evalu": [1, 3, 4, 77, 101, 144, 175, 180, 199, 239, 243], "said": [1, 3], "start": [1, 2, 3, 5, 6, 15, 104, 156, 244], "discuss": 1, "more": [1, 4, 8, 57, 113, 199, 240, 244], "detail": [1, 8, 182, 183, 184, 186, 187, 199], "thei": [1, 2, 3, 67, 180, 187, 215, 242, 243, 244], "c": [1, 3, 196, 200, 201, 243, 244], "scalar": [1, 11, 13, 26, 37, 57, 60, 61, 63, 73, 74, 95, 96, 97, 98, 99, 102, 103, 104, 110, 111, 113, 115, 118, 120, 125, 135, 138, 139, 162, 173, 177, 181, 228, 243], "sum": [1, 2, 11, 112, 154, 199, 221, 222, 223, 224, 225, 226, 227, 228], "elementwis": 1, "numpi": [1, 3, 4, 5, 11, 13, 15, 61, 73, 74, 98, 99, 102, 103, 110, 113, 115, 118, 120, 162, 243], "style": [1, 11, 
13, 73, 74, 98, 99, 102, 103, 110, 113, 115, 118, 120, 162], "broadcast": [1, 11, 13, 61, 63, 73, 74, 96, 98, 99, 102, 103, 110, 113, 115, 118, 120, 130, 131, 138, 139, 162, 166, 177, 208], "between": [1, 5, 63, 244], "input": [1, 2, 3, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 73, 74, 75, 76, 78, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 97, 98, 99, 101, 102, 103, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 124, 125, 126, 127, 128, 129, 137, 140, 141, 142, 143, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 171, 172, 173, 174, 176, 177, 179, 200, 201, 202, 204, 205, 206, 208, 210, 211, 213, 217, 221, 223, 226, 228, 234, 243], "upcast": 1, "const": 1, "factor": [1, 222], "streamordevic": 1, "stream": [1, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 71, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 134, 135, 137, 138, 139, 140, 141, 142, 143, 148, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 174, 177, 178, 179, 244], "schedul": [1, 244], "itself": 1, "call": [1, 3, 4, 27, 95, 180, 199, 202, 215, 239], "other": [1, 3, 180, 187, 199, 208, 243], "within": [1, 24], "simplest": [1, 199], "wai": [1, 3, 6, 199], "about": [1, 3, 4, 244], "term": [1, 182, 183, 184, 185, 186, 190], "exist": [1, 3], "auto": [1, 6], "ax": [1, 12, 14, 22, 23, 58, 79, 82, 83, 85, 86, 88, 89, 91, 92, 112, 114, 116, 117, 125, 127, 154, 159, 163, 164, 169, 174], "multipli": [1, 128], 
"earlier": 1, "goal": 1, "themselv": 1, "contain": [1, 3, 50, 77, 87, 88, 89, 111, 128, 156, 177, 180, 199], "act": 1, "data": [1, 4, 5, 8, 15, 80, 90, 91, 96, 100, 104, 123, 138, 170, 178], "nor": [1, 97, 173], "rather": [1, 244], "easi": [1, 199], "interfac": 1, "block": [1, 3], "A": [1, 3, 5, 6, 50, 60, 97, 101, 112, 113, 128, 130, 131, 132, 134, 135, 138, 139, 156, 160, 173, 175, 176, 180, 181, 184, 186, 192, 193, 194, 199, 204, 205, 207, 211, 215, 220, 228, 229, 239], "It": [1, 3, 6, 97, 173, 186, 188, 199, 210], "creat": [1, 3, 6, 80, 100, 180, 199, 239], "output": [1, 3, 6, 12, 13, 14, 15, 22, 23, 24, 61, 80, 87, 90, 91, 92, 96, 97, 100, 104, 112, 114, 116, 117, 123, 124, 126, 127, 130, 131, 132, 134, 135, 138, 139, 145, 146, 154, 159, 163, 166, 170, 173, 174, 175, 176, 177, 178, 179, 200, 201, 206, 208, 210, 217, 221, 222, 223, 224, 225, 226, 227, 228, 234, 243, 244], "given": [1, 12, 14, 24, 61, 63, 64, 72, 77, 79, 81, 82, 83, 84, 85, 86, 90, 91, 92, 96, 112, 114, 116, 117, 127, 135, 142, 154, 156, 163, 170, 171, 172, 174, 208], "set": [1, 3, 4, 6, 180, 189, 203, 206, 210, 213, 217, 228, 234, 240], "further": [1, 6], "class": [1, 3, 4, 7, 8, 9, 26, 180, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234], "under": 1, "These": [1, 166, 244], "word": 1, "bit": [1, 72, 128, 129, 196, 210], "abstract": 1, "back": [1, 3], "give": [1, 3, 4, 24], "ourselv": 1, "concret": [1, 206, 244], "imag": [1, 201], "public": [1, 199], "explicit": [1, 240], "alpha_": 1, "beta_": 1, "must": [1, 6, 63, 77, 96, 130, 131, 135, 138, 139, 177], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 199, 243], "avoid": 1, "unnecessari": [1, 3], "alloc": [1, 180], "respons": 1, "space": [1, 104, 226], "void": 1, "eval_cpu": 1, "std": 1, "overrid": 1, "eval_gpu": 1, "jacobian": [1, 101, 175, 243], "product": [1, 101, 
113, 127, 175, 208, 243], "primal": [1, 101, 175], "tangent": [1, 20, 21, 101, 167, 168], "int": [1, 3, 4, 7, 9, 12, 14, 15, 22, 23, 24, 25, 29, 30, 31, 32, 40, 41, 42, 43, 45, 48, 50, 53, 56, 57, 59, 61, 64, 65, 66, 72, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 96, 97, 100, 104, 112, 114, 116, 117, 119, 123, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 154, 155, 156, 159, 160, 163, 164, 165, 166, 169, 170, 171, 172, 173, 174, 176, 178, 180, 199, 200, 201, 202, 204, 205, 206, 208, 210, 211, 213, 222, 223, 226, 228], "argnum": [1, 97, 173], "cotan": 1, "across": [1, 204], "pair": [1, 125, 213], "repres": [1, 3, 228], "axi": [1, 3, 4, 12, 14, 22, 23, 24, 25, 29, 30, 31, 32, 40, 41, 42, 43, 45, 53, 56, 59, 64, 79, 81, 84, 87, 88, 89, 90, 91, 92, 112, 114, 116, 117, 119, 125, 126, 127, 131, 154, 155, 156, 159, 160, 163, 164, 165, 166, 169, 174, 176, 222, 223, 226, 228], "correspond": [1, 12, 14, 63, 72, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 112, 114, 117, 127, 163, 176, 193], "dimens": [1, 3, 12, 14, 22, 23, 44, 50, 57, 66, 79, 88, 89, 91, 92, 93, 112, 113, 114, 116, 117, 127, 128, 131, 137, 163, 166, 169, 174, 200, 201, 204, 205, 208, 211, 213], "vmap": [1, 243], "print": [1, 2, 3, 4, 6, 192, 193, 194, 199, 240, 243], "ostream": 1, "os": [1, 6], "equival": [1, 27, 47, 58, 95, 203, 210], "check": [1, 6, 60], "bool": [1, 12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 56, 57, 59, 60, 77, 112, 114, 116, 117, 127, 130, 135, 138, 139, 144, 163, 174, 191, 200, 201, 204, 205, 206, 208, 210, 213], "is_equival": 1, "privat": 1, "fall": 1, "eval": [1, 2, 3, 4, 151, 180, 199, 239, 243], "deriv": 1, "base": [1, 77, 107, 109, 180, 186, 188, 213, 239, 240], "abov": [1, 3, 6, 128, 171, 185, 199, 244], "demonstr": 1, "treat": [1, 60, 88, 89, 91, 92, 165], "paramet": [1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 
79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 190, 191, 192, 193, 194, 200, 201, 202, 203, 204, 205, 206, 208, 210, 211, 213, 215, 217, 221, 222, 223, 224, 225, 226, 227, 228, 234, 235, 239], "produc": [1, 208], "through": [1, 161, 187], "construct": [1, 4, 96, 123, 178], "its": [1, 6, 113, 126, 137, 151, 170, 181, 184, 185, 186, 194, 199, 210, 244], "type": [1, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 50, 57, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 188, 192, 199, 221, 222, 223, 224, 225, 226, 227, 228], "shape": [1, 3, 4, 47, 60, 61, 65, 66, 81, 84, 87, 90, 91, 92, 96, 101, 113, 123, 124, 130, 131, 132, 134, 135, 138, 139, 141, 166, 175, 177, 178, 179, 199, 200, 201, 206, 228, 239, 243, 244], "pass": [1, 3, 4, 47, 58, 125, 173, 181, 192, 193, 199, 210, 215], "re": [1, 4], "now": [1, 3, 210], "promot": 1, "dtype": [1, 3, 15, 26, 33, 57, 80, 96, 100, 104, 123, 132, 134, 135, 138, 139, 170, 178, 196, 221, 243], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 15, 80, 100, 104, 123, 132, 134, 138, 139, 170, 
178, 196, 221, 243], "non": [1, 6, 180, 207, 229], "point": [1, 2, 3, 6, 95, 129, 196], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 33, 90, 91, 92], "up": [1, 3, 210], "determin": 1, "x_cast": 1, "astyp": [1, 3], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 15, 48, 53, 59, 64, 65, 66, 80, 93, 97, 125, 130, 139, 142, 156, 160, 170, 171, 172, 173, 174, 176, 180, 182, 184, 185, 186, 187, 190, 191, 192, 199, 200, 201, 203, 204, 205, 209, 212, 214, 217, 219, 220, 221, 222, 227, 228, 230, 231, 232, 234, 240, 243], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 199], "resolv": 1, "No": [1, 3], "happen": [1, 3, 239], "alon": 1, "effect": 1, "onli": [1, 3, 5, 6, 60, 65, 66, 128, 180, 196, 199, 244], "execut": [1, 6, 244], "depend": [1, 2, 57, 244], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 134, 135, 137, 138, 139, 140, 141, 142, 143, 147, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 174, 177, 178, 179, 244, 245], "specifi": [1, 15, 33, 66, 88, 89, 96, 97, 104, 119, 123, 131, 164, 165, 166, 169, 173, 176, 178, 217, 221, 222, 223, 224, 225, 226, 227, 228, 234, 244], "memori": [1, 5, 151, 180], "ha": [1, 3, 4, 5, 57, 87, 88, 90, 91, 92, 97, 131, 180, 206, 239, 243, 244], "been": [1, 3], "try": [1, 6], "naiv": 1, "gener": [1, 2, 15, 80, 88, 89, 104, 130, 134, 135, 138, 139, 240, 245], "version": [1, 6, 72, 110, 112, 128, 154, 176, 240], "declar": 1, "member": [1, 199], "method": [1, 
3, 7, 8, 9, 26, 180, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 199], "each": [1, 50, 72, 77, 113, 125, 128, 129, 131, 145, 146, 156, 169, 176, 177, 202, 204, 213, 222, 240], "element": [1, 10, 11, 16, 17, 18, 19, 20, 21, 24, 62, 68, 69, 72, 73, 74, 75, 76, 78, 80, 94, 95, 98, 99, 102, 103, 106, 107, 108, 109, 110, 111, 115, 118, 120, 121, 126, 128, 129, 140, 143, 149, 150, 152, 153, 157, 158, 162, 165, 167, 168, 173, 177, 207, 213, 216, 229, 230, 233], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 199], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 75, 173, 182, 183, 184, 185, 186, 187, 190, 191, 199, 244], "readi": 1, "fill": [1, 96, 124, 170, 179], "malloc_or_wait": 1, "synchron": 1, "avail": [1, 2, 3, 4, 6, 8, 196, 244], "There": [1, 199], "wait": [1, 3], "here": [1, 3, 230, 244], "request": 1, "pressur": 1, "condit": [1, 177, 244], "set_data": 1, "nbyte": 1, "collect": [1, 189, 193, 242], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 50, 66, 72, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 96, 100, 128, 129, 131, 141, 156, 159, 199, 200, 201, 202, 210], "map": [1, 4, 105, 193, 202], "linear": [1, 3, 4, 5, 180, 193, 199, 203, 210, 212, 214, 216, 218, 219, 220, 231, 232, 233], "indic": [1, 13, 22, 23, 24, 25, 77, 97, 156, 165, 166, 173, 222], "offset": [1, 3], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 65, 66, 200, 201, 213], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 12, 14, 15, 22, 23, 24, 25, 60, 64, 65, 66, 72, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 97, 100, 104, 112, 114, 116, 117, 123, 126, 127, 128, 129, 130, 131, 132, 134, 135, 137, 138, 139, 141, 142, 144, 155, 156, 159, 160, 163, 169, 170, 171, 172, 173, 174, 176, 178, 180, 182, 183, 184, 185, 186, 187, 189, 190, 191, 196, 200, 201, 206, 208, 210, 213, 221, 222, 223, 224, 225, 226, 227, 228, 240, 242, 245], "row": [1, 80, 100, 128, 170], "major": 1, 
"henc": [1, 128], "doesn": [1, 199], "additon": 1, "abl": [1, 128], "all": [1, 4, 6, 13, 24, 66, 77, 80, 83, 86, 89, 92, 113, 125, 126, 159, 180, 188, 199, 208, 210, 240, 243, 245], "incom": 1, "accordingli": 1, "dispatch": 1, "float16": [1, 196], "bfloat16": 1, "complex64": 1, "throw": 1, "error": [1, 6, 75, 76, 156, 203, 210, 218, 219, 220, 225], "encount": 1, "unexpect": [1, 15], "regist": [1, 4], "op": 1, "contruct": 1, "assert": 1, "2": [1, 2, 3, 4, 66, 75, 82, 85, 87, 88, 89, 90, 91, 92, 109, 113, 128, 137, 170, 171, 172, 180, 182, 183, 184, 185, 190, 196, 199, 201, 203, 211, 219, 227, 228, 243, 244], "1": [1, 3, 4, 15, 24, 25, 65, 66, 81, 82, 84, 85, 87, 88, 89, 90, 91, 92, 93, 113, 126, 128, 131, 139, 149, 155, 165, 173, 180, 182, 183, 184, 185, 186, 187, 190, 191, 196, 199, 200, 201, 203, 204, 205, 209, 211, 213, 214, 217, 219, 220, 221, 222, 223, 226, 227, 228, 232, 234, 239, 243, 244], "correct": [1, 184, 185, 186], "els": [1, 3, 199], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 13, 65, 66, 113, 128], "have": [1, 3, 6, 60, 88, 89, 91, 92, 113, 131, 187, 192, 208, 215, 242, 244], "rememb": 1, "3": [1, 3, 6, 187, 240, 243], "complic": 1, "keep": [1, 12, 14, 22, 23, 112, 114, 116, 117, 127, 163, 174, 199], "mind": [1, 3], "half": [1, 15, 135, 139, 213], "precis": [1, 3, 199, 203], "direct": [1, 3, 187, 244], "fix": [1, 3, 6], "possibl": [1, 3, 113, 156, 202, 244], "due": 1, "transpos": [1, 3, 27], "aren": 1, "guarante": 1, "fit": [1, 128, 244], "requir": [1, 3, 199], "column": [1, 80, 100, 128], "inplac": 1, "expect": [1, 3, 200, 201, 208], "answer": 1, "copi": [1, 3, 5, 126, 155], "simpli": [1, 3, 6, 180, 212, 231], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 93, 97, 113, 126, 137, 164, 173, 184, 185, 186, 192, 199, 204, 244], "mode": [1, 67], "i": [1, 3, 101, 185, 199, 200, 201], "e": [1, 4, 6, 75, 101, 149, 183, 200, 201, 204, 205, 211, 235, 239, 245], "match": [1, 6, 81, 82, 
83, 84, 85, 86, 87, 88, 89, 90, 91, 92], "transposit": 1, "data_s": 1, "items": 1, "flag": 1, "copy_inplac": 1, "copytyp": 1, "n": [1, 3, 26, 65, 66, 80, 81, 83, 84, 86, 87, 90, 92, 100, 170, 174, 200, 201, 228], "incx": 1, "inci": 1, "great": 1, "But": [1, 244], "criteria": 1, "luckili": 1, "alwai": [1, 192], "With": 1, "final": [1, 2, 3, 4], "singl": [1, 4, 77, 101, 125, 175], "row_contigu": 1, "col_contigu": 1, "common": 1, "hit": 1, "mileston": 1, "enough": 1, "run": [1, 3, 4, 5, 6, 151, 182, 184, 185, 186, 244, 245], "If": [1, 3, 6, 12, 14, 15, 22, 23, 24, 25, 57, 60, 63, 64, 67, 77, 90, 91, 92, 95, 96, 97, 112, 113, 114, 116, 117, 123, 125, 126, 127, 131, 144, 154, 155, 156, 163, 165, 166, 173, 174, 176, 178, 193, 200, 201, 204, 205, 206, 208, 210, 213, 215, 228, 244, 245], "plan": 1, "stop": [1, 3, 15, 104, 161], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 244], "silicon": [1, 3, 5, 6, 244], "address": 1, "shade": 1, "languag": [1, 196], "kernel": [1, 65, 66], "written": 1, "help": [1, 3, 244], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6], "cpp": 1, "algorithm": [1, 187], "launch": 1, "exactli": [1, 3], "mani": [1, 156, 200, 201, 202, 208], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 185, 187, 191, 193, 239], "assign": [1, 180], "axpby_gener": 1, "buffer": 1, "constant": [1, 3, 6, 125, 190, 204, 205, 211, 228], "4": [1, 3, 72, 128, 129, 145, 196, 210, 243, 244], "5": [1, 2, 3, 6, 130, 190, 227], "x_stride": 1, "6": [1, 3, 145, 190, 219, 220, 228, 243], "y_stride": 1, "7": [1, 3, 128], "ndim": 1, "8": [1, 3, 6, 128, 182, 183, 184, 185, 186, 190, 196, 243, 244], "uint": 1, "index": [1, 7, 9, 24, 79, 80, 97, 126, 165, 166, 173], "thread_position_in_grid": 1, "convert": [1, 57, 210, 243], "instanti": [1, 4], "uniqu": [1, 240], "host": 1, "name": [1, 105, 128, 145, 146, 189, 199, 204], "identifi": [1, 192, 242], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "bflot16": 1, "compil": [1, 6], "mlx_ext": 1, "metallib": 
[1, 6], "see": [1, 3, 4, 6, 8, 28, 29, 30, 31, 32, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 199, 203, 210, 214, 218, 219, 220, 232, 244], "later": [1, 6], "co": 1, "locat": [1, 244], "share": [1, 5, 72, 128, 129], "register_librari": 1, "potenti": 1, "path": [1, 6, 145, 146], "tri": 1, "load": [1, 4], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 26, 37, 57, 130, 135, 138, 139, 176, 192, 193, 242], "why": [1, 3], "packag": [1, 2, 4], "process": [1, 3, 67, 193, 202, 242], "logic": [1, 111], "grid": 1, "shown": 1, "below": [1, 170, 172, 196], "prepar": [1, 3], "carri": 1, "should": [1, 2, 3, 4, 6, 77, 101, 128, 151, 166, 173, 175, 180, 192, 199, 200, 201, 208, 215, 242, 245], "d": [1, 3, 113, 165, 170, 171, 172, 182, 184, 186, 194, 244], "ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 113, 199, 243, 244], "sure": [1, 3, 6, 199], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 67, 97, 105, 144, 145, 146, 173, 192, 194, 221, 222, 223, 224, 225, 226, 227, 228], "encod": [1, 213], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 199], "decelar": 1, "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": 1, "than": [1, 3, 57, 67, 98, 99, 102, 103, 113, 187, 193, 213, 217, 227, 234, 244], "max": [1, 115, 186, 228, 230, 244], "allow": [1, 180, 188, 199, 243], "tgp_size": 1, "min": [1, 118, 230], "maxtotalthreadsperthreadgroup": 1, "3d": 1, "mtl": 1, "group_dim": 1, "grid_dim": 1, "divd": 1, "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 151, 243], "thing": [1, 3], "note": [1, 3, 6, 13, 65, 66, 88, 89, 128, 131, 199], "befor": [1, 3, 6, 24, 126, 144], "move": [1, 119, 244], "track": [1, 199], "activ": [1, 6, 207, 217, 229, 234, 235], "command": [1, 6], "instead": [1, 199], "end_encod": 1, "end": [1, 128, 214, 217, 227, 232, 234], "until": [1, 243], 
"limit": [1, 63], "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 145, 146], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3], "far": [1, 239], "built": [1, 6], "includ": [1, 210, 243, 245], "forward": [1, 173], "diff": 1, "push": 1, "along": [1, 22, 23, 64, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 154, 156, 160, 165, 166, 199], "primtiv": 1, "similarli": [1, 6, 113], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 169], "arg": [1, 3, 8, 47, 58, 77, 145, 146, 151], "push_back": 1, "fulli": [1, 5, 244], "primitv": 1, "overal": 1, "directori": [1, 3, 6], "extens": [1, 196], "h": [1, 65, 66, 201], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 9, 26, 180, 199], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6], "strucutr": 1, "hold": [1, 3, 8, 188], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 180, 187, 244], "compon": [1, 3], "etc": [1, 128, 199], "becom": 1, "pybind11_modul": 1, "m": [1, 6, 80, 170, 182], "doc": [1, 4], "sampl": [1, 2, 3, 104, 130, 131, 132, 135, 138, 139, 228, 240], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 176, 177, 178, 179, 192, 193, 203, 208, 221, 222, 223, 224, 225, 226, 227, 228], "r": [1, 3, 173], "pbdoc": 1, "most": [1, 131, 199], "complex": [1, 88, 89, 90, 91, 92, 130, 135, 138, 
139, 192, 199], "addit": [1, 3, 11, 180, 204, 205, 208, 211], "bell": 1, "whistl": 1, "liter": 1, "string": 1, "modul": [1, 3, 4, 181, 210, 215, 242], "ensur": 1, "caster": 1, "find_packag": 1, "config": 1, "add_librari": 1, "sourc": [1, 119, 169], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, "target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 173, 221, 222, 223, 224, 225, 226, 227], "destin": [1, 119], "automat": [1, 5, 243, 244], "practic": 1, "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 151], "describ": 1, "util": [1, 3, 5, 6, 145, 199], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 196], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, "package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 56, 59, 60, 77, 112, 114, 116, 117, 127, 163, 174, 177, 191, 192, 193, 196, 204, 206, 208, 210, 213], "python_requir": 1, "even": [1, 3], "though": [1, 3], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 24, 95, 126, 128, 204, 205, 208, 227, 244], "plai": [1, 3], "ones": [1, 3, 124, 145, 151, 170, 210], "b": [1, 3, 11, 13, 60, 73, 74, 95, 98, 99, 102, 103, 110, 113, 115, 118, 120, 128, 162, 173, 206, 243, 244], "f": [1, 2, 4, 185, 199], "item": [1, 2, 3, 4, 193, 243], "true": [1, 2, 3, 60, 154, 177, 192, 193, 196, 199, 200, 201, 204, 205, 206, 210, 213], "quick": [1, 5], "benchmark": 1, "compar": [1, 60], "time": [1, 3, 6, 151, 199, 244], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 244], "random": [1, 2, 3, 4, 5, 244, 245], 
"normal": [1, 2, 3, 138, 189, 204, 205, 211, 244], "bench": 1, "warm": 1, "rang": [1, 2, 3, 4, 6, 15, 104, 219, 220, 239, 240, 244], "100": [1, 2, 3, 244], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4], "custom": 1, "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 182, 183, 184, 185, 186, 190], "awai": [1, 3], "good": [1, 6, 244], "nn": [1, 3, 4, 145, 193, 199, 239], "grad": [1, 2, 4, 173, 239, 243], "simplifi": 1, "full": [1, 4, 47, 58, 67, 154], "implement": [2, 4, 182, 183, 184, 185, 186, 187, 188, 189, 190, 202, 208, 213, 215, 217, 234], "basic": 2, "model": [2, 4, 5, 145, 180, 181, 193, 199, 208, 239], "problem": [2, 4, 199], "metadata": 2, "num_featur": 2, "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 193, 240], "sgd": [2, 4, 187, 239], "lr": [2, 187], "01": [2, 185], "rate": [2, 182, 183, 184, 185, 186, 187, 190, 191], "ll": [2, 4], "synthet": 2, "dataset": 2, "matrix": [2, 72, 80, 100, 113, 128, 129, 210], "ground": [2, 3, 227], "truth": [2, 227], "w_star": 2, "valu": [2, 3, 10, 15, 22, 23, 37, 57, 60, 63, 77, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 96, 104, 125, 130, 131, 132, 134, 135, 138, 139, 165, 166, 173, 176, 180, 181, 185, 189, 192, 193, 196, 208, 217, 221, 222, 224, 225, 226, 227, 234], "gaussian": [2, 203, 218, 219, 220], "nois": 2, "exampl": [2, 3, 4, 15, 165, 199, 221, 239, 240, 243], "noisi": 2, "label": [2, 222], "ep": [2, 182, 183, 184, 185, 186, 190, 204, 205, 211, 228], "1e": [2, 4, 13, 182, 183, 184, 185, 186, 190, 204, 205, 211, 228], "us": [2, 3, 4, 5, 6, 15, 72, 93, 113, 128, 129, 141, 180, 182, 184, 185, 186, 187, 188, 192, 199, 202, 203, 206, 208, 210, 213, 219, 220, 239, 240, 242, 243, 244], "weight": [2, 65, 66, 180, 185, 187, 191, 193, 199, 210, 222], "squar": [2, 3, 100, 143, 157, 173, 182, 184, 185, 186, 193, 199, 211, 225, 227], "loss": [2, 4, 173, 199, 239], "loss_fn": [2, 4, 239], "w": [2, 66, 72, 128, 129, 173, 191, 201, 206], "mean": [2, 3, 4, 173, 199, 204, 211, 221, 222, 
223, 224, 225, 226, 227, 228], "grad_fn": 2, "initi": [2, 3, 180, 199, 204, 205, 211], "randomli": [2, 3], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 199, 240, 244], "verifi": 2, "close": [2, 5, 13], "error_norm": 2, "5f": 2, "someth": [2, 3], "00005": 2, "00364": 2, "complet": [2, 3, 6, 244], "logist": [2, 149, 216, 219, 220, 233], "github": [2, 4, 6], "repo": [2, 4, 6], "enabl": [3, 6, 77, 191], "larg": [3, 199], "ish": 3, "transform": [3, 5, 77, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 144, 181, 199, 204, 205, 206, 210], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 24, 103, 126, 213, 227], "200": 3, "line": 3, "python": [3, 37, 50, 57, 77, 180, 192, 193, 194, 242], "neural": [3, 5, 180, 190, 202, 207, 229], "network": [3, 5, 180, 190, 202], "build": [3, 5, 180], "concis": 3, "architectur": [3, 199, 244], "notabl": 3, "rope": [3, 199], "posit": [3, 24, 97, 119, 126, 173, 193, 199, 200, 201, 208, 213, 228], "option": [3, 12, 14, 15, 22, 23, 24, 25, 26, 31, 32, 64, 65, 66, 67, 72, 77, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 96, 97, 100, 104, 112, 114, 116, 117, 123, 125, 126, 127, 128, 129, 130, 131, 132, 134, 135, 137, 138, 139, 141, 144, 154, 155, 156, 159, 160, 163, 165, 166, 169, 170, 171, 172, 173, 174, 176, 178, 182, 183, 184, 185, 186, 187, 190, 191, 192, 193, 200, 201, 206, 208, 210, 213, 221, 222, 223, 224, 225, 226, 227, 228, 240, 245], "kei": [3, 130, 131, 132, 134, 135, 137, 138, 139, 189, 192, 193, 208, 240, 242], "cach": 3, "concaten": 3, "project": [3, 208], "llamaattent": 3, "self": [3, 4, 7, 9, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 57, 58, 59, 180, 199, 207, 229], "dim": [3, 202, 204, 205, 208, 211, 213], "num_head": [3, 208], "super": [3, 4, 180, 199], "tradit": [3, 213], "query_proj": 3, "bia": [3, 72, 128, 129, 184, 185, 186, 193, 199, 200, 201, 206, 208, 210], "key_proj": 3, "value_proj": 3, "out_proj": [3, 180], "__call__": [3, 4, 
180, 199], "queri": [3, 208], "mask": [3, 208], "extract": [3, 180, 199], "l": [3, 4, 199, 200, 227], "reshap": 3, "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 75, 182, 183, 184, 185, 190, 204, 205, 211], "score": 3, "softmax": [3, 222], "values_hat": 3, "rm": 3, "swiglu": 3, "rmsnorm": [3, 199], "llamaencoderlay": 3, "mlp_dim": 3, "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 216, 219, 220, 221, 233], "instanc": [3, 128, 180, 194, 199, 215], "embed": [3, 199], "emb": [3, 202], "token": [3, 202], "num_lay": [3, 4, 239], "vocab_s": 3, "norm": [3, 186, 187, 204, 228], "multiheadattent": [3, 199], "create_additive_causal_mask": 3, "list": [3, 8, 12, 14, 26, 29, 30, 40, 41, 42, 43, 45, 50, 53, 56, 57, 59, 61, 64, 77, 79, 82, 83, 85, 86, 88, 89, 91, 92, 96, 97, 101, 112, 114, 116, 117, 123, 125, 127, 130, 131, 132, 134, 135, 138, 139, 141, 154, 156, 159, 160, 163, 169, 173, 174, 175, 178, 180, 184, 185, 186, 187, 192, 194, 199, 242], "still": [3, 6], "consid": [3, 13, 60, 192, 193, 204, 242], "train": [3, 4], "ignor": [3, 63], "whatsoev": 3, "rest": [3, 193, 213], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 240], "temp": 3, "causal": 3, "save": [3, 105, 128, 145, 146], "append": [3, 113], "store": 3, "per": [3, 4, 72, 128, 129, 188, 204, 205, 211], "care": 3, "last": [3, 25, 57, 83, 86, 88, 89, 91, 92, 93, 113, 131, 155, 200, 201, 204], "logit": [3, 131, 221, 222], "next": [3, 4], "categor": 3, "lazili": [3, 199], "noth": [3, 199], "yet": [3, 180, 199, 243], "forc": [3, 4, 199, 243], "choos": [3, 213], "pars": 3, "feed": 3, "loop": [3, 4], "unsqueez": 3, "sequenc": [3, 200, 240, 244], "length": [3, 159, 200], "len": [3, 83, 86, 89, 92], "overwrit": 3, "discard": [3, 192], "old": 3, "moment": [3, 184, 185, 186], "anymor": 3, "everyth": 3, "small": [3, 151, 204, 205, 211, 228, 244], "10": [3, 4, 107, 142, 145, 151, 193, 199], "12": 3, "8192": 3, "1024": 3, "actual": [3, 15, 180], "materi": [3, 5], "could": 
[3, 199], "20_000": 3, "machin": [3, 5, 6, 190], "8gb": 3, "ram": 3, "32": [3, 4, 128, 129, 196], "44": 3, "doubl": 3, "bracket": 3, "becaus": [3, 199], "batch": [3, 113, 200, 201, 208], "zip": [3, 4], "haven": 3, "anyth": [3, 173], "result": [3, 15, 57, 72, 105, 113, 129, 142, 160, 177, 193], "similar": [3, 193, 208], "runtim": 3, "section": [3, 156, 228], "access": [3, 37, 180, 199, 244], "origin": [3, 182, 183, 184, 186, 187], "sentencepiec": 3, "pytorch": [3, 5, 204], "compat": [3, 131], "npz": [3, 105, 145, 146], "file": [3, 6, 105, 144, 145, 146], "directli": 3, "argpars": 3, "itertool": [3, 193], "starmap": [3, 193], "np": [3, 4, 243], "torch": 3, "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": [3, 227], "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 47, 58, 204], "submodul": [3, 4, 199], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 188, 189, 199, 239, 240], "savez": 3, "k": [3, 80, 170, 171, 172], "v": [3, 67, 199], "left": [3, 128, 203, 213, 219, 220, 228], "disk": 3, "text": [3, 187, 207, 214, 217, 227, 228, 229, 230, 232, 234], "format": [3, 105, 144, 145, 146], "oper": [3, 5, 33, 151, 154, 161, 166, 187, 199, 243, 244, 245], "dictionari": [3, 188, 189, 192, 199, 242], "represent": [3, 128, 192, 194], "tree_unflatten": 3, "helper": 3, "weight_fil": 3, "incur": 3, "sever": [3, 65, 66, 145, 146], "futur": [3, 210], "pth": 3, "current": [3, 5, 6, 65, 66, 128, 199], "around": 3, "m1": [3, 244], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, "ago": 3, "never": 3, "long": 3, "info": 3, "247": 3, "press": 3, "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], "purs": 3, "greater": [3, 24, 99, 126, 217, 234], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": 3, "off": [3, 6], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 
193], "hi": 3, "bundl": 3, "hard": 3, "wet": 3, "he": 3, "were": [3, 244], "cry": 3, "watch": 3, "him": 3, "observ": 3, "numer": [3, 110, 112, 154, 182, 183, 184, 185, 186, 190, 204, 205, 211, 228], "crowd": 3, "wa": [3, 189], "hurri": 3, "437": 3, "330": 3, "second": [3, 113, 164, 173, 184, 185, 186, 244], "spent": 3, "amount": 3, "39": 3, "ms": 3, "By": 3, "bigger": 3, "remain": [3, 173], "almost": 3, "nobodi": 3, "took": 3, "least": [3, 63, 128], "notic": 3, "distanc": [3, 228], "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 156], "ey": 3, "speak": 3, "resum": 3, "postur": 3, "stood": 3, "feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": 3, "quarter": 3, "hour": 3, "made": 3, "immedi": 3, "repli": 3, "again": [3, 199], "hand": 3, "did": 3, "accustom": 3, "thu": [3, 199], "question": 3, "reason": 3, "tell": 3, "understand": 3, "579": 3, "690": 3, "num": [3, 104, 137], "500": [3, 244], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 199], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": 3, "kind": 3, "longer": [3, 67], "soon": 3, "unless": [3, 180], "unlik": [3, 13], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": [3, 63], "happi": 3, "ask": 3, "Is": 3, "shop": 3, "bui": 3, "food": 3, "633": 3, "21": 3, "475": 3, "su": 3, "j": [3, 6, 183, 184, 186], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": 3, "enhanc": 3, "rotari": [3, 213], "arxiv": [3, 182, 187, 204, 205, 207, 211, 213, 229], "preprint": [3, 182, 187], "2104": [3, 213], "09864": [3, 213], "zhang": 3, "sennrich": 3, "2019": [3, 185], "root": [3, 143, 157, 211], "advanc": 3, "inform": [3, 4, 199, 203, 208, 244], "system": 3, "shazeer": 3, "2020": 3, "glu": 3, "variant": [3, 186, 227], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 165, 199], "mlp": [4, 199, 239], "inherit": [4, 242], "standard": [4, 
37, 57, 113, 132, 243], "idiom": 4, "input_dim": [4, 199, 206, 210], "hidden_dim": [4, 180, 239], "output_dim": [4, 199, 206, 210], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 22, 63, 180, 199, 212, 219, 220, 231], "cross": [4, 221, 222], "entropi": [4, 221, 222], "sub": [4, 137], "commonli": 4, "cross_entropi": [4, 199], "accuraci": 4, "valid": [4, 67, 176, 192, 242], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 239], "batch_siz": [4, 239], "num_epoch": [4, 239], "learning_r": [4, 182, 183, 184, 185, 186, 187, 190, 191, 239], "train_imag": [4, 239], "train_label": [4, 239], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 239], "perm": 4, "permut": 4, "id": [4, 6], "put": 4, "trainabl": [4, 180, 181, 199], "loss_and_grad_fn": [4, 239], "value_and_grad": [4, 180, 199, 239, 243], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 80, 87, 88, 90, 91, 92, 204], "featur": [5, 65, 66, 204, 205, 206, 210, 211, 213], "main": [5, 80, 193, 199], "differ": [5, 162, 227], "lazi": [5, 180, 243], "multi": [5, 200, 201], "cpu": [5, 244], "gpu": [5, 244], "inspir": 5, "jax": [5, 240], "arrayfir": 5, "noteabl": 5, "unifi": 5, "live": [5, 244], "guid": 5, "regress": 5, "layer": [5, 180, 199, 204, 205, 206, 210, 215, 235], "perceptron": 5, "llm": 5, "infer": [5, 96], "fft": 5, "tree": [5, 77, 97, 151, 173, 176, 188, 192, 193, 194], "develop": [5, 6], "document": [5, 47, 58], "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": [6, 187], "14": 6, "sonoma": 6, "distribut": [6, 130, 131, 132, 134, 138, 139, 223, 226, 228], "probabl": [6, 135, 210, 223, 244], "platform": 6, "processor": 6, "arm": [6, 196], "i386": 6, "switch": 6, "conda": 6, "17": 6, "g": [6, 128, 190, 191, 235, 245], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": 6, "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 136, 240], "env": 6, 
"cmake_build_parallel_level": 6, "edit": 6, "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "p": [6, 130, 184, 186, 199, 228], "either": [6, 11, 47, 57, 58, 63, 73, 74, 95, 98, 99, 102, 103, 110, 113, 115, 118, 120, 162, 173, 215], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 113, 120, 128, 129, 208], "wish": 6, "environ": 6, "variabl": [6, 97, 101, 173, 175, 176], "export": 6, "developer_dir": 6, "app": 6, "content": 6, "sdk": 6, "xcrun": 6, "macosx": 6, "show": [6, 196], "unabl": 6, "tool": 6, "select": [6, 177], "sudo": 6, "devicetyp": 7, "attribut": [7, 8, 9, 26, 180], "kwarg": [8, 145, 146, 245], "union": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 134, 135, 137, 138, 139, 140, 141, 142, 143, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 177, 178, 179, 201], "wise": [10, 11, 16, 17, 18, 19, 20, 21, 62, 68, 69, 73, 74, 75, 76, 78, 94, 95, 98, 99, 102, 103, 106, 107, 108, 109, 110, 111, 115, 118, 120, 121, 140, 143, 149, 150, 152, 153, 157, 158, 162, 167, 168, 207, 216, 229, 230, 233], "absolut": [10, 13, 219, 220, 227], "semant": [11, 61, 73, 74, 98, 99, 102, 103, 110, 113, 115, 118, 120, 162, 244], "keepdim": [12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 56, 59, 112, 114, 116, 117, 127, 154, 163, 174], "reduct": [12, 14, 112, 114, 117, 127, 221, 222, 223, 224, 225, 226, 227, 228], "reduc": [12, 
14, 22, 23, 112, 114, 116, 117, 127, 151, 163, 174], "unspecifi": [12, 14, 15, 22, 23, 24, 25, 64, 96, 112, 114, 116, 117, 123, 126, 127, 154, 155, 163, 165, 174, 178, 245], "entir": [12, 14, 22, 23, 112, 114, 116, 117, 127, 163, 174], "singleton": [12, 14, 22, 23, 112, 113, 114, 116, 117, 127, 163, 174], "rtol": 13, "05": [13, 204, 205, 211], "atol": 13, "08": [13, 183, 184, 185, 186, 190], "approxim": [13, 151, 203, 218, 219, 220], "comparison": [13, 74, 98, 99, 102, 103], "equal": [13, 24, 60, 80, 99, 103, 126, 135, 156], "ab": [13, 173, 204, 205, 207, 211, 213, 229], "array_equ": 13, "rel": 13, "toler": 13, "boolean": [13, 60, 111, 196], "interv": [15, 104, 135, 139], "increment": 15, "otherwis": [15, 192, 193, 217, 227, 234], "int32": [15, 135, 196, 243], "convent": [15, 67, 185], "lead": 15, "fraction": 15, "integr": [15, 165], "invers": [16, 17, 18, 19, 20, 21, 76, 84, 85, 86, 87, 88, 89], "cosin": [16, 17, 68, 69], "hyperbol": [17, 19, 21, 69, 153, 168], "sine": [18, 19, 152, 153], "minimum": [22, 23, 63], "kth": [24, 126], "partit": 24, "order": [24, 126, 128, 199, 204, 215], "undefin": [24, 126], "sort": [24, 25, 126], "partiton": 24, "flatten": [24, 25, 126, 155, 165, 166, 192], "dimension": [26, 81, 82, 83, 84, 85, 86, 90, 91, 92, 200, 201, 202, 206, 210], "val": [26, 96], "tupl": [26, 47, 58, 64, 66, 77, 79, 101, 125, 128, 141, 159, 173, 175, 184, 185, 186, 187, 192, 193, 194, 201, 215, 242], "ndarrai": [26, 243], "properti": [27, 35, 44, 50, 52], "argument": [27, 47, 58, 77, 97, 144, 173, 193, 199, 240, 244, 245], "elment": 52, "indices_or_sect": [53, 156], "nest": [57, 180, 199, 242], "correpsond": 57, "ddof": [59, 174], "equal_nan": 60, "nan": 60, "pad": [65, 66, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 200, 201], "dilat": [65, 66], "group": [65, 66, 72, 128, 129, 204, 210], "1d": [65, 67, 166], "convolut": [65, 66, 67, 200, 201], "channel": [65, 66, 200, 201], "c_in": [65, 66], "c_out": [65, 66], "convolv": [65, 66], "2d": [66, 128], 
"spatial": [66, 204], "symmetr": 66, "discret": [67, 81, 82, 83, 84, 85, 86, 90, 91, 92, 202], "swap": [67, 164, 210], "conv": 67, "filter": [67, 200, 201], "flip": 67, "signal": 67, "divis": [73, 95, 128], "quotient": [73, 95], "mathrm": [75, 149], "frac": [75, 128, 149, 182, 183, 184, 185, 186, 190, 204, 205, 211], "pi": 75, "int_0": 75, "dx": 75, "erf": 76, "retain_graph": [77, 144], "node": [77, 151, 176], "dict": [77, 105, 145, 180, 242], "leaf": [77, 192, 193], "preserv": [77, 141], "intend": 77, "control": [77, 240], "flow": [77, 161], "exponenti": [78, 214, 232], "insert": [79, 244], "ident": [80, 161], "diagon": [80, 170, 171, 172], "zero": [80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 170, 171, 172, 179, 199], "th": 80, "whose": [80, 181], "One": [81, 84, 90, 143], "fourier": [81, 82, 83, 84, 85, 86, 90, 91, 92], "truncat": [81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 138], "dft": [81, 82, 83, 84, 85, 86, 90, 91, 92], "rfft": 87, "real": [87, 88, 89, 90, 91, 92], "rfft2": 88, "rfftn": 89, "silent": [90, 91, 92], "start_axi": 93, "end_axi": 93, "fun": [97, 101, 173, 175, 176, 244], "argnam": [97, 173], "cpp_function": [97, 173, 176], "neither": [97, 173], "keyword": [97, 145, 146, 173, 193, 199, 240, 245], "strict": [98, 102], "binari": [105, 144, 145, 146, 217, 221, 234], "npy": [105, 144], "natur": [106, 108], "logarithm": [106, 107, 108, 109], "log": [108, 110, 112, 223, 226], "plu": 108, "exp": [110, 112, 132, 154, 214, 223, 232, 244], "stabl": [110, 112, 154], "prepend": 113, "remov": [113, 131, 159], "anoth": [63, 113, 162, 177, 199, 244], "negat": 121, "pad_with": 125, "constant_valu": 125, "pad_width": 125, "edg": [63, 125], "before_1": 125, "after_1": 125, "before_2": 125, "after_2": 125, "before_n": 125, "after_n": 125, "integ": [95, 125, 128, 129, 130, 135, 156, 176, 196, 202], "before_i": 125, "after_i": 125, "extend": 125, "side": 125, "smaller": [126, 187], "prng": [130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 240], 
"num_sampl": 131, "unnorm": [131, 221, 222], "draw": 131, "uint32": [131, 196], "cdf": [132, 203, 218], "accord": [132, 177, 208], "seed": 133, "low": [135, 139], "high": [135, 139, 199, 202], "lower": [128, 135, 138, 139, 170], "upper": [128, 135, 138, 139], "bound": [135, 138, 139, 203, 244], "roadcast": 135, "domain": 138, "optino": 138, "uniformli": 139, "reciproc": 143, "arr": 144, "retain": 144, "dure": 144, "uncompress": 145, "my_path": 145, "tree_flatten": [145, 193, 194, 199], "transformerencod": 145, "128": [145, 199], "flat_param": 145, "compress": 146, "simplif": 151, "reus": 151, "consumpt": 151, "meant": 151, "everi": [128, 151, 193], "overhead": [151, 244], "1m": 151, "thousand": 151, "foo": 151, "matmul": [151, 244], "twice": [151, 244], "subarrai": 156, "being": [161, 199], "prevent": [161, 228], "unchang": [161, 213], "axis1": 164, "axis2": 164, "taken": 165, "prior": [165, 166], "equial": 165, "exclud": 166, "elsewher": 170, "col": 170, "triangl": 170, "mse": 173, "param": [173, 199], "lvalu": 173, "dlvalu": 173, "dparam": 173, "lasso": 173, "l1": [173, 224, 227], "varianc": [174, 204], "divisor": 174, "cotang": 175, "in_ax": 176, "out_ax": 176, "prefix": [176, 192], "subclass": 180, "arbitrari": [180, 192], "recurs": [180, 189, 199], "concept": 180, "frozen": [180, 199, 210], "freez": [180, 199], "mymlp": 180, "in_dim": [180, 199], "out_dim": [180, 199], "16": [180, 196], "in_proj": 180, "fn": [181, 193, 243], "callabl": [181, 192, 193, 215], "wrt": 181, "rho": 182, "9": [182, 184, 185, 186, 187], "06": [182, 228], "paper": [182, 183, 184, 186, 187], "zeiler": 182, "2012": [182, 190], "adapt": [182, 183], "1212": 182, "5701": 182, "v_": [182, 183, 184, 185, 186, 190, 191], "v_t": [182, 183, 184, 185, 186, 190, 191], "g_t": [182, 183, 184, 185, 186, 187, 190, 191], "delta": 182, "w_": [182, 183, 184, 185, 186, 187, 190, 191], "u_t": 182, "epsilon": [182, 183, 184, 185, 186, 190, 204, 205, 211], "u_": 182, "w_t": [182, 183, 184, 185, 186, 187, 
190, 191], "lambda": [182, 183, 184, 185, 186, 187, 190, 191, 193, 199, 214, 232], "coeffici": [182, 184, 185, 186, 187], "averag": [182, 184, 185, 186], "denomin": [182, 183, 184, 185, 186, 190], "stabil": [182, 183, 184, 185, 186, 190, 204, 205, 211], "ddefault": 182, "duchi": 183, "hazan": 183, "singer": 183, "2011": 183, "subgradi": 183, "onlin": 183, "stochast": [183, 184, 186, 191], "jmlr": 183, "999": [184, 185, 186], "omit": [184, 186], "estim": [184, 186], "kingma": [184, 186], "ba": [184, 186], "2015": [184, 186], "iclr": [184, 185, 186], "m_": [184, 185, 186, 187], "beta_1": [184, 185, 186, 187], "m_t": [184, 185, 186, 187], "beta_2": [184, 185, 186, 187], "weight_decai": [185, 187, 191], "contrast": [185, 189], "loshchilov": 185, "hutter": 185, "decoupl": 185, "decai": [185, 187, 191], "regular": [185, 207, 229], "adam": [186, 187], "infin": 186, "basi": 188, "appli": [188, 193, 200, 201, 203, 204, 205, 206, 207, 210, 211, 212, 214, 216, 217, 218, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234], "optimizerst": 188, "defaultdict": 189, "miss": 189, "present": 189, "99": [187, 190], "tieleman": 190, "hinton": 190, "lectur": 190, "coursera": 190, "smooth": [190, 222, 227], "momentum": [187, 191], "dampen": 191, "nesterov": 191, "descent": 191, "mu": 191, "tau": 191, "strength": [187, 191], "l2": 191, "penalti": 191, "is_leaf": [192, 193], "dot": [192, 208], "notat": [72, 192], "depth": 192, "hello": [192, 194], "charact": 192, "flat": [192, 194], "superset": 193, "extra": 193, "closer": 193, "dict_kei": 193, "recreat": 194, "world": 194, "42": 194, "tabl": [196, 202], "byte": 196, "bool_": 196, "uint8": 196, "unsign": [128, 129, 196], "uint16": 196, "int8": 196, "sign": [187, 196], "int16": 196, "int64": 196, "64": [72, 128, 129, 196, 210], "arbitrarili": [199, 242, 243], "done": 199, "manual": 199, "explicitli": [199, 240], "solv": 199, "intuit": 199, "finetun": 199, "enumer": 199, "caus": 199, "local": 199, "scope": 199, "l2_loss": 
199, "y_hat": 199, "trainable_paramet": 199, "loss_and_grad": 199, "workhors": 199, "Its": 199, "individu": 199, "subset": 199, "action": 199, "displai": 199, "tree_map": 199, "count": 199, "num_param": 199, "preclud": 199, "pure": [199, 239], "pattern": 199, "achiev": 199, "other_input": 199, "necessari": 199, "wrap": 199, "relu": 199, "prelu": 199, "gelu": [199, 219, 220], "silu": 199, "selu": 199, "mish": 199, "conv1d": 199, "conv2d": 199, "layernorm": 199, "groupnorm": 199, "sequenti": 199, "gelu_approx": [199, 203, 218], "gelu_fast_approx": [199, 203, 218], "binary_cross_entropi": 199, "l1_loss": 199, "mse_loss": 199, "nll_loss": 199, "kl_div_loss": 199, "in_channel": [200, 201], "out_channel": [200, 201], "kernel_s": [200, 201], "nlc": 200, "learnabl": [200, 201, 215], "nhwc": 201, "height": 201, "width": [201, 210], "num_embed": 202, "lookup": 202, "typic": [202, 239], "usual": [202, 242], "vocabulari": 202, "approx": 203, "unit": [203, 212, 214, 216, 218, 219, 220, 231, 232, 233], "textrm": [128, 203, 218], "phi": [203, 218], "geluapprox": 203, "sigma": [203, 216, 219, 220, 233], "60033": [203, 219], "0433603": [203, 219], "gelufast": 203, "773": [203, 220], "regard": 203, "num_group": 204, "affin": [204, 205, 206, 210], "pytorch_compat": 204, "var": [204, 205], "gamma": [204, 205, 211], "particular": [128, 204], "split": 204, "preced": 204, "http": [204, 205, 207, 211, 213, 229], "org": [204, 205, 207, 211, 213, 229], "1803": 204, "08494": 204, "1607": 205, "06450": 205, "monoton": [207, 229], "refer": [207, 229], "1908": [207, 229], "08681": [207, 229], "tanh": [207, 229], "softplu": [207, 229], "query_input_dim": 208, "key_input_dim": 208, "value_input_dim": 208, "value_dim": 208, "value_output_dim": 208, "attent": 208, "head": 208, "aggreg": 208, "linearli": 208, "bias": [72, 128, 129, 208], "inf": 208, "neg": [208, 226, 228], "attend": 208, "num_paramet": 209, "init": 209, "25": 209, "1910": 211, "07467": 211, "rectifi": [212, 231], "10000": 213, 
"rotat": 213, "consecut": [128, 213], "larger": [187, 213], "slightli": [213, 244], "angular": 213, "frequenc": 213, "begin": [128, 214, 217, 227, 232, 234], "leq": [214, 232], "0507": [214, 232], "67326": [214, 232], "elu": [214, 232], "plain": 215, "known": [216, 233], "swish": [216, 233], "cdot": [216, 219, 220, 233], "threshold": [217, 227, 234], "geq": [217, 234], "faster": 218, "exact": [219, 220], "0003": 219, "015": 220, "predict": [221, 222, 223, 224, 225, 226, 227], "post": [], "612192": 221, "kullback": 223, "leibler": 223, "diverg": 223, "likelihood": 226, "nll": 226, "subsequ": 239, "implicit": 240, "fine": 240, "grain": 240, "manag": [240, 244], "uniform": [240, 244], "pseudo": 240, "altern": 240, "splittabl": 240, "threefri": 240, "counter": 240, "cycl": 242, "inspect": 243, "composit": 243, "sin": 243, "pool": 244, "advantag": 244, "don": 244, "parallel": 244, "race": 244, "interest": 244, "albeit": 244, "contriv": 244, "suppos": 244, "d1": 244, "d2": 244, "4096": 244, "dens": 244, "better": 244, "millisecond": 244, "measur": 244, "default_stream": 245, "default_devic": 245, "my_devic": 245, "decim": [48, 142], "a_min": 63, "a_max": 63, "At": 63, "group_siz": [72, 128, 129, 210], "configur": 72, "formal": [72, 128], "quantiz": [72, 129, 210], "w_i": [72, 128], "hat": [72, 128], "occupi": [72, 128, 129], "floor": 95, "divid": [95, 128], "50": 104, "evenli": 104, "w_1": 128, "w_g": 128, "align": 128, "max_i": 128, "min_i": 128, "round": 128, "pack": [128, 129], "1st": 128, "signific": 128, "2nd": 128, "dequant": 128, "w_q": 128, "bascial": 142, "tend": 187, "10x": 187, "adamw": 187, "maintain": 187, "wd": 187, "chen": 187, "symbol": 187, "discoveri": 187, "2302": 187, "06675": 187, "c_": 187, "eta": 187, "c_t": 187, "constitut": 193, "quantizedlinear": 199, "smooth_l1_loss": 199, "triplet_loss": 199, "chang": [210, 227], "classmethod": 210, "from_linear": 210, "quantize_modul": 210, "pre": 221, "105361": 221, "223144": 221, "20397": 221, "916291": 
221, "label_smooth": 222, "formula": 227, "anchor": 228, "margin": 228, "triplet": 228, "l_": 228, "_p": 228, "degre": 228, "pairwis": 228, "instabl": 228, "tensor": 228}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [9, 0, 1, "", "Stream"], [10, 2, 1, "", "abs"], [11, 2, 1, "", "add"], [12, 2, 1, "", "all"], [13, 2, 1, "", "allclose"], [14, 2, 1, "", "any"], [15, 2, 1, "", "arange"], [16, 2, 1, "", "arccos"], [17, 2, 1, "", "arccosh"], [18, 2, 1, "", "arcsin"], [19, 2, 1, "", "arcsinh"], [20, 2, 1, "", "arctan"], [21, 2, 1, "", "arctanh"], [22, 2, 1, "", "argmax"], [23, 2, 1, "", "argmin"], [24, 2, 1, "", "argpartition"], [25, 2, 1, "", "argsort"], [26, 0, 1, "", "array"], [60, 2, 1, "", "array_equal"], [61, 2, 1, "", "broadcast_to"], [62, 2, 1, "", "ceil"], [63, 2, 1, "", "clip"], [64, 2, 1, "", "concatenate"], [65, 2, 1, "", "conv1d"], [66, 2, 1, "", "conv2d"], [67, 2, 1, "", "convolve"], [68, 2, 1, "", "cos"], [69, 2, 1, "", "cosh"], [70, 2, 1, "", "default_device"], [71, 2, 1, "", "default_stream"], [72, 2, 1, "", "dequantize"], [73, 2, 1, "", "divide"], [74, 2, 1, "", "equal"], [75, 2, 1, "", "erf"], [76, 2, 1, "", "erfinv"], [77, 2, 1, "", "eval"], [78, 2, 1, "", "exp"], [79, 2, 1, "", "expand_dims"], [80, 2, 1, "", "eye"], [93, 2, 1, "", "flatten"], [94, 2, 1, "", "floor"], [95, 2, 1, "", "floor_divide"], [96, 2, 1, "", "full"], [97, 2, 1, "", "grad"], [98, 2, 1, "", "greater"], [99, 2, 1, "", "greater_equal"], [100, 2, 1, "", "identity"], [101, 2, 1, "", "jvp"], [102, 2, 1, "", "less"], [103, 2, 1, "", "less_equal"], [104, 2, 1, "", "linspace"], [105, 2, 1, "", "load"], [106, 2, 1, "", "log"], [107, 2, 1, "", "log10"], [108, 2, 1, "", "log1p"], [109, 2, 1, "", "log2"], [110, 2, 1, "", "logaddexp"], [111, 2, 1, "", "logical_not"], [112, 2, 1, "", "logsumexp"], [113, 2, 1, "", "matmul"], [114, 2, 1, "", "max"], [115, 2, 1, "", "maximum"], [116, 2, 1, "", "mean"], [117, 2, 1, "", "min"], [118, 2, 1, "", "minimum"], [119, 2, 1, "", 
"moveaxis"], [120, 2, 1, "", "multiply"], [121, 2, 1, "", "negative"], [122, 2, 1, "", "new_stream"], [123, 2, 1, "", "ones"], [124, 2, 1, "", "ones_like"], [125, 2, 1, "", "pad"], [126, 2, 1, "", "partition"], [127, 2, 1, "", "prod"], [128, 2, 1, "", "quantize"], [129, 2, 1, "", "quantized_matmul"], [140, 2, 1, "", "reciprocal"], [141, 2, 1, "", "reshape"], [142, 2, 1, "", "round"], [143, 2, 1, "", "rsqrt"], [144, 2, 1, "", "save"], [145, 2, 1, "", "savez"], [146, 2, 1, "", "savez_compressed"], [147, 2, 1, "", "set_default_device"], [148, 2, 1, "", "set_default_stream"], [149, 2, 1, "", "sigmoid"], [150, 2, 1, "", "sign"], [151, 2, 1, "", "simplify"], [152, 2, 1, "", "sin"], [153, 2, 1, "", "sinh"], [154, 2, 1, "", "softmax"], [155, 2, 1, "", "sort"], [156, 2, 1, "", "split"], [157, 2, 1, "", "sqrt"], [158, 2, 1, "", "square"], [159, 2, 1, "", "squeeze"], [160, 2, 1, "", "stack"], [161, 2, 1, "", "stop_gradient"], [162, 2, 1, "", "subtract"], [163, 2, 1, "", "sum"], [164, 2, 1, "", "swapaxes"], [165, 2, 1, "", "take"], [166, 2, 1, "", "take_along_axis"], [167, 2, 1, "", "tan"], [168, 2, 1, "", "tanh"], [169, 2, 1, "", "transpose"], [170, 2, 1, "", "tri"], [171, 2, 1, "", "tril"], [172, 2, 1, "", "triu"], [173, 2, 1, "", "value_and_grad"], [174, 2, 1, "", "var"], [175, 2, 1, "", "vjp"], [176, 2, 1, "", "vmap"], [177, 2, 1, "", "where"], [178, 2, 1, "", "zeros"], [179, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[9, 1, 1, "", "__init__"]], "mlx.core.array": [[27, 3, 1, "", "T"], [26, 1, 1, "", "__init__"], [28, 1, 1, "", "abs"], [29, 1, 1, "", "all"], [30, 1, 1, "", "any"], [31, 1, 1, "", "argmax"], [32, 1, 1, "", "argmin"], [33, 1, 1, "", "astype"], [34, 1, 1, "", "cos"], [35, 3, 1, "", "dtype"], [36, 1, 1, "", "exp"], [37, 1, 1, "", "item"], [38, 1, 1, "", "log"], [39, 1, 1, "", "log1p"], [40, 1, 1, "", "logsumexp"], [41, 1, 1, "", "max"], [42, 1, 1, "", "mean"], [43, 
1, 1, "", "min"], [44, 3, 1, "", "ndim"], [45, 1, 1, "", "prod"], [46, 1, 1, "", "reciprocal"], [47, 1, 1, "", "reshape"], [48, 1, 1, "", "round"], [49, 1, 1, "", "rsqrt"], [50, 3, 1, "", "shape"], [51, 1, 1, "", "sin"], [52, 3, 1, "", "size"], [53, 1, 1, "", "split"], [54, 1, 1, "", "sqrt"], [55, 1, 1, "", "square"], [56, 1, 1, "", "sum"], [57, 1, 1, "", "tolist"], [58, 1, 1, "", "transpose"], [59, 1, 1, "", "var"]], "mlx.core.fft": [[81, 2, 1, "", "fft"], [82, 2, 1, "", "fft2"], [83, 2, 1, "", "fftn"], [84, 2, 1, "", "ifft"], [85, 2, 1, "", "ifft2"], [86, 2, 1, "", "ifftn"], [87, 2, 1, "", "irfft"], [88, 2, 1, "", "irfft2"], [89, 2, 1, "", "irfftn"], [90, 2, 1, "", "rfft"], [91, 2, 1, "", "rfft2"], [92, 2, 1, "", "rfftn"]], "mlx.core.random": [[130, 2, 1, "", "bernoulli"], [131, 2, 1, "", "categorical"], [132, 2, 1, "", "gumbel"], [133, 2, 1, "", "key"], [134, 2, 1, "", "normal"], [135, 2, 1, "", "randint"], [136, 2, 1, "", "seed"], [137, 2, 1, "", "split"], [138, 2, 1, "", "truncated_normal"], [139, 2, 1, "", "uniform"]], "mlx.nn": [[200, 0, 1, "", "Conv1d"], [201, 0, 1, "", "Conv2d"], [202, 0, 1, "", "Embedding"], [203, 0, 1, "", "GELU"], [204, 0, 1, "", "GroupNorm"], [205, 0, 1, "", "LayerNorm"], [206, 0, 1, "", "Linear"], [207, 0, 1, "", "Mish"], [180, 0, 1, "", "Module"], [208, 0, 1, "", "MultiHeadAttention"], [209, 0, 1, "", "PReLU"], [210, 0, 1, "", "QuantizedLinear"], [211, 0, 1, "", "RMSNorm"], [212, 0, 1, "", "ReLU"], [213, 0, 1, "", "RoPE"], [214, 0, 1, "", "SELU"], [215, 0, 1, "", "Sequential"], [216, 0, 1, "", "SiLU"], [217, 0, 1, "", "Step"], [218, 0, 1, "", "gelu"], [219, 0, 1, "", "gelu_approx"], [220, 0, 1, "", "gelu_fast_approx"], [229, 0, 1, "", "mish"], [230, 0, 1, "", "prelu"], [231, 0, 1, "", "relu"], [232, 0, 1, "", "selu"], [233, 0, 1, "", "silu"], [234, 0, 1, "", "step"], [181, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[180, 1, 1, "", "__init__"]], "mlx.nn.losses": [[221, 0, 1, "", "binary_cross_entropy"], [222, 0, 1, "", 
"cross_entropy"], [223, 0, 1, "", "kl_div_loss"], [224, 0, 1, "", "l1_loss"], [225, 0, 1, "", "mse_loss"], [226, 0, 1, "", "nll_loss"], [227, 0, 1, "", "smooth_l1_loss"], [228, 0, 1, "", "triplet_loss"]], "mlx.optimizers": [[182, 0, 1, "", "AdaDelta"], [183, 0, 1, "", "Adagrad"], [184, 0, 1, "", "Adam"], [185, 0, 1, "", "AdamW"], [186, 0, 1, "", "Adamax"], [187, 0, 1, "", "Lion"], [188, 0, 1, "", "Optimizer"], [189, 0, 1, "", "OptimizerState"], [190, 0, 1, "", "RMSprop"], [191, 0, 1, "", "SGD"]], "mlx.optimizers.Optimizer": [[188, 4, 1, "", "state"]], "mlx.utils": [[192, 2, 1, "", "tree_flatten"], [193, 2, 1, "", "tree_map"], [194, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property", "4": "py:attribute"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"], "4": ["py", "attribute", "Python attribute"]}, "titleterms": {"oper": [0, 1, 238], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 244], "primit": 1, "us": [1, 245], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 241, 243], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 206], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 236], "encod": 3, "full": [3, 96], "gener": 3, "put": 3, "all": [3, 12, 29], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 105], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 
90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "from": 6, "pypi": 6, "troubleshoot": 6, "sourc": 6, "requir": 6, "option": 6, "metal": 6, "found": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179], "devic": [7, 197], "dtype": [8, 35], "stream": [9, 197, 245], "ab": [10, 28], "add": 11, "allclos": 13, "ani": [14, 30], "arang": 15, "arcco": 16, "arccosh": 17, "arcsin": 18, "arcsinh": 19, "arctan": 20, "arctanh": 21, "argmax": [22, 31], "argmin": [23, 32], "argpartit": 24, "argsort": 25, "arrai": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 195], "t": 27, "astyp": 33, "co": [34, 68], "exp": [36, 78], "item": 37, "log": [38, 106], "log1p": [39, 108], "logsumexp": [40, 112], "max": [41, 114], "mean": [42, 116], "min": [43, 117], "ndim": 44, "prod": [45, 127], "reciproc": [46, 140], "reshap": [47, 141], "rsqrt": [49, 143], "shape": 50, "sin": [51, 152], "size": 52, "split": [53, 137, 156], "sqrt": [54, 157], "squar": [55, 158], "sum": [56, 163], "tolist": 57, "transpos": [58, 169], "var": [59, 174], "array_equ": 60, "broadcast_to": 61, "ceil": 62, "concaten": 64, "conv1d": [65, 200], "conv2d": [66, 201], "convolv": 67, "cosh": 69, "default_devic": 70, "default_stream": 71, "divid": 73, "equal": 74, "erf": 75, "erfinv": 76, "eval": 77, "expand_dim": 79, "ey": 80, "fft": [81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 198], "fft2": 82, "fftn": 83, "ifft": 84, "ifft2": 85, "ifftn": 86, "irfft": 87, "irfft2": 88, "irfftn": 89, "rfft": 90, "rfft2": 91, "rfftn": 92, "flatten": 93, "floor": 94, "grad": [97, 199], "greater": 98, "greater_equ": 99, "ident": 100, "jvp": 101, "less": 102, "less_equ": 103, "log10": 107, "log2": 109, "logaddexp": 110, "logical_not": 111, "matmul": 113, "maximum": 115, "minimum": 118, "moveaxi": 119, "multipli": 120, "neg": 121, "new_stream": 122, "ones": 123, "ones_lik": 124, "pad": 125, "partit": 126, "random": [130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 240], "bernoulli": 130, "categor": 131, "gumbel": 132, "kei": 133, "normal": 134, "randint": 135, "seed": 136, "truncated_norm": 138, "uniform": 139, "save": 144, "savez": 145, "savez_compress": 146, "set_default_devic": 147, "set_default_stream": 148, "sigmoid": 149, "sign": 150, "simplifi": 151, "sinh": 153, "softmax": 154, "sort": 155, "squeez": 159, "stack": 160, "stop_gradi": 161, "subtract": 162, "swapax": 164, "take": 165, "take_along_axi": 166, "tan": 167, "tanh": 168, "tri": 170, "tril": 171, "triu": 172, 
"value_and_grad": [173, 181], "vjp": 175, "vmap": 176, "where": 177, "zero": 178, "zeros_lik": 179, "nn": [180, 181, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234], "modul": [180, 199], "optim": [182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 239], "adadelta": 182, "adagrad": 183, "adam": 184, "adamw": 185, "adamax": 186, "optimizerst": 189, "rmsprop": 190, "sgd": 191, "util": [192, 193, 194, 242], "tree_flatten": 192, "tree_map": 193, "tree_unflatten": 194, "data": 196, "type": 196, "support": 196, "neural": 199, "network": 199, "quick": [199, 243], "start": [199, 243], "The": 199, "class": 199, "paramet": 199, "updat": 199, "inspect": 199, "valu": 199, "embed": 202, "gelu": [203, 218], "groupnorm": 204, "layernorm": 205, "mish": [207, 229], "multiheadattent": 208, "prelu": [209, 230], "rmsnorm": 211, "relu": [212, 231], "rope": 213, "selu": [214, 232], "sequenti": 215, "silu": [216, 233], "step": [217, 234], "gelu_approx": 219, "gelu_fast_approx": 220, "loss": [221, 222, 223, 224, 225, 226, 227, 228, 237], "binary_cross_entropi": 221, "cross_entropi": 222, "kl_div_loss": 223, "l1_loss": 224, "mse_loss": 225, "nll_loss": 226, "function": [235, 237, 243], "tree": 242, "guid": 243, "basic": 243, "graph": 243, "unifi": 244, "memori": 244, "A": 244, "simpl": 244, "specifi": 245, "round": [48, 142], "clip": 63, "dequant": 72, "floor_divid": 95, "linspac": 104, "quantiz": 128, "quantized_matmul": 129, "lion": 187, "quantizedlinear": 210, "smooth_l1_loss": 227, "triplet_loss": 228}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}})
\ No newline at end of file
diff --git a/docs/build/html/unified_memory.html b/docs/build/html/unified_memory.html
index 853a0871d..623bf7d06 100644
--- a/docs/build/html/unified_memory.html
+++ b/docs/build/html/unified_memory.html
@@ -9,7 +9,7 @@
- Unified Memory — MLX 0.0.5 documentation
+ Unified Memory — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils
diff --git a/docs/build/html/using_streams.html b/docs/build/html/using_streams.html
index 67d61e574..a254bdb25 100644
--- a/docs/build/html/using_streams.html
+++ b/docs/build/html/using_streams.html
@@ -9,7 +9,7 @@
- Using Streams — MLX 0.0.5 documentation
+ Using Streams — MLX 0.0.6 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -186,6 +186,7 @@
mlx.core.array.prod
mlx.core.array.reciprocal
mlx.core.array.reshape
+mlx.core.array.round
mlx.core.array.rsqrt
mlx.core.array.sin
mlx.core.array.split
@@ -227,12 +228,14 @@
mlx.core.array_equal
mlx.core.broadcast_to
mlx.core.ceil
+mlx.core.clip
mlx.core.concatenate
mlx.core.convolve
mlx.core.conv1d
mlx.core.conv2d
mlx.core.cos
mlx.core.cosh
+mlx.core.dequantize
mlx.core.divide
mlx.core.equal
mlx.core.erf
@@ -240,14 +243,16 @@
mlx.core.exp
mlx.core.expand_dims
mlx.core.eye
-mlx.core.floor
mlx.core.flatten
+mlx.core.floor
+mlx.core.floor_divide
mlx.core.full
mlx.core.greater
mlx.core.greater_equal
mlx.core.identity
mlx.core.less
mlx.core.less_equal
+mlx.core.linspace
mlx.core.load
mlx.core.log
mlx.core.log2
@@ -270,8 +275,11 @@
mlx.core.partition
mlx.core.pad
mlx.core.prod
+mlx.core.quantize
+mlx.core.quantized_matmul
mlx.core.reciprocal
mlx.core.reshape
+mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
@@ -364,6 +372,7 @@
mlx.nn.RoPE
mlx.nn.MultiHeadAttention
mlx.nn.Sequential
+mlx.nn.QuantizedLinear
Functions
Loss Functions
@@ -399,6 +410,7 @@
mlx.optimizers.Adam
mlx.optimizers.AdamW
mlx.optimizers.Adamax
+mlx.optimizers.Lion
Tree Utils