docs update

Awni Hannun
2024-04-26 08:24:09 -07:00
committed by CircleCI Docs
parent f77d99b285
commit f946f689a6
502 changed files with 14665 additions and 15103 deletions

@@ -8,7 +8,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.18.1: http://docutils.sourceforge.net/" />
-<title>Layers &#8212; MLX 0.10.0 documentation</title>
+<title>Layers &#8212; MLX 0.12.0 documentation</title>
@@ -36,7 +36,7 @@
<link rel="preload" as="script" href="../../_static/scripts/pydata-sphinx-theme.js?digest=5b4479735964841361fd" />
<script src="../../_static/vendor/fontawesome/6.1.2/js/all.min.js?digest=5b4479735964841361fd"></script>
-<script src="../../_static/documentation_options.js?v=cb265169"></script>
+<script src="../../_static/documentation_options.js?v=e9e33cf0"></script>
<script src="../../_static/doctools.js?v=888ff710"></script>
<script src="../../_static/sphinx_highlight.js?v=dc90522c"></script>
<script src="../../_static/scripts/sphinx-book-theme.js?v=efea14e4"></script>
@@ -131,8 +131,8 @@
-<img src="../../_static/mlx_logo.png" class="logo__image only-light" alt="MLX 0.10.0 documentation - Home"/>
-<script>document.write(`<img src="../../_static/mlx_logo_dark.png" class="logo__image only-dark" alt="MLX 0.10.0 documentation - Home"/>`);</script>
+<img src="../../_static/mlx_logo.png" class="logo__image only-light" alt="MLX 0.12.0 documentation - Home"/>
+<script>document.write(`<img src="../../_static/mlx_logo_dark.png" class="logo__image only-dark" alt="MLX 0.12.0 documentation - Home"/>`);</script>
</a></div>
@@ -240,6 +240,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.new_stream.html">mlx.core.new_stream</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.set_default_stream.html">mlx.core.set_default_stream</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.stream.html">mlx.core.stream</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.synchronize.html">mlx.core.synchronize</a></li>
</ul>
</li>
<li class="toctree-l1 has-children"><a class="reference internal" href="../ops.html">Operations</a><input class="toctree-checkbox" id="toctree-checkbox-4" name="toctree-checkbox-4" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-4"><i class="fa-solid fa-chevron-down"></i></label><ul>
@@ -264,6 +265,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.atleast_2d.html">mlx.core.atleast_2d</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.atleast_3d.html">mlx.core.atleast_3d</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.broadcast_to.html">mlx.core.broadcast_to</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.block_masked_mm.html">mlx.core.block_masked_mm</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.ceil.html">mlx.core.ceil</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.clip.html">mlx.core.clip</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.concatenate.html">mlx.core.concatenate</a></li>
@@ -277,6 +279,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.cummin.html">mlx.core.cummin</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.cumprod.html">mlx.core.cumprod</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.cumsum.html">mlx.core.cumsum</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.degrees.html">mlx.core.degrees</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.dequantize.html">mlx.core.dequantize</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.diag.html">mlx.core.diag</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.diagonal.html">mlx.core.diagonal</a></li>
@@ -325,6 +328,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.moveaxis.html">mlx.core.moveaxis</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.multiply.html">mlx.core.multiply</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.negative.html">mlx.core.negative</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.not_equal.html">mlx.core.not_equal</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.ones.html">mlx.core.ones</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.ones_like.html">mlx.core.ones_like</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.outer.html">mlx.core.outer</a></li>
@@ -333,6 +337,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.prod.html">mlx.core.prod</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.quantize.html">mlx.core.quantize</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.quantized_matmul.html">mlx.core.quantized_matmul</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.radians.html">mlx.core.radians</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.reciprocal.html">mlx.core.reciprocal</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.repeat.html">mlx.core.repeat</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.reshape.html">mlx.core.reshape</a></li>
@@ -436,12 +441,14 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.metal.get_cache_memory.html">mlx.core.metal.get_cache_memory</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.metal.set_memory_limit.html">mlx.core.metal.set_memory_limit</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.metal.set_cache_limit.html">mlx.core.metal.set_cache_limit</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.metal.clear_cache.html">mlx.core.metal.clear_cache</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.metal.start_capture.html">mlx.core.metal.start_capture</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.metal.stop_capture.html">mlx.core.metal.stop_capture</a></li>
</ul>
</li>
<li class="toctree-l1 current active has-children"><a class="reference internal" href="../nn.html">Neural Networks</a><input checked="" class="toctree-checkbox" id="toctree-checkbox-11" name="toctree-checkbox-11" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-11"><i class="fa-solid fa-chevron-down"></i></label><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.nn.value_and_grad.html">mlx.nn.value_and_grad</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.nn.quantize.html">mlx.nn.quantize</a></li>
<li class="toctree-l2 has-children"><a class="reference internal" href="module.html">Module</a><input class="toctree-checkbox" id="toctree-checkbox-12" name="toctree-checkbox-12" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-12"><i class="fa-solid fa-chevron-down"></i></label><ul>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Module.training.html">mlx.nn.Module.training</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Module.state.html">mlx.nn.Module.state</a></li>
@@ -488,6 +495,7 @@
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Mish.html">mlx.nn.Mish</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.MultiHeadAttention.html">mlx.nn.MultiHeadAttention</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.PReLU.html">mlx.nn.PReLU</a></li>
+<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.QuantizedEmbedding.html">mlx.nn.QuantizedEmbedding</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.QuantizedLinear.html">mlx.nn.QuantizedLinear</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.RMSNorm.html">mlx.nn.RMSNorm</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.ReLU.html">mlx.nn.ReLU</a></li>
@@ -591,6 +599,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_flatten.html">mlx.utils.tree_flatten</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_unflatten.html">mlx.utils.tree_unflatten</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_map.html">mlx.utils.tree_map</a></li>
+<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_map_with_path.html">mlx.utils.tree_map_with_path</a></li>
</ul>
</li>
</ul>
@@ -837,43 +846,46 @@ document.write(`
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.PReLU.html#mlx.nn.PReLU" title="mlx.nn.PReLU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">PReLU</span></code></a>([num_parameters, init])</p></td>
<td><p>Applies the element-wise parametric ReLU.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.QuantizedLinear.html#mlx.nn.QuantizedLinear" title="mlx.nn.QuantizedLinear"><code class="xref py py-obj docutils literal notranslate"><span class="pre">QuantizedLinear</span></code></a>(input_dims, output_dims[, ...])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.QuantizedEmbedding.html#mlx.nn.QuantizedEmbedding" title="mlx.nn.QuantizedEmbedding"><code class="xref py py-obj docutils literal notranslate"><span class="pre">QuantizedEmbedding</span></code></a>(num_embeddings, dims[, ...])</p></td>
<td><p>The same as <a class="reference internal" href="_autosummary/mlx.nn.Embedding.html#mlx.nn.Embedding" title="mlx.nn.Embedding"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Embedding</span></code></a> but with a quantized weight matrix.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.QuantizedLinear.html#mlx.nn.QuantizedLinear" title="mlx.nn.QuantizedLinear"><code class="xref py py-obj docutils literal notranslate"><span class="pre">QuantizedLinear</span></code></a>(input_dims, output_dims[, ...])</p></td>
<td><p>Applies an affine transformation to the input using a quantized weight matrix.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.RMSNorm.html#mlx.nn.RMSNorm" title="mlx.nn.RMSNorm"><code class="xref py py-obj docutils literal notranslate"><span class="pre">RMSNorm</span></code></a>(dims[, eps])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.RMSNorm.html#mlx.nn.RMSNorm" title="mlx.nn.RMSNorm"><code class="xref py py-obj docutils literal notranslate"><span class="pre">RMSNorm</span></code></a>(dims[, eps])</p></td>
<td><p>Applies Root Mean Square normalization [1] to the inputs.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.ReLU.html#mlx.nn.ReLU" title="mlx.nn.ReLU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">ReLU</span></code></a>()</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.ReLU.html#mlx.nn.ReLU" title="mlx.nn.ReLU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">ReLU</span></code></a>()</p></td>
<td><p>Applies the Rectified Linear Unit.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.RNN.html#mlx.nn.RNN" title="mlx.nn.RNN"><code class="xref py py-obj docutils literal notranslate"><span class="pre">RNN</span></code></a>(input_size, hidden_size[, bias, ...])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.RNN.html#mlx.nn.RNN" title="mlx.nn.RNN"><code class="xref py py-obj docutils literal notranslate"><span class="pre">RNN</span></code></a>(input_size, hidden_size[, bias, ...])</p></td>
<td><p>An Elman recurrent layer.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.RoPE.html#mlx.nn.RoPE" title="mlx.nn.RoPE"><code class="xref py py-obj docutils literal notranslate"><span class="pre">RoPE</span></code></a>(dims[, traditional, base, scale])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.RoPE.html#mlx.nn.RoPE" title="mlx.nn.RoPE"><code class="xref py py-obj docutils literal notranslate"><span class="pre">RoPE</span></code></a>(dims[, traditional, base, scale])</p></td>
<td><p>Implements the rotary positional encoding.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.SELU.html#mlx.nn.SELU" title="mlx.nn.SELU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SELU</span></code></a>()</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.SELU.html#mlx.nn.SELU" title="mlx.nn.SELU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SELU</span></code></a>()</p></td>
<td><p>Applies the Scaled Exponential Linear Unit.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Sequential.html#mlx.nn.Sequential" title="mlx.nn.Sequential"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Sequential</span></code></a>(*modules)</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Sequential.html#mlx.nn.Sequential" title="mlx.nn.Sequential"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Sequential</span></code></a>(*modules)</p></td>
<td><p>A layer that calls the passed callables in order.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.SiLU.html#mlx.nn.SiLU" title="mlx.nn.SiLU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SiLU</span></code></a>()</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.SiLU.html#mlx.nn.SiLU" title="mlx.nn.SiLU"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SiLU</span></code></a>()</p></td>
<td><p>Applies the Sigmoid Linear Unit.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.SinusoidalPositionalEncoding.html#mlx.nn.SinusoidalPositionalEncoding" title="mlx.nn.SinusoidalPositionalEncoding"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SinusoidalPositionalEncoding</span></code></a>(dims[, ...])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.SinusoidalPositionalEncoding.html#mlx.nn.SinusoidalPositionalEncoding" title="mlx.nn.SinusoidalPositionalEncoding"><code class="xref py py-obj docutils literal notranslate"><span class="pre">SinusoidalPositionalEncoding</span></code></a>(dims[, ...])</p></td>
<td><p>Implements sinusoidal positional encoding.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Softshrink.html#mlx.nn.Softshrink" title="mlx.nn.Softshrink"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Softshrink</span></code></a>([lambd])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Softshrink.html#mlx.nn.Softshrink" title="mlx.nn.Softshrink"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Softshrink</span></code></a>([lambd])</p></td>
<td><p>Applies the Softshrink function.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Step.html#mlx.nn.Step" title="mlx.nn.Step"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Step</span></code></a>([threshold])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Step.html#mlx.nn.Step" title="mlx.nn.Step"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Step</span></code></a>([threshold])</p></td>
<td><p>Applies the Step Activation Function.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Transformer.html#mlx.nn.Transformer" title="mlx.nn.Transformer"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Transformer</span></code></a>(dims, num_heads, ...)</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Transformer.html#mlx.nn.Transformer" title="mlx.nn.Transformer"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Transformer</span></code></a>(dims, num_heads, ...)</p></td>
<td><p>Implements a standard Transformer model.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Upsample.html#mlx.nn.Upsample" title="mlx.nn.Upsample"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Upsample</span></code></a>(scale_factor[, mode, align_corners])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary/mlx.nn.Upsample.html#mlx.nn.Upsample" title="mlx.nn.Upsample"><code class="xref py py-obj docutils literal notranslate"><span class="pre">Upsample</span></code></a>(scale_factor[, mode, align_corners])</p></td>
<td><p>Upsample the input signal spatially.</p></td>
</tr>
</tbody>
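
The entries added in this update correspond to new APIs in MLX 0.12 (for example mlx.core.synchronize, mlx.core.degrees, mlx.core.radians, mlx.core.not_equal, mlx.nn.quantize, and mlx.nn.QuantizedEmbedding). As a rough illustration only, a minimal sketch of a few of the new core ops, assuming the 0.12 Python API and using standard helpers (mx.arange, mx.ones, mx.array) that are not part of this diff:

    # Hedged sketch: exercises a few ops newly listed in this docs update.
    # Assumes MLX >= 0.12; signatures shown are the plain array-in/array-out forms.
    import mlx.core as mx

    a = mx.arange(6, dtype=mx.float32).reshape(2, 3)
    b = mx.ones((2, 3))

    mask = mx.not_equal(a, b)                        # element-wise a != b
    deg = mx.degrees(mx.array(3.141592653589793))    # radians -> degrees (~180.0)
    rad = mx.radians(deg)                            # degrees -> radians

    mx.synchronize()                                 # wait for queued work on the default stream
    print(mask, deg, rad)

On the neural-network side, the newly listed mlx.nn.QuantizedEmbedding is, per the table above, the same as Embedding but with a quantized weight matrix, mirroring the existing QuantizedLinear.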