update docs

Awni Hannun
2024-02-01 13:08:29 -08:00
committed by CircleCI Docs
parent 30ea2df988
commit 21cae9cb8f
378 changed files with 32586 additions and 1950 deletions


@@ -9,7 +9,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.18.1: http://docutils.sourceforge.net/" />
<title>Loss Functions &#8212; MLX 0.0.9 documentation</title>
<title>Loss Functions &#8212; MLX 0.1.0 documentation</title>
@@ -134,8 +134,8 @@
<img src="../../_static/mlx_logo.png" class="logo__image only-light" alt="MLX 0.0.9 documentation - Home"/>
<script>document.write(`<img src="../../_static/mlx_logo.png" class="logo__image only-dark" alt="MLX 0.0.9 documentation - Home"/>`);</script>
<img src="../../_static/mlx_logo.png" class="logo__image only-light" alt="MLX 0.1.0 documentation - Home"/>
<script>document.write(`<img src="../../_static/mlx_logo.png" class="logo__image only-dark" alt="MLX 0.1.0 documentation - Home"/>`);</script>
</a></div>
@@ -241,6 +241,8 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.cos.html">mlx.core.cos</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.cosh.html">mlx.core.cosh</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.dequantize.html">mlx.core.dequantize</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.diag.html">mlx.core.diag</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.diagonal.html">mlx.core.diagonal</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.divide.html">mlx.core.divide</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.divmod.html">mlx.core.divmod</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.equal.html">mlx.core.equal</a></li>
@@ -351,7 +353,6 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.jvp.html">mlx.core.jvp</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.vjp.html">mlx.core.vjp</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.vmap.html">mlx.core.vmap</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.simplify.html">mlx.core.simplify</a></li>
</ul>
</li>
<li class="toctree-l1 has-children"><a class="reference internal" href="../fft.html">FFT</a><input class="toctree-checkbox" id="toctree-checkbox-6" name="toctree-checkbox-6" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-6"><i class="fa-solid fa-chevron-down"></i></label><ul>
@@ -371,6 +372,7 @@
</li>
<li class="toctree-l1 has-children"><a class="reference internal" href="../linalg.html">Linear Algebra</a><input class="toctree-checkbox" id="toctree-checkbox-7" name="toctree-checkbox-7" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-7"><i class="fa-solid fa-chevron-down"></i></label><ul>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.linalg.norm.html">mlx.core.linalg.norm</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.core.linalg.qr.html">mlx.core.linalg.qr</a></li>
</ul>
</li>
<li class="toctree-l1 current active has-children"><a class="reference internal" href="../nn.html">Neural Networks</a><input checked="" class="toctree-checkbox" id="toctree-checkbox-8" name="toctree-checkbox-8" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-8"><i class="fa-solid fa-chevron-down"></i></label><ul class="current">
@@ -421,6 +423,7 @@
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Sequential.html">mlx.nn.Sequential</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.SiLU.html">mlx.nn.SiLU</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.SinusoidalPositionalEncoding.html">mlx.nn.SinusoidalPositionalEncoding</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Softshrink.html">mlx.nn.Softshrink</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Step.html">mlx.nn.Step</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.Transformer.html">mlx.nn.Transformer</a></li>
</ul>
@@ -433,6 +436,7 @@
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.prelu.html">mlx.nn.prelu</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.relu.html">mlx.nn.relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.selu.html">mlx.nn.selu</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.softshrink.html">mlx.nn.softshrink</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.silu.html">mlx.nn.silu</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.step.html">mlx.nn.step</a></li>
</ul>
@@ -441,6 +445,7 @@
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html">mlx.nn.losses.binary_cross_entropy</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.html">mlx.nn.losses.cosine_similarity_loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.cross_entropy.html">mlx.nn.losses.cross_entropy</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.html">mlx.nn.losses.gaussian_nll_loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.hinge_loss.html">mlx.nn.losses.hinge_loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.huber_loss.html">mlx.nn.losses.huber_loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.kl_div_loss.html">mlx.nn.losses.kl_div_loss</a></li>
@@ -452,14 +457,26 @@
<li class="toctree-l3"><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.triplet_loss.html">mlx.nn.losses.triplet_loss</a></li>
</ul>
</li>
<li class="toctree-l2 has-children"><a class="reference internal" href="init.html">Initializers</a><input class="toctree-checkbox" id="toctree-checkbox-13" name="toctree-checkbox-13" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-13"><i class="fa-solid fa-chevron-down"></i></label><ul>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.constant.html">mlx.nn.init.constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.normal.html">mlx.nn.init.normal</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.uniform.html">mlx.nn.init.uniform</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.identity.html">mlx.nn.init.identity</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.glorot_normal.html">mlx.nn.init.glorot_normal</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.glorot_uniform.html">mlx.nn.init.glorot_uniform</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.he_normal.html">mlx.nn.init.he_normal</a></li>
<li class="toctree-l3"><a class="reference internal" href="_autosummary/mlx.nn.init.he_uniform.html">mlx.nn.init.he_uniform</a></li>
</ul>
</li>
<li class="toctree-l1 has-children"><a class="reference internal" href="../optimizers.html">Optimizers</a><input class="toctree-checkbox" id="toctree-checkbox-13" name="toctree-checkbox-13" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-13"><i class="fa-solid fa-chevron-down"></i></label><ul>
</ul>
</li>
<li class="toctree-l1 has-children"><a class="reference internal" href="../optimizers.html">Optimizers</a><input class="toctree-checkbox" id="toctree-checkbox-14" name="toctree-checkbox-14" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-14"><i class="fa-solid fa-chevron-down"></i></label><ul>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.OptimizerState.html">mlx.optimizers.OptimizerState</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.Optimizer.html">mlx.optimizers.Optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.SGD.html">mlx.optimizers.SGD</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.RMSprop.html">mlx.optimizers.RMSprop</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.Adagrad.html">mlx.optimizers.Adagrad</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.Adafactor.html">mlx.optimizers.Adafactor</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.AdaDelta.html">mlx.optimizers.AdaDelta</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.Adam.html">mlx.optimizers.Adam</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.AdamW.html">mlx.optimizers.AdamW</a></li>
@@ -467,7 +484,7 @@
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.optimizers.Lion.html">mlx.optimizers.Lion</a></li>
</ul>
</li>
<li class="toctree-l1 has-children"><a class="reference internal" href="../tree_utils.html">Tree Utils</a><input class="toctree-checkbox" id="toctree-checkbox-14" name="toctree-checkbox-14" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-14"><i class="fa-solid fa-chevron-down"></i></label><ul>
<li class="toctree-l1 has-children"><a class="reference internal" href="../tree_utils.html">Tree Utils</a><input class="toctree-checkbox" id="toctree-checkbox-15" name="toctree-checkbox-15" type="checkbox"/><label class="toctree-toggle" for="toctree-checkbox-15"><i class="fa-solid fa-chevron-down"></i></label><ul>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_flatten.html">mlx.utils.tree_flatten</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_unflatten.html">mlx.utils.tree_unflatten</a></li>
<li class="toctree-l2"><a class="reference internal" href="../_autosummary/mlx.utils.tree_map.html">mlx.utils.tree_map</a></li>
@@ -650,7 +667,7 @@ document.write(`
<span id="losses"></span><h1>Loss Functions<a class="headerlink" href="#loss-functions" title="Permalink to this heading">#</a></h1>
<table class="autosummary longtable table autosummary">
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html#mlx.nn.losses.binary_cross_entropy" title="mlx.nn.losses.binary_cross_entropy"><code class="xref py py-obj docutils literal notranslate"><span class="pre">binary_cross_entropy</span></code></a>(logits, targets[, ...])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.binary_cross_entropy.html#mlx.nn.losses.binary_cross_entropy" title="mlx.nn.losses.binary_cross_entropy"><code class="xref py py-obj docutils literal notranslate"><span class="pre">binary_cross_entropy</span></code></a>(inputs, targets[, ...])</p></td>
<td><p>Computes the binary cross entropy loss.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.html#mlx.nn.losses.cosine_similarity_loss" title="mlx.nn.losses.cosine_similarity_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">cosine_similarity_loss</span></code></a>(x1, x2[, axis, eps, ...])</p></td>
@@ -659,31 +676,34 @@ document.write(`
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.cross_entropy.html#mlx.nn.losses.cross_entropy" title="mlx.nn.losses.cross_entropy"><code class="xref py py-obj docutils literal notranslate"><span class="pre">cross_entropy</span></code></a>(logits, targets[, weights, ...])</p></td>
<td><p>Computes the cross entropy loss.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.hinge_loss.html#mlx.nn.losses.hinge_loss" title="mlx.nn.losses.hinge_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">hinge_loss</span></code></a>(inputs, targets[, reduction])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.html#mlx.nn.losses.gaussian_nll_loss" title="mlx.nn.losses.gaussian_nll_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">gaussian_nll_loss</span></code></a>(inputs, targets, vars[, ...])</p></td>
<td><p>Computes the negative log likelihood loss for a Gaussian distribution.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.hinge_loss.html#mlx.nn.losses.hinge_loss" title="mlx.nn.losses.hinge_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">hinge_loss</span></code></a>(inputs, targets[, reduction])</p></td>
<td><p>Computes the hinge loss between inputs and targets.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.huber_loss.html#mlx.nn.losses.huber_loss" title="mlx.nn.losses.huber_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">huber_loss</span></code></a>(inputs, targets[, delta, reduction])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.huber_loss.html#mlx.nn.losses.huber_loss" title="mlx.nn.losses.huber_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">huber_loss</span></code></a>(inputs, targets[, delta, reduction])</p></td>
<td><p>Computes the Huber loss between inputs and targets.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.kl_div_loss.html#mlx.nn.losses.kl_div_loss" title="mlx.nn.losses.kl_div_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">kl_div_loss</span></code></a>(inputs, targets[, axis, reduction])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.kl_div_loss.html#mlx.nn.losses.kl_div_loss" title="mlx.nn.losses.kl_div_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">kl_div_loss</span></code></a>(inputs, targets[, axis, reduction])</p></td>
<td><p>Computes the Kullback-Leibler divergence loss.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.l1_loss.html#mlx.nn.losses.l1_loss" title="mlx.nn.losses.l1_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">l1_loss</span></code></a>(predictions, targets[, reduction])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.l1_loss.html#mlx.nn.losses.l1_loss" title="mlx.nn.losses.l1_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">l1_loss</span></code></a>(predictions, targets[, reduction])</p></td>
<td><p>Computes the L1 loss.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.log_cosh_loss.html#mlx.nn.losses.log_cosh_loss" title="mlx.nn.losses.log_cosh_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">log_cosh_loss</span></code></a>(inputs, targets[, reduction])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.log_cosh_loss.html#mlx.nn.losses.log_cosh_loss" title="mlx.nn.losses.log_cosh_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">log_cosh_loss</span></code></a>(inputs, targets[, reduction])</p></td>
<td><p>Computes the log cosh loss between inputs and targets.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.mse_loss.html#mlx.nn.losses.mse_loss" title="mlx.nn.losses.mse_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">mse_loss</span></code></a>(predictions, targets[, reduction])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.mse_loss.html#mlx.nn.losses.mse_loss" title="mlx.nn.losses.mse_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">mse_loss</span></code></a>(predictions, targets[, reduction])</p></td>
<td><p>Computes the mean squared error loss.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.nll_loss.html#mlx.nn.losses.nll_loss" title="mlx.nn.losses.nll_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">nll_loss</span></code></a>(inputs, targets[, axis, reduction])</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.nll_loss.html#mlx.nn.losses.nll_loss" title="mlx.nn.losses.nll_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">nll_loss</span></code></a>(inputs, targets[, axis, reduction])</p></td>
<td><p>Computes the negative log likelihood loss.</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html#mlx.nn.losses.smooth_l1_loss" title="mlx.nn.losses.smooth_l1_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">smooth_l1_loss</span></code></a>(predictions, targets[, beta, ...])</p></td>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.smooth_l1_loss.html#mlx.nn.losses.smooth_l1_loss" title="mlx.nn.losses.smooth_l1_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">smooth_l1_loss</span></code></a>(predictions, targets[, beta, ...])</p></td>
<td><p>Computes the smooth L1 loss.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.triplet_loss.html#mlx.nn.losses.triplet_loss" title="mlx.nn.losses.triplet_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">triplet_loss</span></code></a>(anchors, positives, negatives)</p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="_autosummary_functions/mlx.nn.losses.triplet_loss.html#mlx.nn.losses.triplet_loss" title="mlx.nn.losses.triplet_loss"><code class="xref py py-obj docutils literal notranslate"><span class="pre">triplet_loss</span></code></a>(anchors, positives, negatives)</p></td>
<td><p>Computes the triplet loss for a set of anchor, positive, and negative samples.</p></td>
</tr>
</tbody>
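
The table above lists each loss function together with its call signature. The snippet below is a minimal, hypothetical usage sketch for two of them: the existing mse_loss and the newly listed gaussian_nll_loss. It assumes MLX is installed and that the argument names match the signatures shown (predictions/targets, inputs/targets/vars); the sample values and the explicit reduction="mean" keyword are illustrative assumptions, not taken from this diff.

# Sketch only: argument names follow the autosummary table; defaults are assumed.
import mlx.core as mx
import mlx.nn as nn

predictions = mx.array([0.1, 0.4, 0.2])
targets = mx.array([0.0, 0.5, 0.25])

# Mean squared error, averaged over elements.
mse = nn.losses.mse_loss(predictions, targets, reduction="mean")

# Gaussian negative log likelihood, newly added in this release;
# `vars` holds the predicted variance for each element.
variances = mx.array([0.5, 0.5, 0.5])
gnll = nn.losses.gaussian_nll_loss(predictions, targets, variances, reduction="mean")

print(mse, gnll)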