From d0c2c3d1ca8974ff7135ffa65da2c6d88d1614f4 Mon Sep 17 00:00:00 2001 From: Awni Hannun Date: Thu, 29 Feb 2024 12:39:18 -0800 Subject: [PATCH] docs update --- docs/build/html/.buildinfo | 2 +- .../_autosummary/mlx.core.atleast_1d.rst | 6 + .../_autosummary/mlx.core.atleast_2d.rst | 6 + .../_autosummary/mlx.core.atleast_3d.rst | 6 + .../_autosummary/mlx.core.conv_general.rst | 6 + .../nn/_autosummary/mlx.nn.Upsample.rst | 6 + .../nn/_autosummary_functions/mlx.nn.elu.rst | 6 + .../nn/_autosummary_functions/mlx.nn.glu.rst | 6 + .../mlx.nn.hardswish.rst | 6 + .../mlx.nn.leaky_relu.rst | 6 + .../mlx.nn.log_sigmoid.rst | 6 + .../mlx.nn.log_softmax.rst | 6 + .../_autosummary_functions/mlx.nn.relu6.rst | 6 + .../_autosummary_functions/mlx.nn.sigmoid.rst | 6 + .../_autosummary_functions/mlx.nn.softmax.rst | 6 + .../mlx.nn.softplus.rst | 6 + .../nn/_autosummary_functions/mlx.nn.tanh.rst | 6 + .../html/_sources/python/nn/functions.rst | 13 +- docs/build/html/_sources/python/nn/layers.rst | 1 + docs/build/html/_sources/python/ops.rst | 4 + .../mlx.optimizers.join_schedules.rst | 6 + .../mlx.optimizers.linear_schedule.rst | 6 + .../_sources/python/optimizers/schedulers.rst | 6 +- .../html/_static/documentation_options.js | 2 +- docs/build/html/_static/mlx_logo.png | Bin 7410 -> 78025 bytes docs/build/html/_static/mlx_logo_dark.png | Bin 0 -> 48758 bytes docs/build/html/cpp/ops.html | 30 +- docs/build/html/dev/extensions.html | 30 +- .../html/examples/linear_regression.html | 30 +- docs/build/html/examples/llama-inference.html | 30 +- docs/build/html/examples/mlp.html | 30 +- docs/build/html/genindex.html | 92 +- docs/build/html/index.html | 30 +- docs/build/html/install.html | 30 +- docs/build/html/objects.inv | Bin 8122 -> 8525 bytes .../python/_autosummary/mlx.core.Device.html | 30 +- .../python/_autosummary/mlx.core.Dtype.html | 30 +- .../python/_autosummary/mlx.core.Stream.html | 30 +- .../python/_autosummary/mlx.core.abs.html | 30 +- .../python/_autosummary/mlx.core.add.html | 30 +- .../python/_autosummary/mlx.core.all.html | 30 +- .../_autosummary/mlx.core.allclose.html | 30 +- .../python/_autosummary/mlx.core.any.html | 30 +- .../python/_autosummary/mlx.core.arange.html | 30 +- .../python/_autosummary/mlx.core.arccos.html | 30 +- .../python/_autosummary/mlx.core.arccosh.html | 30 +- .../python/_autosummary/mlx.core.arcsin.html | 30 +- .../python/_autosummary/mlx.core.arcsinh.html | 30 +- .../python/_autosummary/mlx.core.arctan.html | 30 +- .../python/_autosummary/mlx.core.arctanh.html | 30 +- .../python/_autosummary/mlx.core.argmax.html | 30 +- .../python/_autosummary/mlx.core.argmin.html | 30 +- .../_autosummary/mlx.core.argpartition.html | 30 +- .../python/_autosummary/mlx.core.argsort.html | 30 +- .../python/_autosummary/mlx.core.array.T.html | 30 +- .../_autosummary/mlx.core.array.abs.html | 30 +- .../_autosummary/mlx.core.array.all.html | 30 +- .../_autosummary/mlx.core.array.any.html | 30 +- .../_autosummary/mlx.core.array.argmax.html | 30 +- .../_autosummary/mlx.core.array.argmin.html | 30 +- .../_autosummary/mlx.core.array.astype.html | 30 +- .../_autosummary/mlx.core.array.cos.html | 30 +- .../_autosummary/mlx.core.array.dtype.html | 30 +- .../_autosummary/mlx.core.array.exp.html | 30 +- .../python/_autosummary/mlx.core.array.html | 30 +- .../_autosummary/mlx.core.array.item.html | 30 +- .../_autosummary/mlx.core.array.log.html | 30 +- .../_autosummary/mlx.core.array.log1p.html | 30 +- .../mlx.core.array.logsumexp.html | 30 +- .../_autosummary/mlx.core.array.max.html | 30 +- 
.../_autosummary/mlx.core.array.mean.html | 30 +- .../_autosummary/mlx.core.array.min.html | 30 +- .../_autosummary/mlx.core.array.ndim.html | 30 +- .../_autosummary/mlx.core.array.prod.html | 30 +- .../mlx.core.array.reciprocal.html | 30 +- .../_autosummary/mlx.core.array.reshape.html | 30 +- .../_autosummary/mlx.core.array.round.html | 30 +- .../_autosummary/mlx.core.array.rsqrt.html | 30 +- .../_autosummary/mlx.core.array.shape.html | 30 +- .../_autosummary/mlx.core.array.sin.html | 30 +- .../_autosummary/mlx.core.array.size.html | 30 +- .../_autosummary/mlx.core.array.split.html | 30 +- .../_autosummary/mlx.core.array.sqrt.html | 30 +- .../_autosummary/mlx.core.array.square.html | 30 +- .../_autosummary/mlx.core.array.sum.html | 30 +- .../_autosummary/mlx.core.array.tolist.html | 30 +- .../mlx.core.array.transpose.html | 30 +- .../_autosummary/mlx.core.array.var.html | 30 +- .../_autosummary/mlx.core.array_equal.html | 36 +- .../_autosummary/mlx.core.atleast_1d.html | 818 +++++++++++++++++ .../_autosummary/mlx.core.atleast_2d.html | 818 +++++++++++++++++ .../_autosummary/mlx.core.atleast_3d.html | 818 +++++++++++++++++ .../_autosummary/mlx.core.broadcast_to.html | 36 +- .../python/_autosummary/mlx.core.ceil.html | 30 +- .../python/_autosummary/mlx.core.clip.html | 30 +- .../python/_autosummary/mlx.core.compile.html | 36 +- .../_autosummary/mlx.core.concatenate.html | 30 +- .../python/_autosummary/mlx.core.conv1d.html | 30 +- .../python/_autosummary/mlx.core.conv2d.html | 38 +- .../_autosummary/mlx.core.conv_general.html | 841 +++++++++++++++++ .../_autosummary/mlx.core.convolve.html | 30 +- .../python/_autosummary/mlx.core.cos.html | 36 +- .../python/_autosummary/mlx.core.cosh.html | 30 +- .../_autosummary/mlx.core.default_device.html | 30 +- .../_autosummary/mlx.core.default_stream.html | 30 +- .../_autosummary/mlx.core.dequantize.html | 30 +- .../python/_autosummary/mlx.core.diag.html | 30 +- .../_autosummary/mlx.core.diagonal.html | 30 +- .../mlx.core.disable_compile.html | 30 +- .../python/_autosummary/mlx.core.divide.html | 30 +- .../python/_autosummary/mlx.core.divmod.html | 30 +- .../_autosummary/mlx.core.enable_compile.html | 30 +- .../python/_autosummary/mlx.core.equal.html | 30 +- .../python/_autosummary/mlx.core.erf.html | 30 +- .../python/_autosummary/mlx.core.erfinv.html | 30 +- .../python/_autosummary/mlx.core.eval.html | 30 +- .../python/_autosummary/mlx.core.exp.html | 30 +- .../_autosummary/mlx.core.expand_dims.html | 30 +- .../python/_autosummary/mlx.core.eye.html | 30 +- .../python/_autosummary/mlx.core.fft.fft.html | 30 +- .../_autosummary/mlx.core.fft.fft2.html | 30 +- .../_autosummary/mlx.core.fft.fftn.html | 30 +- .../_autosummary/mlx.core.fft.ifft.html | 30 +- .../_autosummary/mlx.core.fft.ifft2.html | 30 +- .../_autosummary/mlx.core.fft.ifftn.html | 30 +- .../_autosummary/mlx.core.fft.irfft.html | 30 +- .../_autosummary/mlx.core.fft.irfft2.html | 30 +- .../_autosummary/mlx.core.fft.irfftn.html | 30 +- .../_autosummary/mlx.core.fft.rfft.html | 30 +- .../_autosummary/mlx.core.fft.rfft2.html | 30 +- .../_autosummary/mlx.core.fft.rfftn.html | 30 +- .../python/_autosummary/mlx.core.flatten.html | 30 +- .../python/_autosummary/mlx.core.floor.html | 30 +- .../_autosummary/mlx.core.floor_divide.html | 30 +- .../python/_autosummary/mlx.core.full.html | 30 +- .../python/_autosummary/mlx.core.grad.html | 30 +- .../python/_autosummary/mlx.core.greater.html | 30 +- .../_autosummary/mlx.core.greater_equal.html | 30 +- .../_autosummary/mlx.core.identity.html | 30 +- 
.../python/_autosummary/mlx.core.inner.html | 30 +- .../python/_autosummary/mlx.core.isinf.html | 30 +- .../python/_autosummary/mlx.core.isnan.html | 30 +- .../_autosummary/mlx.core.isneginf.html | 30 +- .../_autosummary/mlx.core.isposinf.html | 30 +- .../python/_autosummary/mlx.core.jvp.html | 30 +- .../python/_autosummary/mlx.core.less.html | 30 +- .../_autosummary/mlx.core.less_equal.html | 30 +- .../_autosummary/mlx.core.linalg.norm.html | 30 +- .../_autosummary/mlx.core.linalg.qr.html | 30 +- .../_autosummary/mlx.core.linspace.html | 30 +- .../python/_autosummary/mlx.core.load.html | 30 +- .../python/_autosummary/mlx.core.log.html | 30 +- .../python/_autosummary/mlx.core.log10.html | 30 +- .../python/_autosummary/mlx.core.log1p.html | 30 +- .../python/_autosummary/mlx.core.log2.html | 30 +- .../_autosummary/mlx.core.logaddexp.html | 30 +- .../_autosummary/mlx.core.logical_and.html | 30 +- .../_autosummary/mlx.core.logical_not.html | 30 +- .../_autosummary/mlx.core.logical_or.html | 30 +- .../_autosummary/mlx.core.logsumexp.html | 30 +- .../python/_autosummary/mlx.core.matmul.html | 30 +- .../python/_autosummary/mlx.core.max.html | 30 +- .../python/_autosummary/mlx.core.maximum.html | 30 +- .../python/_autosummary/mlx.core.mean.html | 30 +- .../python/_autosummary/mlx.core.min.html | 30 +- .../python/_autosummary/mlx.core.minimum.html | 30 +- .../_autosummary/mlx.core.moveaxis.html | 30 +- .../_autosummary/mlx.core.multiply.html | 30 +- .../_autosummary/mlx.core.negative.html | 30 +- .../_autosummary/mlx.core.new_stream.html | 30 +- .../python/_autosummary/mlx.core.ones.html | 30 +- .../_autosummary/mlx.core.ones_like.html | 30 +- .../python/_autosummary/mlx.core.outer.html | 30 +- .../python/_autosummary/mlx.core.pad.html | 30 +- .../_autosummary/mlx.core.partition.html | 30 +- .../python/_autosummary/mlx.core.prod.html | 30 +- .../_autosummary/mlx.core.quantize.html | 30 +- .../mlx.core.quantized_matmul.html | 30 +- .../mlx.core.random.bernoulli.html | 30 +- .../mlx.core.random.categorical.html | 30 +- .../_autosummary/mlx.core.random.gumbel.html | 30 +- .../_autosummary/mlx.core.random.key.html | 30 +- .../_autosummary/mlx.core.random.normal.html | 30 +- .../_autosummary/mlx.core.random.randint.html | 30 +- .../_autosummary/mlx.core.random.seed.html | 30 +- .../_autosummary/mlx.core.random.split.html | 30 +- .../mlx.core.random.truncated_normal.html | 30 +- .../_autosummary/mlx.core.random.uniform.html | 30 +- .../_autosummary/mlx.core.reciprocal.html | 30 +- .../python/_autosummary/mlx.core.repeat.html | 30 +- .../python/_autosummary/mlx.core.reshape.html | 30 +- .../python/_autosummary/mlx.core.round.html | 30 +- .../python/_autosummary/mlx.core.rsqrt.html | 30 +- .../python/_autosummary/mlx.core.save.html | 30 +- .../_autosummary/mlx.core.save_gguf.html | 30 +- .../mlx.core.save_safetensors.html | 30 +- .../python/_autosummary/mlx.core.savez.html | 30 +- .../mlx.core.savez_compressed.html | 30 +- .../mlx.core.set_default_device.html | 30 +- .../mlx.core.set_default_stream.html | 30 +- .../python/_autosummary/mlx.core.sigmoid.html | 30 +- .../python/_autosummary/mlx.core.sign.html | 30 +- .../python/_autosummary/mlx.core.sin.html | 30 +- .../python/_autosummary/mlx.core.sinh.html | 30 +- .../python/_autosummary/mlx.core.softmax.html | 30 +- .../python/_autosummary/mlx.core.sort.html | 30 +- .../python/_autosummary/mlx.core.split.html | 30 +- .../python/_autosummary/mlx.core.sqrt.html | 30 +- .../python/_autosummary/mlx.core.square.html | 30 +- 
.../python/_autosummary/mlx.core.squeeze.html | 30 +- .../python/_autosummary/mlx.core.stack.html | 30 +- .../_autosummary/mlx.core.stop_gradient.html | 30 +- .../_autosummary/mlx.core.subtract.html | 30 +- .../python/_autosummary/mlx.core.sum.html | 30 +- .../_autosummary/mlx.core.swapaxes.html | 30 +- .../python/_autosummary/mlx.core.take.html | 30 +- .../mlx.core.take_along_axis.html | 30 +- .../python/_autosummary/mlx.core.tan.html | 30 +- .../python/_autosummary/mlx.core.tanh.html | 30 +- .../_autosummary/mlx.core.tensordot.html | 30 +- .../_autosummary/mlx.core.transpose.html | 30 +- .../python/_autosummary/mlx.core.tri.html | 30 +- .../python/_autosummary/mlx.core.tril.html | 30 +- .../python/_autosummary/mlx.core.triu.html | 30 +- .../_autosummary/mlx.core.value_and_grad.html | 30 +- .../python/_autosummary/mlx.core.var.html | 30 +- .../python/_autosummary/mlx.core.vjp.html | 30 +- .../python/_autosummary/mlx.core.vmap.html | 30 +- .../python/_autosummary/mlx.core.where.html | 30 +- .../python/_autosummary/mlx.core.zeros.html | 30 +- .../_autosummary/mlx.core.zeros_like.html | 30 +- .../_autosummary/mlx.nn.value_and_grad.html | 30 +- .../_autosummary/mlx.utils.tree_flatten.html | 30 +- .../_autosummary/mlx.utils.tree_map.html | 30 +- .../mlx.utils.tree_unflatten.html | 30 +- .../python/_autosummary/stream_class.html | 30 +- docs/build/html/python/array.html | 30 +- docs/build/html/python/data_types.html | 30 +- .../html/python/devices_and_streams.html | 30 +- docs/build/html/python/fft.html | 30 +- docs/build/html/python/linalg.html | 30 +- docs/build/html/python/nn.html | 44 +- .../python/nn/_autosummary/mlx.nn.ALiBi.html | 30 +- .../nn/_autosummary/mlx.nn.AvgPool1d.html | 30 +- .../nn/_autosummary/mlx.nn.AvgPool2d.html | 30 +- .../nn/_autosummary/mlx.nn.BatchNorm.html | 30 +- .../python/nn/_autosummary/mlx.nn.Conv1d.html | 30 +- .../python/nn/_autosummary/mlx.nn.Conv2d.html | 30 +- .../nn/_autosummary/mlx.nn.Dropout.html | 30 +- .../nn/_autosummary/mlx.nn.Dropout2d.html | 30 +- .../nn/_autosummary/mlx.nn.Dropout3d.html | 30 +- .../nn/_autosummary/mlx.nn.Embedding.html | 30 +- .../python/nn/_autosummary/mlx.nn.GELU.html | 30 +- .../nn/_autosummary/mlx.nn.GroupNorm.html | 30 +- .../nn/_autosummary/mlx.nn.InstanceNorm.html | 30 +- .../nn/_autosummary/mlx.nn.LayerNorm.html | 30 +- .../python/nn/_autosummary/mlx.nn.Linear.html | 30 +- .../nn/_autosummary/mlx.nn.MaxPool1d.html | 30 +- .../nn/_autosummary/mlx.nn.MaxPool2d.html | 30 +- .../python/nn/_autosummary/mlx.nn.Mish.html | 33 +- .../nn/_autosummary/mlx.nn.Module.apply.html | 30 +- .../mlx.nn.Module.apply_to_modules.html | 30 +- .../_autosummary/mlx.nn.Module.children.html | 30 +- .../nn/_autosummary/mlx.nn.Module.eval.html | 30 +- .../mlx.nn.Module.filter_and_map.html | 30 +- .../nn/_autosummary/mlx.nn.Module.freeze.html | 30 +- .../mlx.nn.Module.leaf_modules.html | 30 +- .../mlx.nn.Module.load_weights.html | 30 +- .../_autosummary/mlx.nn.Module.modules.html | 30 +- .../mlx.nn.Module.named_modules.html | 30 +- .../mlx.nn.Module.parameters.html | 30 +- .../mlx.nn.Module.save_weights.html | 30 +- .../nn/_autosummary/mlx.nn.Module.state.html | 30 +- .../nn/_autosummary/mlx.nn.Module.train.html | 30 +- .../mlx.nn.Module.trainable_parameters.html | 30 +- .../_autosummary/mlx.nn.Module.training.html | 30 +- .../_autosummary/mlx.nn.Module.unfreeze.html | 30 +- .../nn/_autosummary/mlx.nn.Module.update.html | 30 +- .../mlx.nn.Module.update_modules.html | 30 +- .../mlx.nn.MultiHeadAttention.html | 30 +- 
.../python/nn/_autosummary/mlx.nn.PReLU.html | 30 +- .../_autosummary/mlx.nn.QuantizedLinear.html | 30 +- .../nn/_autosummary/mlx.nn.RMSNorm.html | 30 +- .../python/nn/_autosummary/mlx.nn.ReLU.html | 37 +- .../python/nn/_autosummary/mlx.nn.RoPE.html | 30 +- .../python/nn/_autosummary/mlx.nn.SELU.html | 39 +- .../nn/_autosummary/mlx.nn.Sequential.html | 30 +- .../python/nn/_autosummary/mlx.nn.SiLU.html | 34 +- .../mlx.nn.SinusoidalPositionalEncoding.html | 30 +- .../nn/_autosummary/mlx.nn.Softshrink.html | 30 +- .../python/nn/_autosummary/mlx.nn.Step.html | 30 +- .../nn/_autosummary/mlx.nn.Transformer.html | 38 +- .../nn/_autosummary/mlx.nn.Upsample.html | 864 ++++++++++++++++++ .../nn/_autosummary/mlx.nn.init.constant.html | 30 +- .../mlx.nn.init.glorot_normal.html | 30 +- .../mlx.nn.init.glorot_uniform.html | 30 +- .../_autosummary/mlx.nn.init.he_normal.html | 30 +- .../_autosummary/mlx.nn.init.he_uniform.html | 30 +- .../nn/_autosummary/mlx.nn.init.identity.html | 30 +- .../nn/_autosummary/mlx.nn.init.normal.html | 30 +- .../nn/_autosummary/mlx.nn.init.uniform.html | 30 +- .../nn/_autosummary_functions/mlx.nn.elu.html | 805 ++++++++++++++++ .../_autosummary_functions/mlx.nn.gelu.html | 40 +- .../mlx.nn.gelu_approx.html | 30 +- .../mlx.nn.gelu_fast_approx.html | 41 +- .../nn/_autosummary_functions/mlx.nn.glu.html | 814 +++++++++++++++++ .../mlx.nn.hardswish.html | 807 ++++++++++++++++ .../mlx.nn.leaky_relu.html | 805 ++++++++++++++++ .../mlx.nn.log_sigmoid.html | 806 ++++++++++++++++ .../mlx.nn.log_softmax.html | 806 ++++++++++++++++ .../mlx.nn.losses.binary_cross_entropy.html | 30 +- .../mlx.nn.losses.cosine_similarity_loss.html | 30 +- .../mlx.nn.losses.cross_entropy.html | 30 +- .../mlx.nn.losses.gaussian_nll_loss.html | 30 +- .../mlx.nn.losses.hinge_loss.html | 30 +- .../mlx.nn.losses.huber_loss.html | 30 +- .../mlx.nn.losses.kl_div_loss.html | 30 +- .../mlx.nn.losses.l1_loss.html | 30 +- .../mlx.nn.losses.log_cosh_loss.html | 30 +- .../mlx.nn.losses.margin_ranking_loss.html | 30 +- .../mlx.nn.losses.mse_loss.html | 30 +- .../mlx.nn.losses.nll_loss.html | 30 +- .../mlx.nn.losses.smooth_l1_loss.html | 30 +- .../mlx.nn.losses.triplet_loss.html | 30 +- .../_autosummary_functions/mlx.nn.mish.html | 40 +- .../_autosummary_functions/mlx.nn.prelu.html | 30 +- .../_autosummary_functions/mlx.nn.relu.html | 36 +- .../_autosummary_functions/mlx.nn.relu6.html | 806 ++++++++++++++++ .../_autosummary_functions/mlx.nn.selu.html | 44 +- .../mlx.nn.sigmoid.html | 807 ++++++++++++++++ .../_autosummary_functions/mlx.nn.silu.html | 42 +- .../mlx.nn.softmax.html | 806 ++++++++++++++++ .../mlx.nn.softplus.html | 806 ++++++++++++++++ .../mlx.nn.softshrink.html | 42 +- .../_autosummary_functions/mlx.nn.step.html | 42 +- .../_autosummary_functions/mlx.nn.tanh.html | 805 ++++++++++++++++ docs/build/html/python/nn/functions.html | 89 +- docs/build/html/python/nn/init.html | 30 +- docs/build/html/python/nn/layers.html | 33 +- docs/build/html/python/nn/losses.html | 36 +- docs/build/html/python/nn/module.html | 30 +- docs/build/html/python/ops.html | 56 +- docs/build/html/python/optimizers.html | 36 +- .../_autosummary/mlx.optimizers.AdaDelta.html | 30 +- .../mlx.optimizers.Adafactor.html | 30 +- .../_autosummary/mlx.optimizers.Adagrad.html | 30 +- .../_autosummary/mlx.optimizers.Adam.html | 30 +- .../_autosummary/mlx.optimizers.AdamW.html | 30 +- .../_autosummary/mlx.optimizers.Adamax.html | 30 +- .../_autosummary/mlx.optimizers.Lion.html | 30 +- ....optimizers.Optimizer.apply_gradients.html | 30 +- 
.../mlx.optimizers.Optimizer.init.html | 30 +- .../mlx.optimizers.Optimizer.state.html | 30 +- .../mlx.optimizers.Optimizer.update.html | 30 +- .../_autosummary/mlx.optimizers.RMSprop.html | 30 +- .../_autosummary/mlx.optimizers.SGD.html | 30 +- .../mlx.optimizers.cosine_decay.html | 44 +- .../mlx.optimizers.exponential_decay.html | 44 +- .../mlx.optimizers.join_schedules.html | 829 +++++++++++++++++ .../mlx.optimizers.linear_schedule.html | 825 +++++++++++++++++ .../mlx.optimizers.step_decay.html | 44 +- .../python/optimizers/common_optimizers.html | 30 +- .../html/python/optimizers/optimizer.html | 30 +- .../html/python/optimizers/schedulers.html | 50 +- docs/build/html/python/random.html | 30 +- docs/build/html/python/transforms.html | 30 +- docs/build/html/python/tree_utils.html | 36 +- docs/build/html/search.html | 30 +- docs/build/html/searchindex.js | 2 +- docs/build/html/usage/compile.html | 30 +- .../build/html/usage/function_transforms.html | 30 +- docs/build/html/usage/indexing.html | 30 +- docs/build/html/usage/lazy_evaluation.html | 30 +- docs/build/html/usage/numpy.html | 30 +- docs/build/html/usage/quick_start.html | 30 +- docs/build/html/usage/saving_and_loading.html | 30 +- docs/build/html/usage/unified_memory.html | 30 +- docs/build/html/usage/using_streams.html | 30 +- 378 files changed, 23040 insertions(+), 2138 deletions(-) create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.atleast_1d.rst create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.atleast_2d.rst create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.atleast_3d.rst create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.conv_general.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Upsample.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.elu.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.glu.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.hardswish.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_softmax.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.relu6.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.sigmoid.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softmax.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softplus.rst create mode 100644 docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.tanh.rst create mode 100644 docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst create mode 100644 docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst create mode 100644 docs/build/html/_static/mlx_logo_dark.png create mode 100644 docs/build/html/python/_autosummary/mlx.core.atleast_1d.html create mode 100644 docs/build/html/python/_autosummary/mlx.core.atleast_2d.html create mode 100644 docs/build/html/python/_autosummary/mlx.core.atleast_3d.html create mode 100644 docs/build/html/python/_autosummary/mlx.core.conv_general.html create mode 100644 docs/build/html/python/nn/_autosummary/mlx.nn.Upsample.html create 
mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.elu.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.glu.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.hardswish.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.leaky_relu.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.log_sigmoid.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.log_softmax.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.relu6.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.sigmoid.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.softmax.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.softplus.html create mode 100644 docs/build/html/python/nn/_autosummary_functions/mlx.nn.tanh.html create mode 100644 docs/build/html/python/optimizers/_autosummary/mlx.optimizers.join_schedules.html create mode 100644 docs/build/html/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.html diff --git a/docs/build/html/.buildinfo b/docs/build/html/.buildinfo index e1d1fbf95..61885dc99 100644 --- a/docs/build/html/.buildinfo +++ b/docs/build/html/.buildinfo @@ -1,4 +1,4 @@ # Sphinx build info version 1 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. -config: 59497ac668d2f3a880c24eaae6f1b8ae +config: 2a306b1bab538696d27d81f66f046d0d tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_1d.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_1d.rst new file mode 100644 index 000000000..4137f119a --- /dev/null +++ b/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_1d.rst @@ -0,0 +1,6 @@ +mlx.core.atleast\_1d +==================== + +.. currentmodule:: mlx.core + +.. autofunction:: atleast_1d \ No newline at end of file diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_2d.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_2d.rst new file mode 100644 index 000000000..94f6c1d97 --- /dev/null +++ b/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_2d.rst @@ -0,0 +1,6 @@ +mlx.core.atleast\_2d +==================== + +.. currentmodule:: mlx.core + +.. autofunction:: atleast_2d \ No newline at end of file diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_3d.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_3d.rst new file mode 100644 index 000000000..ffbc8132e --- /dev/null +++ b/docs/build/html/_sources/python/_autosummary/mlx.core.atleast_3d.rst @@ -0,0 +1,6 @@ +mlx.core.atleast\_3d +==================== + +.. currentmodule:: mlx.core + +.. autofunction:: atleast_3d \ No newline at end of file diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.conv_general.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.conv_general.rst new file mode 100644 index 000000000..18a35545b --- /dev/null +++ b/docs/build/html/_sources/python/_autosummary/mlx.core.conv_general.rst @@ -0,0 +1,6 @@ +mlx.core.conv\_general +====================== + +.. currentmodule:: mlx.core + +.. 
autofunction:: conv_general \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Upsample.rst b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Upsample.rst new file mode 100644 index 000000000..6bd1855f1 --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Upsample.rst @@ -0,0 +1,6 @@ +mlx.nn.Upsample +=============== + +.. currentmodule:: mlx.nn + +.. autoclass:: Upsample \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.elu.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.elu.rst new file mode 100644 index 000000000..7fceaa9dc --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.elu.rst @@ -0,0 +1,6 @@ +mlx.nn.elu +========== + +.. currentmodule:: mlx.nn + +.. autofunction:: elu \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.glu.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.glu.rst new file mode 100644 index 000000000..f289859db --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.glu.rst @@ -0,0 +1,6 @@ +mlx.nn.glu +========== + +.. currentmodule:: mlx.nn + +.. autofunction:: glu \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.hardswish.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.hardswish.rst new file mode 100644 index 000000000..c824569e7 --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.hardswish.rst @@ -0,0 +1,6 @@ +mlx.nn.hardswish +================ + +.. currentmodule:: mlx.nn + +.. autofunction:: hardswish \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst new file mode 100644 index 000000000..18f8fdd49 --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst @@ -0,0 +1,6 @@ +mlx.nn.leaky\_relu +================== + +.. currentmodule:: mlx.nn + +.. autofunction:: leaky_relu \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst new file mode 100644 index 000000000..662a3a1fb --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst @@ -0,0 +1,6 @@ +mlx.nn.log\_sigmoid +=================== + +.. currentmodule:: mlx.nn + +.. autofunction:: log_sigmoid \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_softmax.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_softmax.rst new file mode 100644 index 000000000..114bc726b --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.log_softmax.rst @@ -0,0 +1,6 @@ +mlx.nn.log\_softmax +=================== + +.. currentmodule:: mlx.nn + +.. 
autofunction:: log_softmax \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.relu6.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.relu6.rst new file mode 100644 index 000000000..ce7dbb95a --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.relu6.rst @@ -0,0 +1,6 @@ +mlx.nn.relu6 +============ + +.. currentmodule:: mlx.nn + +.. autofunction:: relu6 \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.sigmoid.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.sigmoid.rst new file mode 100644 index 000000000..26e91621e --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.sigmoid.rst @@ -0,0 +1,6 @@ +mlx.nn.sigmoid +============== + +.. currentmodule:: mlx.nn + +.. autofunction:: sigmoid \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softmax.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softmax.rst new file mode 100644 index 000000000..6111f8079 --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softmax.rst @@ -0,0 +1,6 @@ +mlx.nn.softmax +============== + +.. currentmodule:: mlx.nn + +.. autofunction:: softmax \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softplus.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softplus.rst new file mode 100644 index 000000000..b029eed48 --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.softplus.rst @@ -0,0 +1,6 @@ +mlx.nn.softplus +=============== + +.. currentmodule:: mlx.nn + +.. autofunction:: softplus \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.tanh.rst b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.tanh.rst new file mode 100644 index 000000000..45b6f4ac6 --- /dev/null +++ b/docs/build/html/_sources/python/nn/_autosummary_functions/mlx.nn.tanh.rst @@ -0,0 +1,6 @@ +mlx.nn.tanh +=========== + +.. currentmodule:: mlx.nn + +.. autofunction:: tanh \ No newline at end of file diff --git a/docs/build/html/_sources/python/nn/functions.rst b/docs/build/html/_sources/python/nn/functions.rst index fc99dcad1..db276afdf 100644 --- a/docs/build/html/_sources/python/nn/functions.rst +++ b/docs/build/html/_sources/python/nn/functions.rst @@ -12,13 +12,24 @@ simple functions. 
:toctree: _autosummary_functions :template: nn-module-template.rst + elu gelu gelu_approx gelu_fast_approx + glu + hardswish + leaky_relu + log_sigmoid + log_softmax mish prelu relu + relu6 selu - softshrink + sigmoid silu + softmax + softplus + softshrink step + tanh diff --git a/docs/build/html/_sources/python/nn/layers.rst b/docs/build/html/_sources/python/nn/layers.rst index 0f5fca9db..f6755e8fe 100644 --- a/docs/build/html/_sources/python/nn/layers.rst +++ b/docs/build/html/_sources/python/nn/layers.rst @@ -40,3 +40,4 @@ Layers Softshrink Step Transformer + Upsample \ No newline at end of file diff --git a/docs/build/html/_sources/python/ops.rst b/docs/build/html/_sources/python/ops.rst index 09e2d5f71..2cc2b6d6b 100644 --- a/docs/build/html/_sources/python/ops.rst +++ b/docs/build/html/_sources/python/ops.rst @@ -25,6 +25,9 @@ Operations argpartition argsort array_equal + atleast_1d + atleast_2d + atleast_3d broadcast_to ceil clip @@ -32,6 +35,7 @@ Operations convolve conv1d conv2d + conv_general cos cosh dequantize diff --git a/docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst b/docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst new file mode 100644 index 000000000..0586e0694 --- /dev/null +++ b/docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst @@ -0,0 +1,6 @@ +mlx.optimizers.join\_schedules +============================== + +.. currentmodule:: mlx.optimizers + +.. autofunction:: join_schedules \ No newline at end of file diff --git a/docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst b/docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst new file mode 100644 index 000000000..0a0dee682 --- /dev/null +++ b/docs/build/html/_sources/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst @@ -0,0 +1,6 @@ +mlx.optimizers.linear\_schedule +=============================== + +.. currentmodule:: mlx.optimizers + +.. autofunction:: linear_schedule \ No newline at end of file diff --git a/docs/build/html/_sources/python/optimizers/schedulers.rst b/docs/build/html/_sources/python/optimizers/schedulers.rst index a83883ddb..50855e1e7 100644 --- a/docs/build/html/_sources/python/optimizers/schedulers.rst +++ b/docs/build/html/_sources/python/optimizers/schedulers.rst @@ -8,6 +8,8 @@ Schedulers .. 
autosummary::
    :toctree: _autosummary
 
-   step_decay
-   exponential_decay
    cosine_decay
+   exponential_decay
+   join_schedules
+   linear_schedule
+   step_decay
diff --git a/docs/build/html/_static/documentation_options.js b/docs/build/html/_static/documentation_options.js
index a117f34ff..a168ebdde 100644
--- a/docs/build/html/_static/documentation_options.js
+++ b/docs/build/html/_static/documentation_options.js
@@ -1,6 +1,6 @@
 var DOCUMENTATION_OPTIONS = {
     URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
-    VERSION: '0.3.0',
+    VERSION: '0.5.0',
     LANGUAGE: 'en',
     COLLAPSE_INDEX: false,
     BUILDER: 'html',
diff --git a/docs/build/html/_static/mlx_logo.png b/docs/build/html/_static/mlx_logo.png
index 49400bd8d51fa2077363be082ae6c66c8665ac2f..be122bf7cd07560f1118eef458da8e472de6a8d2 100644
GIT binary patch
literal 78025
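Setting the regenerated HTML and logo assets aside, the new reference pages in this commit correspond to new public APIs: mlx.core.atleast_1d/atleast_2d/atleast_3d and mlx.core.conv_general, the mlx.nn.Upsample layer, and a fuller set of functional activations in mlx.nn (elu, glu, hardswish, leaky_relu, log_sigmoid, log_softmax, relu6, sigmoid, softmax, softplus, tanh). A minimal usage sketch follows; the signatures, defaults, and the channels-last input layout assumed for Upsample are inferred from the page names added here, not verified by this patch.

import mlx.core as mx
import mlx.nn as nn

# atleast_1d is assumed to mirror the NumPy function of the same name:
# a 0-d array comes back with shape (1,).
x = mx.array(3.0)
print(mx.atleast_1d(x).shape)

# A few of the newly documented functional activations; the axis keyword
# on log_softmax is an assumption about its default-argument form.
y = mx.array([[0.0, 1.0, 2.0], [3.0, 4.0, 5.0]])
print(nn.relu6(y - 2.0))           # clamps values to the range [0, 6]
print(nn.log_softmax(y, axis=-1))  # row-wise log-probabilities

# Upsample is assumed to take channels-last input of shape (N, H, W, C).
up = nn.Upsample(scale_factor=2, mode="nearest")
img = mx.zeros((1, 4, 4, 3))
print(up(img).shape)               # expected (1, 8, 8, 3)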
zaLhK(&QOets;FVl4IQ9kkBWO<#;)bfqGg1vMAi5H>0bYFbT&jQFMC3PCA8=t=~S`N zw10j;|5;s?XF79EWI2A8#zE2QSPa)UzkQ^ja=-9+09NusSu<-^(9k)=vv_@Ye&Ml> zc2{aQWV9sv)DAQazms?j7AD#?I%*+s-XGi!7$SO`N?}>;X(Z$}13L(_iL_yxGprKu zc+fI;wp zTjO<;oo;~`XUZrsqifX5!o9M=sSC@NvCsZ>kCmz?G~z3pb5l@}1L&ra{b~6cT_^aM z#q+(F>Z-#yNMN1Q@iV9K+0Kv`$AeLrQPj&<@VtB3YnzCKE3Mjd-G{)A+!R!fGR@F+V4AT_-^@}?&Jfo7hl~}{c4T1v%wD=3O8f5vHkcP z*M(jSnIPX$;W?7S?UQ%%NCDwTIKRCTe6GK~OISm}M}^}npRD5lL3#J&Q|}z@tPs*m z+Ru&8k<6)(dbJCzsE~x;GP{08&z_&_dm7=}%-fW+K!$&9Ye8{W^6}&Xy#J6x$XE*> zfnx?-V=3QuS1ckNSS52Ec?(5FBxXbz0|^zG6zM6VgpAn4kjeh1EQ8F5^yFXlC`d>s z`2Q~dbJl-LVAZJFZEu-6ZxoJK89|#u|S>Q&!AKD2J8Fl;8h7Gjz}^ zeD6}cZ+O{3YOAt8@<)?D3@*a6Y{w7Dp%9*n&cJqwXymiS2|z-9jf9Nx4hiMIih*wQ zoSmcFs^tIQYQz~opBX#SVEo?;RDzMvEMHX+JpYe2{#7K-?EgPaBV%Y#2mI1~$`UN` z-*@}(i_jRj|FaYSs@B9XcGUW%koSM@81O6X#s75o-wnKbg+iRE`9qEI|MnWu##y7uk-76N{NjJS-6YTv!+ITr_ZfzST1LyvZ$sqa8`5diEsz>A@kYxk9T zuC&MV#wY>~tG4!Q9mki^cOww|A3}0-c6G>?ES{s5xXv7n>iE(nBFB%C@3RLkrQ5#9 z2q}Q=#ha&swC|tm5Q^a~F_pWCQAg0}kWQU@WYY4n zVp4_Ku^cyL#}+~7`^0A3t|mWZB??lXxuTlc!eNN#=axT7zIX%8sDE^jLL9?yGh%$V zXbs7O9D$Qlxc%j*@M&VBNOMezI(u5OykbdWG|yy7fcXY(jok#r9Dc1%co7!`xNo6y zqY|?ES?qoy#$BQBS>O!5=iQmfGOKeRM(EA)|dD_eFUvPg25E*UFu% zF5eS%d@qPtlKgduDzoxMwXzP5x;LM0)_TbR54U8c=$Lb}yU~X@XA~JFsmH5NL z80mm}xtbz-i3a?+Gggu%IXHvg!QTH6{1{^~RjgRllqc9!bR+4_U8X~YDO7glquL?F z$+8(&Dp&T}qf#TaXJITPzEEm={XL8Mz9cLMc4V1hGgk!_stx*Cha>26UvHTq16?aU z2UrarvP)XOm$_u6!B2hq9R1trf0z#?@ASvfis|cE$ttQz^Q(`WQ3JZnB*9|8>YfF) zvW^Z%1lgGelES71l7l|GiGmL*B~rpsVPid74n$2))o?Hhk}@Y1jP7Y%1HmOBJXg zPhL@+KarpWsqiZU5WpB=scyS?gZfp{m^IJ={IE0vhV9AjhgwLTO1NO)C0)mno@`m&HaOv0l21R7J0T zQ^G=%agb8jk%Q5u@`toVjch3Gv2Qx~>zKNg^h`Wc^O&T!4)BE}BZUKWvGe0l0J-wB zktzsF4$dn!%Z)(p!phM~D`brV5>nT_PL9q|Z>zeaKzblx&b=T1$FK!@=QN;xf0FFy zJ{L9AHEHe?A*Ddst@Ewg5B1`I-7F!@fTj7 z9ibNDD`kpUC+KC6&mXZJQYpUTok)^zS|_T1g#%C3Y6>fTjhRwZO5WJDe?N!cdm)Ld z8ljC>O|1~iw>S!BHBmn7^gi~!T>teXOGH2BnL&qdv)9!;oq(C*Y(LW0sczp|@kmB` z0)x^`Q{SxrXW~p#;+Br%()sx5tLpM~EryiRKPo$nxv1u{2FL?q9}=&cCoM}p(fXAC zad}C1APHN80Ub+^51oLI5F0s;4pv~R$0>hx$Ngu8Mcn&n^*Fy2a8_=PsmHhlQusHt ztypJ+>$@=--2tH&eczW(M~zE9f%u{QJmD`~G5GxvzR+B&ChP={(&tW?%5u0QRrQSL1npV+Jzd-CdQ4uPis62gVT5 zrCUH9Ed4(B-$w%{zt6%rOE_r=?eX5M3VjE)>YE`} zN!Fxw9jce{zy`Gf`HA3H;Ym6V`9ocAT_A#^gTR6KLrFg}3YEmN*y-l*kxk5Qn*A%CUuuTxK|UV4U(QzUM8mKQ z;abA*ot>SG1O_|Sv7aHogc=iy((DZ0BxRh6oWBchrBD9TC?QftZus=rC?fi7e)oq` z3q~yx2WL_85+)z`>un7jlW0@`1iwge!4#B0m<15*Kzn4a<3=;(JoZ58f+-P9^~l!; zMjunjoD6-oV3mNj3g~vm@RL%ZW=92dOhG)V{mMBIGYaRZGc83mSDARys_r#dTjkAw zE^C&wr!&QIvc7O>Q!o85A@&`rdxD6kP|)C}u38|42k4;FW6gf4&EK%n{a-W4XC0zp1gd9O#q#7#bV{bP-gf8go$`%# zFHdR#2+H?LkN^fbfIEOm(56y&&1zAjmpmA*S2{cec}D3#NLbp`dM zey!+aS%T@|6J$Sy(_cY%I~IF@$q=Y5Y^Y!SJ>27H{Bj=sVVsZ5`6Y2C)fe5CCacLp z6*oDLTuwJ`vW<53}U_as5~)+xZkyaxLzCH7=ImypYjjUyEvX z2kIzPo=rWI^S5{hpB`t3>Qv;nQeGr0v9#`9_f21Id{1LXMnj+=qD3vjnMANrm1LSy zwI$m`6c_&$4`REm29_d_blq>hu7`(WQ}J%k7hMOnM+6-Wm?{vp~P?aJ#1 zt35BPeANeFj9A~u#RXGh$$m<{+OFU4oVfvVIbM6p)@GmvK0$xUD-1dPSkPC*b^OQn z3MBy}*4j#ME;OV0f{j27`0;Zq=G=11412CbT_ai--CaW^4M+v)8pI4y%=B>iu-Z;u zgWA25Za)@wq-|~Fw=?cdY|8R5fR*C`N%uUfrAE7iqj;=pvLF}De#1=}1rG%~Sseq- zqu;w1YHz(H1N8$~T-EOvezNkEXNpI6oHE*$UDY<%fv+zc7bhhJv){e8Z>KDN@(}3Rl<11v5e(MDTFOt*M~ltYp@fKR<@s5ffDOM z*kV><*Ja9BTJV{1;+R7Lc!*8z3uU0P7|`8dJwWPvi$AU~K<$^oqsY9tWO#k7s9!n# zacqa`^3vj2OI+Ejk3z3Z(m&YG89U1*Sg+zxpVQp*^b6|&On{{ReL@L78v321~jSZ#?vTay< z zq#R5qouut}#Vz<$_O19;iNA1NgfD%Mk7a=e?NZ6a2=mZ zu>^5&6}ZY5dojwj@;W#ZQka|o(d^Iuo#Y@T9c{0KsjQ9a;$&CW>?*vt-AkVB&}=6; zNz69zpbLE9XrEqg^{CFIdx>F&QlgYKS{FyND>NGOBo^FYD%uCAR0mYeSLyBnxPg67 z0OUD`Tc^L4^=9*2zWA;iEbU1uT>SO<5KHEXnvt@(a7L#TdoW{+ZnE>i 
zhSE?Dk-Up;_>n2hMd=OkZ=da&|4kZrO970LUv+1{c;JI5jy4HMV@SkV?58Uy}uqQAM?Md8x5pDwE(D%jjhuJ28ljub-`i`DJC8Tf3#m)tLk%oW!{dikpD1Nk~TDdj>w zwMFy)@%wf^ilshAE2;64gSl0-mr!+tIO+X?QBu+3y*6O#^Wk87=$;o{-wwsw;xgV3 z?I|?k+~X{_UD11YbaCFtPz-JNbBpYAtqh9pv|AyGIJK=jUv)L?1Yzz5C(2~^s-G(q z=PP(0TF9;X9C!SpRR;a1fthN*2Cv%{kD;>7R6CRyDslT!QJ`&TSK&Nods@NJFD! z7dJG=xb&`vMbJ=W_%Q3F0o(PSf`l?>60tm#Yu|kg_inmz-yTfbnJAEDf1ziRV-8(v zo9RfgA^KwB*S&5Kg-Sq|rP)N#oaiKF){_xq^5_-MXgH8!ueMq>t^Spu!Olywo-FCR z{CV8>>T3=WBdgJD(Zk)rmnguMGBEw*hb4i+_=YN)c*@Q_u*>k-Q~=7@7wCLh$B68KZ$0ZexrHhobPY$`EZGspXo zM?QuepV8Ivh~}q(T&C<-=0}EsA=%gDY7nkeiNGN?zQeMJ{3N^++aA-GLnmXJg?o4X zwIX)SaUwUA-wl}y#;$WjJ@~8KiyNI%4N+(zIt&YL)RaFSce%dj8Kkq~GE%x!2*ZySJ>?>~gf;GiWgjmskePZ1P#bIxLl9bTRX%;HNoz_$jg>0=(7BlpXJgq%k+m&=_I^J7|LcZ^IS zKa2Vru@JTKpW9&3Ucgv90`CKLL2XqLp>|aXU137x{@FjzRpr z_2(TZqCb*mqS73Z5zweTZNQsQPXg zMGw7NhCT-oyt3XJeZI(Jl~V7TL9=B5&;NOuBsrEdnA&yku-38Sd!U|!ht^2)=ByhG zm|Rw`JSXDWQ^@)NVC z-_-^gF=Pus^`G$4X}ufW?K`S5ju>p#R3gLQFgMr0g(@d8sWZEi9#DCiM2_azSzu&} zy3F><#oLO%f#9aTf^s~0AXx%k{Twgs^>?#)4~Uilp|e@_B>-wPh4Ad23)3Ru?iF{+ zd||$%VxNY}RI(7*r;?%wLTDaU{Cm*sSv@nsWpZlkUAalcM?1>HeKw~-B9|nCZ%=P4 z;2%=h1-#3dmliSZEL9>+`=_q1z*x|^%6jg%3o?kmf<#{spUZbzj2suqn__;U3hkdcpj9W}~dQ0uF}ng6;h;5^_pk+44xeTU+(sfD*~* zi9}qiJw5w)e9D|^Z!&elZ$A6EeYVJAK!5-?cU)Z9+xotV`iBYX75Rb_&s)FA+VQL# z>mD_I;>hE>2q&TZFK;ig{qW3{SaRUXXjX~)a~nexx?Z-j6h<-6k91*Cl_y4Ru1?+e zpT51?7XU7{hl*P(-PC7+LpGrG_Q?K9m!@$WwI$us>nJkxZk;QOBwUSCg4eW@TmhFXkYd<`d8-!O@^pSDF69cj#p&_3b8AdPR5gXA z9*!17k&?2~V8v26cGcRMB{s};L6i@f zCX# z4iP#{c3JBcHog^$%#i!BEaADm1EQrP96`5(dIA2SUDlz_cOoti=*-+=xOOu-{o$@= zi{a5^0dVyMX8?kKwyYM7Gp~cfpi~I4xcB@(JMyBNld=0nqfzmU(SPad-tuq+9C4i>R~x9dM*4T7+X;&!&3vL)V&Z zWL?hIf0YCYn3iz~_PIAr+t?N}3=!V*O1LV%$p{v*Ym_rpN(vS*7jP(yC}$d)RG?)E zWFVD+l_=#fA346r_=(BUHZJ0c!?M`ldB2F}tnHOL7ut9%DoA1!Jkx(iRb26PqapnU zTM~%60ytX}wG?p?0X8=8QlkT3&9s3eT0VlkE}QVIW6Go4bqzLy7q+^}xtj@nA5f@v zPIUQ|y{X~dNvG}2oiZy#3) zg1vL8K4dZ|IrSXN)9V35V!%@)(beM^lG}Xe>@*y*=Ueqm#sL?n2fxy_ z(^+ND5f0F^O_i4gkm;&?SaRWB6Vo)<4oFd=QqZ1^W_pu*e!D!d>r2NRQ+{Egw+|wk z5@A2?QXvIDA6R=vl?ApkA0iZq;IsA`6m0mzYS3%h!Fl@ByX|v~_q`fWAd5}bZkKf2 z{cNvm1A4hzi5@xyK+n-r9AtmIh0b9%H`=e&h6;mOX?$F8m)BV#HbeGta3?)WsXMS+SID2kYS3;*!`i8HHTat$qe z&?TupEXB62OO1nn;*~$iPl20kunyI1O1g{|Lkbz`q4BaSNFs-15Ds z0TNuR72rF^Uxvf*8DzAsaxkJWz(jn+y*P5;xd=Wf><)63_43;&SyT6rFeUXG3bkR~ z9QEiZZmjGj)k^tBpVQdRofBE=$G19rlC_y!0~wY6uyu6teug0@W7Lzi&b799?V7qw zH<^*bZ3n$Ws?=0ZmlntplQH4d8eWp{Z~G!eb(x=?Bi0U;ShSo~SQ3YfOS$ z^4_5=4~8m&Drww}flwG*7g~Lkr-?wkanG#$eMnSDGcKUhzmw|-3X_y_(3$+UhIz=u z8=*8>o1%jscXqxc7kXd!+SO$UT9Krp3nE$HQ$2hMdjHdfG2F&tpOam+&!~+R~b(Duf;Awct;!ppnEmn0xn4Q<`ur@ zY5Q%uz?!d1J#{~r@@Ij;Fy!DoC#u$*?xV-gfAk*mXt@65`P52SCd?SacD1%y$iiV^ z`YUZm9!Xs+HxL_WrT=vib(!4W>R(z8ztT#bvHLfYWbQXBr@}*(P8B8ZHDwbC%=LV| ziq)MIsLtc2E);*jBU0G)TAj&Ud+XX)I{aFjy$*MVuR4d0pI>^aB!pft*$K0J!|YbC78a4Oy2aA=rO))B;2FW-mAC? zu3n`oVldp?c)9 zlo3Y3QkMdmxE8NplI2(mc5ZyV4k0NYYM-0-x_AKr z+Gjp&ISeINjvyd}8)k_}vKAst$D&`7c;!yieb1|b9y5)U}1{f`io z$Xj~9=D9fpKQ8_lf*3hQhms^YeHR1oIt-u7Sb%u$3$8J*)BTTg@oARBbjzw@7#j;w ze#N^4%lj*bw@xo{_UMpT^vE2dlA;H}StgcG<{DCz&AR#>O*w;i5$DjnUZCb?H?7-! zzkNGQX<{#Ype&B{JVsunpr^33=ZUBfyR^)AiGum4a^N6oXoOZQVbY-0zkPz?gg+f9 zeCX8gT=f656(+;6{K>}~MTKl=xNr!bQWj++Y%aIqu2Nz19y=VBGBaK}9D`$lRy$Ik zu0O-#Q)5GsXI{{EQ1}CMH)o7(M{pZ;)&Jpmhd~@FPl{K$ruah66ce%O=%TjR8*U0- z1u;+99P}wkP$(nD$H;)ymS(qd1e2Xiu~abP#A`(4*PB|rZt<-rv4`f7e?jor_dU~Z zQAI9AqI*Zk++=h#m`Q2K62DSNp9U+*n8{lUaPwg~Txs=SnB_>Q(#d)6b$2&0eA8*> zo*;VYdo@2H@6n?%7xN2By|U}lTZq;gio2vjs-Dh`$HByvSci_#e;7a9geKe7F?3!? 
z#S@GReRP2V&>Udlaj5fg2v5ngBr3q@_1U6Lb#S5rf1OASFDjIyyZMZ3{su4lzrO_# z3IqPZq5fw>5v>XTB1|)i9!nGU&$t`XPlw|LgSREYTVo81(0W0M+)&i37drz941UYa zfz}0zCS2i!8D3HP!VV+izPOXi{srYDZ%mcc>a+T+YzdpSQIrb{wiDvMk6=b7Zo~+` zzSjbJBI=q4%Q!F%n)8*jN1OqH5f^usV+6U$o_~}bg@m>{<;X+Fv`x&+OJGwW;c3jw z)MXgf%WgOj`L=@m*D&o1!39hxqUJXK1UmS@A^85SP~|4!84Az*X+I^O+&JO-SECXxG(R{z?Lw7i&##=V5^B6%{^+qrq{-sQeN3UA*Ic z*=2f^A_RA>$!WzqZ>pJR7SOjPCl_}2<$v4e%z_9nP?Um(u?;=U6sUKeX&RG{GQpSk9o2br)*~fF598RIfbX^1Uqdnm%Uw5#bHqZcBI#|j`*5*+h z5)gMtE+tFT`<2KOsW%dwk0zuMZ=rxYS#nP$<~wfC?oFpk3jc*=dlA0Y(6QaB8=!o~`ho1D5&CGcTl=eI>ySn-`JJ~zA% zHz#~dX8PEO4^fyhB1VC&tY?c4r1mZQcd&RqjjTXP@NSqFa}^t3ioI;>a!U6l8^lg- za)J)Zc>o;D!ONgC)&81#!KQC9`i%vBZVdX%zw!fgC53!LwGa$wOthkk%)Hca_$IiN zC6lGmhVTk>ViS#%iigq{vm}7NLOk4?M=xgaxbTbWgx5Toh)Hsvg=QbDs9tzv7N%#m z>|8EbbdGau8}UPD?1iL8c}|?dgyH721cL#v860|(M(`ljA0Pnh1r6IBkpNK#0Cc2g zP<-8f^{m*$Fg*Nvn~nA;%FmR|T>;S!*q+Bi;H(hme6_-zEVvR{%nf$Y9fN zd{GN#m|;Y4C0j+Pm#nJ$%WD>#4h!+0Kf+Za@(e^Xf@O6EVxM2e(CO!vC_cSMh(cuE zFw3#*8qCzP4K{KEQF_PwVQcYL`)Xf7F*mj=p&zIbe-LlIcd$bpzzU3_ zaXcH(vxp$c?wBmiW4Wd;7++aTn|q;5lI*lGfps2nx4&banl8I3>YO?^*?(G% z9*;~?-5upgck!IHUD_*K&GNTmBfMA$yzwQg|Ev)aX7M4Cu}*}5mJ5i@Zq-geds&*D zAA*ak3Mp|d;uj9#Id%C;QzRF`thgV)meAm&kR_myUT?Rmxv!SFyWW4!~&l;JA*}DXv@ADZGn@?gieN2A&1oW!f={y-wRqR4fxI#?ol};-dy+ zEqNT(8FxQ?47VpTa@_1(d$bu?D~5Ujgq4M{|L(e_YP@c+UI0t?xBf3VF$zTlRXlF4 zG2~ezN#*sHi}6$1Z(mJ^slkt;^6OK$zkM@s5L41eP$XFjID%JMKW(bpEjaL+L>Zwj zJEG)Z7O}&54`kgQ=*R+g3xQsFwLzxH?(klxrX$TW)O!hIgh$-*hvzMTLF;5BQ=lwu ztMsR@=#uKGUS^~c*)4PTiobG|Z{tWP$|z)}SIt^&P&JAnVtUaA&I7&YFlr$VY0O!< zI{hbZH0X3+IYTpbP>q2edYH9upJG5JweZalg0Ge23Y~PRYZe!y+inI${7|%YOX5`_ zP3_baVT@4X7`ht)K9x|iaFW)A3_bP?dlFaSz4$}g7_Xj?>7jVwREhF~{7ZTGbgo!* zC;UTqUDif^RrVoJvEOOgEzOQ`^(CNVq}gl8LY!zH@ckt>6@OPZmaSu@7jX4Xc{9$W z;f?~!K9@O*=F`xOXc}_)|rtg**TvDUy4<%BVd(gX_BQb}R?d!FzHe?!Wmk z0{TfucMfrfF4dU&xFEduveUi}euQoQI41XPsy{+Q%^i21Su-Z=CtlX%7IpcXDhA! zTif^L^)}T_n6%SBGreZgkZw{q(uR4jo}X{_eLmB0Xc4^|Jzi4p6IUKIy8&HWW z)EopYzlCR3b}(TeL5n4(lSRPj^AT}H9zukw?;3PxCp~WvTNpi_v303OAJ`? 
zEToAoojPkbd!i$03AAWGFR_E7*W_}-aJ}*zzbYy)0N#kostKDj1pOd`03Akog+VS2_ZV7 zlbdjv>H)WJog#8nM`kPaOI(bV26SU051E8^v4j<|I#NgX!qdNF1uuLb!ASUH(88p+ zb^O~kA&O89Z`+cX2Fz}HcMU!~cHk3-pT}ULlf!6oS(>2u`~~@B3E@SS5s2qoqP|Sr z1pjbw7{zvW8#mj{y$0F8k;VAPg@||#$4ttxil+#7CwbKp3JW)vQ(cvZqnE#Zbn=%I zr?LO~d4{`^8EUD?x>gH~(TqDqQ8yS|&A6{NY+c;DS+rhp)jtbD$Np2tuin4bT>T#2 zXt%aXUJK!>o1sGlZz0uNb4i(oMiDHId0Y*rRV}+Ltg0G?al$P9DqYOB*5Qn@Lg6SQ z+9x7_?F@KlI1TA{hX?Aa@8!sjK&RK(7$$Tr_a9|vb)Y+4sg$vmWD0EaMaBZ-DkwlJ zFnS?E^U>p{Yhp)+HSGfY&)y~V(2PgkV$#p6Pq$$2e8#a7`(=4K1xzz_ra&JB%@j((;f- z=+GAlL39@IL7iAsr@4pR3+d7$Z3b(>420?pCsPoZj9#E~M3Bfbj1aD9|~9P;T%sAUz6I$^a*pFbgtf_23hUh zMT-XQQ#BoVB7ZuzZMO((%&V`1S_nbMEaU+E58qMkyUj8hXb8qIXm&1w{vqlx{<84O zV5{@~?o?}>%1P%T&9%oWB3Yu)FGIatf@>hAtZ*rLJDPLEqTX7Qdkj3> zFrMWuW8h5|YZH_TNUh?&nuo$B7*uyOVt@qX#h7~zgEr&c_@(2h(`zLNGUyU=)cf>F z7L@lAtOs3car@wMceY`4I}xi?C43P&UFBx|p@UQVg|v1?51N6NY%P}hmy@p|Rll1_ zy}z)N>W;`+Th#d5mmAV+;bVIu$l44a>?>adInTIDI!mptmB2?xLXjmopw8>0OY_{c zokg)9A+1FPOT0lXjttNWzIqlm{vf|*Gx;Z68U7f-xyzBlc1guUz}8r=wNdlk+GHo} zm1;(Mx;Dgzxo9kSD;#iFDa~j(QR^+$dPX3Sc7M9&GmeP)M-FnLkN-SO(7o#M_1Gm)GTV)aD4Z5$+3c&uM zZaw)CPw?}Yvwfo`&Dt_^eG<3u!7hy=Xc(SwZi$RwWSd`Q@Ky$gzAsx#pMCW!IaGegG{Rh&tTGsYqwf?Q^ zpSx!dUO(2*!TeJ@to9$%=lVP?;RRu_@n!xo5zUEW35g&Pg3EF_gz@c4qULVAdtDaoGhY&!E~M&wGWLIBdCb^JwggRofipW8$L4)m0KM$4lZ4I&2G#9uv2iS*KU?a3#y}Xz|Zbb>I=;6FZ@Th&4yZW$-a;^{T+kGB4G0)TKkGF1cjS zn#CUF`cPx^_KJteoBd{TdnnA%k&+Z}f`QL|!exl$=lQb{+ywDX)ouKrcwyH<0t3qr zF5g`Dq*ABwk{w-Gavt0k*r6ZLtQmXy-dD*97hGRnye0|_JL$M8;wYjjj@92%>x>PU{6vmJ{31{>gP*T2dVwumenb94 z$Z_e6EGB3H%`cB~_6eJ7CwC$=r=crb`UaL_q zTt3;bRJ)pm%DQuT?LBJ8cd}(bQZp%)C3^oKS#KE?)f@JW0)ikQAgR&~0@B@}bV*60 zbdPk(h=@pcNlAC7)X?4CBi%VP!_2$+KkswSI_rF8!K`(&=f2`s`*Y4CvrbDD%ER~P zN|8CAqWpM}LT2t{aMoGwnD|q0eQj)0DsK4`2boaH zy6KQBZ3^Zhr}H*PxOb7f*;?Ny4SqZ7r*97Rf>Z5;%Jar}BJGjUo17toUqoUPrT)l% zf&zMaSg>iMj1i?Ox2@|wLhC0} zVVZwqMqEoM9KRMKA8>XyEPSfxM0wttPVDD9^0tGE;FO~N;fA(3*UZ&@|KJ3N-m%ec z$pv_xkpmcR@ScTjmydX$KylZU()?4W3nhIzi9XyK#E{2O-$4Z=gS^i<@R?ENQoyR$ zMuZuPyQK^1Cg;Xzj)UgW*ba9tkh;oTXK!@@jTVWim-_S!HNw7CiUq>1AfNf^_qkgV ztJbSnOI{q!r_wL8Uc3=60r(oFB1@zC+f6)KU3qAW!qQ$cGQI%wQnCe#yCs839UO1UU;nqksZz)M=4?Sga6F;pPBx{A6|Vp0*rbJ74{nGy~opL6Fa#$^exs&gViLK z!+LwOJUBsu`#N`fwrXS!=!qsx^I-rI0T7OVoi~2!AIK?2Tm67+0)i_)SNM-s)fC5U zbU;G0hR?C1#LhgVH2v_phQ2Ch@UpKXJ?W*rdt4i?3 zQ^GIi;NC6H5f?eR8@WiNG&u`zArRM!Du9KX-oDl)pIWw5Z|7q1hHor-T{B(X`4Gm) zjhsleBw2s9@g&XimrW#t=HSlXVa~vrCmhR!i--Jutl9$c`!F8mD&e3=Thvd1fU9e_ z>Z_sHN^dcfs*Nx5;rjdL(*{W&kPBP~vZ_?F6TZNt{D=-AH5|)A*%Vd3e%zeMw;q~As1gcA8uFX7FxCu9lU2Zr|z4?(;8Oa%r7 z>hDYD1ZwU;wQDoNTBztu(o$x}4YPOMOtpnpyiT}_&)hk4Zvh#|f8+{Lb~f0OCP9PO zh<$NHLHVDp*x!Vc42&AtGNxD6$Jgmw84eF0D@+_O;G4MuWe$K%%hc*__zYTOtzH8N zkQB2DR`F1ANAbe9Mm79~I#q-7>@7*RG8D{yYY2;dWIiZ_YL^~J;iwWtE4;qfJH_>U zW2=Ap!BUO)@a}J!hxhdyo^6WtD??5Ca|rDQM3mcb@`=__M36*WS;{n6YsEcH3|;sA zCxDCF@)kBtnpO{i656*_`k29&Kq1IM2IF!6t9LWG(9BxwV%xYR9e;4VKGjIC#joDX zg6c1MbF;yBm!-L@VO$}qnbH%Etu_7NRbBqNrh(c2z^i*&ZCy>@NuzX*)HKrMuDpBp zpqcL?Fhdv?Ylnq*zU<(?y;xDDH$4q_B%%#>0oez@&pCLV5|6bIq5)Qi=V>bocx8VD;ld(A^LbE%?5+ zye0MRDbA@gN!x^3{=w7Cd-jKikYtVi&lcO)pIP#7^(ifvy}!nc%iMiHmZb`&0wU|B zTZcoBqy4(=wZLHB0pu>Q93^rX3XlrbiN^qafoOuI(^{^~mh7(L`E^Pm#v3v9>`vSJ z*_evDghSGVYOm_*p^vv>QbAe}f;5S*`81s$lG=A9uK!->if>ksZB#gUs^_}xb?Hxo zz2AG%jTaH4k*i3J4O2>bu*)?+q}?>zIsCIfY}k<87Rs_ex>bg~DX%&KMedv4zqTGN zBvHRVnWD$Y?}U380lY~OA-e=mV|~>hiD~c}9D}#g-GsbGyt*&;t#R9C>D@SD*|=y3 zS7v``ci8<1Y25570C29$FUO~=-#ns<+&u}^xoEO4eC}$S^e%orf9=|LR4H7?cyW^p zkLQdk+w9+KRd^r8LT8!p&ssQV;#>FEbw{nYyNWJ=w?MsMG6td;1#x>QG6SjS+kj7{ z-qhs0c4rEl1E-rt4@4K}VmYt}JX|@dt(dS)T(3D*qEFsy6*{OL~cpV+r5#V(s%X5`xw6d_6R$>CZ-kB%8I1f}3~yxP(2yg^@6 
zYKKet5`5Y*vOSfFsQpr@!*F`sd?J9nFEQmfFD#a^5dCib`Ftc6PC`S8OFn^!Xdx_b zTOD_NdYP$SD@e?fApPkaU2WHuxXFhpZ!c6!9w#=Ya0IMD98cn~jJozTKWJ}Z_*c!C zAxoy^2kthk{e=;8-i7gjBHV;j?6(``%$En=N`hz)6bDBDoCv@@fT}MiHlr3Q3UNVF zi~Hm*)H@oAH^#$NfHIr;H1#=QgfS~b?3X!7=MybO+b{RD%g&6Fe|6p|X%aj9ou+=# zLtyOV#A}31;k8bjY=?}Z1A1(~n$sI+Us?;>TV8f>q}k+==_0k(0a^ce&YA)?w`w>b zC42f6wL>(P5tjhm_3FhvJ`+W2v!LMB+fw*R7r*ZdAt2%^j=WchHbO(5j9 z3%F75JyP%I<(2&q=GeL$PzMlWxaVQo)e{xJQulhI*&PyoB z8~|1ro;o_p^7-Ic`>+waDH)ZlpX#>kmy@}U`NTQs1yr_DJMgsrKPp5^YkVBTc`g|(Pmv!K673bE(I`F$*Z?f|a#`40 zS4>Nc^owd6mKMW)X-5`U`2jYYkgGrbWs4>)HkogVDiy~ztKXMJ9evE4*+l|_0>8lr z>jP$>uW&yQ=Kp4l#v87o-xGxGK}5yDfYqf?BIvK#=3mrm zsqsAC=A$y(OLh=A%Sy5N*ftST4{?_MRF4HLcQp$QCsLYe207D(YCi4p87pG;QWkXj z$x*io#RMs;7? z&TYiQ{ojsON2Cu7TJI%6;ND>Ub? zJQd&B`8mng#qlzp&l5m~ifMv~C@Y(Zvve}RA5=-HQWXMc8OYZzp)<(!!c=PeS%~eG}xU{ z(27Ax2X(K;U65cxswrJA$q*M^7gmB3mQIZIlS#^}f(84#FV;3Myh+G@S|KQ5Bx)bI zeiyS^!Y{KB&5!mU!PygDR=_#45hOqA@p`d>NC#x&)RKshblcUa=`tbho&^ETXX8#j zkj5}XX$K(?=slFEZGL`FP`!Cu`0q;+ZV?w!uL{FwQTq>dZH?rc(dwHj?(1bggUFslJV<9gr^KD}J=CF&BbK z$g^mMmKIjWzr7XLCoh=xcE#Aw_;r$)JeAv72>rL*>RRA^Ir9X_zTI`xd|& zZG&jQbPGdg4fSu2O^+krq`5!LK}OfY9~}$2!aITsuhS~Nq{!$+U(U@yCcE3@=AOPW z>1g9C`s5uuCo#U~`NPXkDQU+%Q882QjEq=chc!t!UOrEy9?=&yyTi+Rk_na^FN|1K zaJ4z`zMtzfJreUi&uzuU)?yNAhWh&0@;_+BYiA_d)7U4!d#CGnTc|BDEnBmtlEbfW zl)4%Q^D?!KW!p(ey~DwCK5F|X=QW>^{;k5vZZ>oC`K>aI4;Mn$Z}x1Vxk*rXhfkQR zjkgTxGew?;g)WfM4opsmOKE+jW|^!DgcdBzeukFpI!ufd^@W)hxw9s)ieJe|6T6b1 zNw)#cg!4bngiZ7B%F*@ap!L#^1Be-0T-5NX-|P`UwvHU7HWbu9j18JXm!^jb=ko)B z>yYe^?Vd!OHK{1bNcsPgBN>w{bfV&pXUVd+xr|e!05`%Ml+Y40AX}$wIqdQPy&d)Z zjl8QsDs2jhv7xT?x*jhJ1D1=ej)ly#>(YnCYrn82cQvH7D)O?`Tu2hjfhHjE$`M#; z>wP0`ubERq%c;#G&3zUS&APMYyvPBvQep~lQ;6N!KCh#2H>OVtzdX^|BYv@z zdysVGw+0XL6H#}c9y|`hxjLu47c4shlOlj(;TTCJey3fkAg2E~kKF`|xRj9;<@U!$ zzEK)6r>lHdz^CcCuqRS!q4d)>(v4AHY%83u`$|6`5&MwOWj_4TI}(;GRM(y~xm|pD(S(iL3Zl^JyE<#mga__G zeqVC_nn&S^ALS?E0g4vRvQ;(E{mk*q`CRC3NE~xx!j)~1W#=BZ(8noM{Q$Mz zjBMOtkA0IM@6+){$*Lg9ehhq-hX||JyyZ(hCjD!XsXMVd|u0djH5&c`Dv?rA?7=G2#Zz6qf-Mh{6A#0tviJItK%*W~$(-SI(ENGdFZM zoeSQpUT7ZqIay#W{syQr^Zfw6CI^WG4Br5uYr`o`knD~oA4y>=u*93b2(Muha5cyW zJLdoXNJ~;62~{!X9L=*WWqM{Cxz(HNYv!>n=MV~$ebXMHQTAJjc|l~`w{JFVip$L^kP0Q19l1Rd=nZwuUYWqGJ|m6X7*2o(`TmA|vX(WQ;x_C*B1G~sYGZ6DP$nb85;@3L!3RJV?!s~i9}$E> z*(tm6GF0sS!bmdW02KAbiXizV>dJugdbe{blWot3L0aN>!;s-*C0K>mD5r{I>xMkU z@VD>#3^-f9COgC%24~y0BQ>~)oV2THu__%jF*d@@DZ{@sZK`b=%iZN=`B-5@WppoD zioDcaRJC%eR;32U66ddp?xwohS!+g=`|gY0T5k5=^1HG7Z{f+}qz)g18wPF7=~_y| z-_Rk8cr2BAG?7NIyMqeBq$6zVr75ERv35G}110!9zS!66E79PG{o!iFf#=Q_4}y6w zG$35V_UdpY?(MhU+G);n&cW9zI~y?SJ^T9|SpN9RUF62H5zk9;wY{l#14i5*2ZL-7 zc@$LxSkGnoW^7-$BCZTX@0RU14!{?u&aStE32r~tC5#$B{VYm{k1t9_B3HlOACi~* zX5a=Tn-Kl_Fngetwmk4xQ|b4>OoDxZpG1>L;)=d)9U_}SD>^puJa$IPN)`IVBU>{q>k+}=5S$Ovb}7Sx^B z@WTCK%mOI2WRrbgncjOcue5lZS00UhojXE~)D!w27E0uG36!BX(Sr<@G>%$~y;D}t zgyoxYkl1xTgh9h!VXoNjh1Nu(|1qSQ{QRRF z2vG(TeIcQOkbA`h^@#whRz7VkyKvZN?}wr^$1+KOS^;#dEen#aFW6!Q`1!?)1>Kd? 
z?~D}6Z$Owqy>|BKxN=j@t7d!77Y0pVPdhM2jPLyh z$Csd65J00LSKT47@`%G-nWp#{6p!0A3Fs21VOScI^)Qemp>2E`EVY63AFTb2PMfXy z*~g4LF4aO(`KDhOn%?`sW(`67$Wwgezk=gC!3Jl>C&GP~l@BdqHl+kn@7iv+KmLYY z{e;qW6Gcl_xI8~co4sWX{1YHhZgeBb+cE3D4f)Bylu&m4_#?s*nDgfAQ(R*GTJ_=VR zs$yY!K=k;jWZDjLQiYz=uWTM<+P`f9n?!J?>cZa|Y0R_NO5TB2Eo3;c%~JgzTjagQ zKC?hkc}v7gYk|`m{p6|b0Y8=S>T{*PBjkJQc8k@unhXFTvOirhrv$#p0b78n9VN~} z)cFA5L#ZH3>0k~5ycOy;FX^GH?=M(LBBi}ZtAP0}*!~C!xo?KpBX*-?{P7PPK#hu4 zj$tla3jUIMHeTWpgk^Pz2y&$JpzKoc8o%&^-~@u&zNG%sC=wbriA3QCgz>hHa;32em^ zdI?e9I;yLgK%|R$&VTRkP|}pEB3FGxZJjfz7S#XU69_9{GtD^voDyDU3QPEfai`3jXHSJwW6^o+sY*D)WJ>;5m~Zm6iy#q3uqIpG&i;EGO=J{EwA; zaEjU8i2B!ki2|w>a2Va2`e6t=U1)~i13s0wlhaDm9ylzDn(TpSXUE!c$L>h$8zx@P z!=1zdz}glCI6MK~ZY7*g&oACTM$n(Wy~!&7YTq`(%bV2-)2Ukuh=LQa)~4Oc{>BXeI76e@h$DNi3kwB9>d z)qQfUxM0uU=M9Q^&MrtWa$?TL7V8#|Y9&k`7^y_2YoAv4Nb_uFyg#Gb$dgwalY5B! z2Q7Dz_pHi%yu#Fz!CBv8Wt41HN$Z;JNBu;JtHy^6m=;qAXXBiL~PSVz638$jT&|k~6U^24Cao1-$ z^B_V~nO^HlpJ|$Xh2)%Y?uO<=4b=u@YL9Yf@ph-&L3t2@Akkxk ziSYKSjlSO_sSWd-id5R4X$GU&%?EQF3d|8TdavRnI_iELS2xyEW8Qx=1!*kV&LvrOX2lAk_nuifWD z9_0#UkZ_C~=&iU8A!fkM-4%45;e&@(!>U!X8^15|fpIy)PH-iR1T=RR>stI8P zd!;e+3OjeoDTkbF-NZzgK~{_f1K?E=wK4rNgwbc;>6u(442gg&qDabn%Nx$`Tp=It zG+;t7Oc(e^{GWNq$G;|_S8)hBHCI7~@z6mH@4%+q542!tL2iKDkEk#3kKbKF_iuvF zO(cPwIL=CG6f|}D#{dUs>#&i8#oOQr#?;pBi6T?f1!kTQQuy~`v`*@&H+d>#G)FL2 zZscUDBgw(DRx!gS3Gxle=MjJp2;oIYfaPM_siyP-Eu4+qyodb)?y z$TMm|W+Z+q3HxZC_*=!rjVthZTm^pv2x{WeS#u=*bzi%H?Vbj_A2l-sS=GHJ1D}zp z?{?c#fJ^Mbo@N(6detqzuwRt5A(Qy4`|jj_Pt_HitPdyMCDjF^U`c&qNBv5-Rr!jp ztyixXB5c9F_(O@8b8l!oC2_EmqgQZj;|cF1$IlsCZw`(3EIPl#>N1Vbq{dDf{vnZv zt+x#NoMuR4NHuy9|R*WkW9)k}U)8YYcKi!Ngbg1$@dT6xfDzmYEjxPGYa182KXh;Cd$CL0|c1kqDb$a_;XgB6n&-3_qo;C1Fv=V ztOt>q?0$4y*p8wU)rD`F`qwzaK&%_cswn?>$lgo7U%cmw@XEIqi)Py{S9io#dD~@> z4;R!Ih6mCDHssHJn?GDM#Nc+36ODWEz)NKPmwH*R1r7fwlD3ce>sBPlbNr3zyu&Pi zqSE*qM=18&*S2vvLiPf*b6=HBZ?#VWxs1KkzfsaL2b9QXU;ipqbyi5Bn3mR0#NQY& zRlI^WSt?gKt%$r7430FQ5lxfDBn?x{!f?RWQ(-k8#8PT!tiF0eJlK)tCN2`8!?z)S z*v-(bJI|8!{DNmIr&T%tDr_M+p0vLyD?cN!_g?XiTG&s>*3BD;OF90#-U|eo$;53# zHG7dXZA8tVq7zHGS6CQegG_ovbI|@f&O%RL94;#Q15WdoM#|b|Tt@3R*b}eq?{3V^ z6J_hA>f60Y+tWGyQ{&VkYf)de=l9P+@_JgeQJ1-1EnX`kBGsvyg?L9 zQh+z@|FHa{_9_z^;_N{*2DTbfrH*DTk@n;5jGrZx;Y|c%nnhGEktz(x5MIR8O5l|o zMES8}^5}D=VM`?v2XsNaign@Oav`pwCFTaMRh$AzV?bSgtY@4DZ5wXAL292D!$b4GG4J>bPfABqPy z*jSQH_ykhPsRj=-6-`#KnGB2r^-)FsNp~^OsgmN_BMJGNLg$H>3Fzm5YY> z*B#K*{@VFX#}mBPqgUoxpCK;6BYOzWAHB|_(h_=Z3(ud{e=C%@?2Z8%1Y zN9qP#q)x!M{_{f(w#@_tq(gHxeQKUBRQ!ybw`-{QtfKtvti@e*-LBvABSHe`G)nlNRx5# z9p`oGa$BPC^)a~CX#WgkyJGSlmbSF@iZOkApL<~cv7$rqSxRdguVUKx{ni0y0pV)f z=FjKkp@8?N<{7^&bTR!Mg6{$e_ zj(WD$cr7ih*7vu6&SHQ~{S6*Qe%SKW5Hk#K6|b|qOiHQvaies$1$ zIASmjdeljgkPAB2wh|LO!OOi0yXk073S~RPke>G)kk{&G85ZI`-?ZTt)cO+2(7zPO z$Dfm_|3Oew9Yd4<7)(RVUk!IYjFPu?Mx_nj`WzrODciR|5dPu!tdd+zc>aY#qmqI9 zJYg)#@9lQ_<%d#54i4UD;ghvIuir0W_W{bV*i7uy%wA7?V@{n4hucXL@W&~qO7?`5 z{DcXXtjyn05qFerwF_IZ{%1X8VX&RCDS=8X$Em_wX7Ny2ez0Ed;dmD3Z~=bqZvS-@ zr1~u%k}S#e8h%n9XvXUjb5^m0upAom6vBMFsxnOaatN)Fx84k+zViFlxu1O~^33_8 zpRd-j?swHua81p6$mBpbr3yot#H0i2v)Ui9pPHxDwCXj#B3Xq9P|RWn&xw4;DQfZ9 z8>1ftL5u9_la36g672Xm7QwFlLI=7!FR|oMNLAU|S*97@$wY0FgvzA+iJ!r9HfV<6 zNvrsYTHIz$P)$r&S?(d%!JY*r^5fP0@2trK$*eT# zx1lT>5FmVQy{bI35_qjAg86t#idjI?4Ou8-X>bKTjyIgeC+r127k zHI4<2idBG4i7tt#$00$%huKE>S`AAm-S=yuS2Xm`5KoD?+Dje0KQLjo!pFji*C_&) zvcJUq*oMmmEdAhFrR*sa@{RZ>MeL?|#_7AQ;XhG+ z*(wu1xYb`O!9>&g*NzKZvi?pBlf03b4UvI2YVF^vYUb9_40&p?TOgZHdc!hx+dm1j z(?1?@{*x*i9nfKsYAa|oc^D@sWfoTFcL`ph!oHj z%25n-Gyv!9jzy>xP?Ee7R1>Fi3oKBiHJ7ryVe5o^I5KU5PguEa50dti>V3yJbyS1= z@+bGC+>%toiKK7o8X%%jBS9y%HCBp=e?lh}33-R*$vzX3d!Xn*7gam*Qog_ASUU&K}eIFS0 
zel~V9cvaa~=Ky&P*vQ~~3fkFAtYYO}fCl2t-|-vuDMWN$jpM~F;Gb*BZ$u*Q>r%rg zPEPmvEk0c}<3=nMOCIIP8NR!(CzTbxjYH~Gfb|qq_guRibsPLn5DYV+2IMkw@7#DD{ni1X$dgt!Wi;se8c zd_i5E=%fV25WDH&lP4UXkO^!@&$~VieO}N9CZ`-+VU(xRW~z4+I2u?qTBqE)lhLzE zRpET11ntSQ=e0fUJMTjH_vATIcD&95!!Zc`yraWn<{1OUolaxO)q&a^1+ji7*-Y66 z=x@n@h%e-F@4z+%W?2dhG=2lPS;3ovb3J{$siH_XE~ZNGT)u-@sPu-U`by%)Smjw} zfjyH2aAVmQ(3(At*iS>Am-Ba!-9&9O@dS!G_(>BC&WjMbjQ*okQ`{i3)BLBA!@8Oi z{w_W+pg~Sr*U4(@Njlt7WGG_nI#82!=;Qs#;@^^X5Xe<8<8gRm*`Sgdq*cK}6PRB| zbEWVS)7=SUUlf_Ym(`4VA8I_A&B$)1KX7ghCvkfC`Z^j=@nD9fi+cD};L0rJA$iBd zw3b`EUB7v(&{GP#yo+BS-dxW8`8*YTIE4l?6&v|ZCfv{0No!lcaR5q2?A=9;USN6H zycohY3Cs*nU3%hN4Av;JfV+A-w_)|1df1%@x(LT`M00QzVVv4N_cMD!PP~*7#6PkW zdwek<(zh3lN0S*+7e~K}A=QM63eqt!`1*kZUtVG2yS`9aJXk%EHf7+dh^yMIpvgsHf5_yXCp<~@46+Se7eqh4hxy~?z=4j^98;U%H#*7#`n z2bZE$x}7rcU1F>k->tIuJrMt162_eAr)e3_kg4d>#|jil#%Z3X zyRQ-Iwhx#GKYD0FL^gvXH{U&$s|C{b>n_2eYvHLfV$L@M2G<|w&qJ_9epLs){wC>B zlz8*Um%Rf!2xk)hBa{d7sb`y32C>$VtKSw;`m*;3ADlskK1g-3Y*NA=O*u;2g+ar; z`QfcT-+Unry2b=WRMxy#<+4~vwcY@QEGL6P5bhG4sFsYik72A=CZz zxIhG2QtW2eFFSmX3~}`iXi>48@ZCYWBS&A7Uwc1PrG&)PD`XV!oIK7i}-nk`%iIoazf zVn}27#|`JO&9H4mxVba2ltXV}t9ZjaOqW76cNY|kp_X0t+3>iZ6$4$hijejd+{4K- zCUL{rP^7d_W@KK-^lk0()+3nKJ`zTe*zj1c25&^a&A$!&l>L&v&Jo5?Bt=K^Ts|~E zwuO!`nE?yMASO;6=BzDAums^)nKn6lscjpKcm#HI)5N@Ew-5K% zfJ`ZAe##;)1p%TsKPY6|M+v9@a-^}ksFlDlua;t6jCqkY_ zi;QJvJ$t;IccRWhznn?Xl>T{~w=hE>Dq~=u0-n8Ot{RGj8`H%z>dc(sMau&*l(Ojl z^UoV~_Op=}AvN9W!V4d8++5|gCO@GHxQT|b8Q8?zn8}l6|5+D~>L_wgmysE^(c%WN zg83U}uLa&jORArL>6BjewyQwQRhV*S0Gr*KQhnovH6WQf3p_!(kN-J?f!g7K*;LZB z)D$AI%cX=?g3nxs7?*p!Z_G5I-4jiX4yAsQhRIPf85wbi+?Jua2-gnB1yn>nm0D9h;>W z_xqgr;VW9@m^nGos_yrCtk`gT)j)PIgDQ_4x0N=?ZmIPkv zAvfJ9?$F@0@+QOo&i0^kB>(9cM0x{B4mnF)Zw2f(dUCx5H}*i4q3eEX~R(SW&tgt79}l>a;d%s)b!DnpaX$p zx;l-5)TI2-svH9Q)fmUVsrpki_Nus>`d-yp=ZN zzHhw)!ImIYWBs93+`#8j8tWaEfxDY6jUwB&m}SYHmCo3Dvc4$7oC=yW0mq(JXpo7J z@T_%nKEL5uLPm8og6wl9SZ zNaf+ZeA*&nj9MA+iEtC;EEt<>bUa8eKD>)aOQTF4On~*h7wQ5J5)ppvsw#bGOP&r5 zWHbNDb59@)?xHJKb}14cQ`e)`HSy7zZvxe`|2cm!BGzWsTI%j$htR+N;N(5nXS|)) z_QyqpmtMJO2yJh$qy#@ALHU$mc`bvPicj*-oCRw7Gt`tH?GeO-#C_5G&%`}>!wcTr zZXg5>+wPCqOnk@e4a<-)Fx<}>aJEw|HHdGhx$&&L%iA7>`1@|ij;o8Ey_59L^%F?~ zW*=h8M#PTy1bs^GnaW9Q|6zUNHqN!YX6d3;sSJ4oTYGMLm_fgBbYc!GBP2UzV zW4%6Vp~&<2$ZOX3#`dIkK9d!q$wM-tNuPLBV{y;{D;*P|wY}BCKyVGjDII~1lQ=|c zi1IiQRC-JO(8P7FRT7kgT?b-|590ir>~&@gzp>zYUtHDX1t|&ei~bM;cdigYZO1

iXtx5~f_K>t0o<3?z3+r3)&x#m?MC*%6 zxE_I=wq0oxmUQ6D;+Y;GOVcLDub)(b189%auVIsWS8{^9e*VyFF+yL>lGoiYKhUEB zfwh`T2SQ51oF#TYQ`MQ+;w9DRCa~`D56RwLsWoUJfnW#bwVef&x|`-~fqN(8W3KR?G@S#a!7*m`S) zQ@e=PnEY3>ZDb%KIgQxcWb2}_WBvb0NtQg z6$$b`Foc0&Le&_*d%P}oCuSEU(~-@dE5>4{?N7|f6F{$UT_=8XTW1#?aL#~l?x>g8 zBgzXSKYqOtYT1~LQwtPc|Z0x`n)uqMcNz|>u+!0PC|=#8gTE4hR?lnI6j3C4gWlA zZx$$va&7>tKh#Isl0l}|JAw=20R;VPECw6q&1!H|g~KF~u4h2XAJ=HapI=MUSkG%D z`&NJ67Zqf%pH>|^2bQ-2+XiGd_F^+ zM0=(>S}(cYn}ks}jS$K-FQ)EJjZ(&POa+&NHNIwDQon49`)4)WD``^~jUf3)J&WwW z;{4!Gy^cBKO80mbeQ*wAn$y^BCR7)$CZEMWB&G|&iz+ISv1p`cL9(%lfX!#Dsd%gZ z+s9*@YN7`J z+%v^M1id$Znw|(ZnLZh`u@bMwSo{n?A|J_?JO_symP?4E@FQtvCAp>Wn&f6w-@PvN z>SKpA@8Rg(^amuX=tf0&najkrO!J|U6aM~4=n}jMN|d)f`<_oxjq&qwYwRkew|v%% zuGT}s>Fg4VuP9H0GL04yai}vWpMH zl)w9kBz4Q@^Yj8`XGsL^6nH#(@69z~PsEY|2=$6-fp> zxzeGq3Dy(uG2c)qd<06NM|;(ratE8taXV-GpJgI1K!h(Hco|s?M)UmBdmw7D&Ff-9 zo^1N_=J!H}LPEun4DjJ;^d&X%zTWqnjX=J4VC#T8<94XOe6xAi@0kHon-+}9r`Lz8 zIrp53Fn3bK?r;Bzca=|V=sBP8ILJia&^Pc(qlsPz{1c2Acw^gLm63>cQmg7J2*M#t z7NG-QJ9%sCn+(Ux*0-BI9*YZyb+9p2LCupg1=Z>z-p;(6lTIC-fo%edYl{YRXG%8K zVq|c$xqZNFdCQ>yVFwmU1vbD=3%SCqtG%g$FvrAy1;fe_+?7l%K) z2r=aTWX`Y6_o{ep+>yzxUGr3S2LDhV6!_3{Za|*qfvPQ$nSBqr8iLk3$NBBJ5#c5P z=g9u=@esWL5&n8s`ke+n=9gj?3xkF}ITnLDO0pRN(-^a2y!|DXbJl40x$!3(=hAL1 zbqSqPBSz3m%4rIb3hZI~X8#BCq{SP(qna@Z!{tW*^sBrOLz~AtRDSM=Dp70CO8Y=ji#f2ImM)EKKkg}&C2bN)eqgwE< z0?DFPn!QA(kNiTw$RZH7!SYPxcc&B;DHb(vLk~VlA7LarOstY$B*%uiBS2utlol$` z>N~c=r3(D3=pXNw>_{#FZ`6CP?s6T)7rcXXQr};fCTJgx;B*rN{;+G$DfO8q&-h)R zxE=B_W|TM5&*PJSvvMvCF?!#VQ2h1IMdQl{U}f}6OdBg)ZRC+7U_T_bUML<*K0Z$t|_REB^- zEIA+J6pQ6#c)(y=Nq{{vQ(PM45CO}Tn$uv}*W14(OF(_vKOku<%Wc^B<1Hg@X<+`3 zrxLesQgeg8K}d)CP6U{5b5jhMsgvR!H(9q@5*}O+bQ(=7==l?&aJ<79276l_HiD-E zY^kIuKZaqo{cobjCKokrB$&nzEzZl#iR4Y&(~C*>Pp9M0s!kInB5<~ zK^;~(&-%|wNh6^$F_Ta@G?-rsxkH(P%1Aw zgK)$=PvHg39cY5QAnHHcFo8x8X8C!CG!O< z1qA)6qS&B=Fg~aH34Bzd*<;(PBaKESpv8QK{ZfTB-2(MF$}|0#Pq9@xab|f3AHqsQ zt>C}dObE@`9JJ4sIJMfs()bX8k~e|tPPY?YOBd=#$crg;GGMj&Jx#1tR54mLf}@5r z(H&nkm8IUy^&*uIsXIJM3KTZB3EiVfD2e(`=9E{DArH$`$CUe+Loi-)6hK8X6`~>c zqfa)Lva*JYEpTcl(Og|!dpK@ulO2uN6QQPrpy-}iJswoB(*Xj%JwP<$JhNv${#Ej+ zvyoz$FKZABb4_-7^i4U9dBdAell8S#x&xA~SL`#x02}t34LpTZs>|7KUJRuzn!@_eMi|FUEq>~{}V zvHHb2-R5y*aQ0|5$SzYcu|ilx_E!Ka;~Y&B)*8LJ-ot)D{&-MydJNK1whnZZ zz`$RtK{ONd6nxP$wW|Y_e1*M)C``-yVnz=S@HGAz6Wq~G6~8BahygpnN8M@LdpyqNb$ulGzLhq!?I^!>Y^0~mK~R- z)m!`aYJx28YtTQJlCck6+T{31SZ`_Dt0YEioOU!I%Z#<$xXW1Bc^{1c#dB-yJQPyh z``5XnKv{#Rfp-h+gGdl7Cg_WPnu^|=5hG?oldIjB-#Jq4)DKWbcFuPfuL{%uw^d{_ zp*=zALW}$MJa4Zj{r}-3C$V-L17CS0nm`vV7(rN7fK!X-c8xnqyZgtUw`cjh<&o#( zvA|-l&*88!b(^65Vro&3(iTu~Uk|=Bd&T=E%bOV~Z!&xia>eO+Q8PQ~V>h zxTq_1CD;@V*Ph?plcxq4n}hP^+^i2<$|v{wj|}9DuqxYz#obsEbhBYb0$3vM%yJ6d z4z{5!VT5~L>hZccyH#LK4nE{g6j+_iE*boA-hA+X-gEaj!fo^CvltsHBR-xsuF z+Sawe+vUn+_^Ft*eIxTdEhlDfT!?70f*B4w8<}mn4MSzD^QuN@n}Wg=d$qj6hEGzw zm?=4ISW|HN`@vuzC~x(!)GM1Z4&SxVvB(7=uGt^Ag|{}ZgD6Oe!Z*LQW1+;|DH~W( zbmorB`qQO2`N+DHzH{0NbH9iQ+uyvO5l%21eF3fY*^H4dttt$o^RBs=QyXnSO2WG& zi6w6b)Caj02FlQ_lUGRZ2a$yV`DSh2SKy^3s7cGg{~_$HqoR7Fw{KDu5TsFJ=ng@o zrKP1imF`ADU`UZ}kP?s*>23z3LAo1B>4pJ@nfHv}=l8t1*83-Gu~_SHX6}3M{kg7d z8=tT(u$iHMgJo7i;$r&u{+6h3e7(Ip`5BTJtDp7k;tKx#E2A3wcas@|lP^UG6UTHtxzPFibo#QAKx9hHgV%GUOg|6a1o5@}%4IJzxH* zX4%%mmG}O}0d#C3RGXNH5ahfSi}ezGa{3od@G1_^CVZ>ATD-46^=&UuA3V|niaGRX zLU|P0CF;aJKbCh&j~Vdfuvzf9z7U}B+K%@@tKZWSeMwfoO<)2bW6D3sSSJ1xMxEVO zO=yNvx;m&~Ok$K-46gb{UbO}X@&wub1tY$Xam(l&j_$bJ<}NnIdm1MKw>5vb%a10^Z+;PD;*9V&v4zi3Dw^LrsnPEC8{j$e}4Yvy960_ zc-6b}a7?1z*G38ZQlcHXk;fERMBkES3`QWOFp(UA&Q2SeX0Qo zv3_k<|3@2hl!*dZ&8P&w%U7|g+S_{-_#cATbrXXGBrQL2`b_I{ouAF0hvAUD4+Z`w 
zJIDg}LYX%ZB~UV6FDc*>jTmi!*e^PTub^IogMI`L<7H93s3Xs*korF?*(vl5Mw4IA z16UuTNZ4U{##8Al4n^>g!?l8bI|#5$gOrTxMPL+_{}zwYM3B?|$&!c&kmwdX2h&k}sdk>lNs^3v0nPho{T>^zD#(f4EGG>Xr&RxN1|vm+F}z@`J6lK*qw=x-ZF@dijGdWnm+{8HY6 zHNvsgLX_o{!ADw_@CR0MO)luUY@bbDg$^AFmc2RQtu}%m(5^`T)E<|tUG-w4we;MB ze1%a6I1!JD-IsZ`bo>>EZ;D5r_}#P~N~CW>zQVcGiWe;|faDVY-k}us(pPjl3H7U~ zR{}pDN?#eYbFOve+e1x>kmG&c9Y~jF`=hbRT1)`+I}HrmtBhtRbB32mWYHXw)cAc5 z#``WD=tQ~(PA?r0Zs||Fo5fhF#*6`|NC!_5V8TNugQbU^(l zHDp2y>`{|$M&VbC&izHm_}Od|Rl_GyFoh;zr(XY@F_XL4&R8f>|A%CtN4Kr{K;_-& zU@@@I5~=SvbRoLxr{S^!uj*e0{b23OZ}&P>2Yt*dI0L-}4`h14|ETyjLDfp0))#k) zitW8r|bwNtMGQ-FW!Ir-veSe$_?P&zQ0q<{}RFGAbWu+yoUwGT16N?SQ)o&_P zBu3*GCzyc6W6rPUz2$W?C~h zn}mO8@x4$W`Fm6^2LY}pkkOQQ`J)e!nO^aXg4g;gU8=keA`$u~HL#zO?_=6hF3reF-?57Q#O7LLzxQ1F1|yE!b9ex5N&c#5@B#&AaGJMorUpzuUjA~LW$8) z%Pb2_=<$K@Z%og5D)`~99*z40P||mDj*n2q*b98XogY8qsIbYa#?X^m!e*njf8x?t zUPZ7V`|JH6-?^^4y}xrl+HJ`=_N2GEa{kp^URJS>9Z=B2Hrmc@2|8}tPe>rp+cCp8 z2s|oLt>)&cyGvis${&4|5P}`k8M4p5ocz66hE)RHABwCF^@$6R32wo?_(L``<-l{{N*l~w z>hciE{qI^E968tFZ=pr&Hhy_qRYu5O^8@|Kdr(r+(V6Jr$Wguubo;F`@mWc%n68vG5?CdDWMpYi#po} z;K=Y%eXQL5bt<0Lno3%5;qN;TLbe{kaFIhaImc-}rrh1BFWb<0UfWr3KNneRJ@(by zmV%KdDa@6cz$-DVZO}z-(u5=>6xHi3Nh5LV_+oTjM5o5N z^{rDQyKco$Q&yq>`3E0E3EkQAhOXd6qGa!vziEYqL@Hu^7A4tt{Huj9%ktg?W|1+l z#!KMq?nwNQ7t5#+UiZoZI^DdlMSJ2u9}c{I#?ug6!vt zV8@=qi_ss!ShBR&pgVl%DDebtvqJq?WfTHs7{c8;1yQOw5|P!`_*LVKkR5^dw@Nnd zt`25!{haDwlgjIw*bBcYk0lhf7(tOv3}M3Z2=)uoB{i1noIKDE@xS_XY&9=jt_ZIS zplC4fkKNGBxht^wsf(8R9h|e5Zae^AgI_mBAvcDH{M3>a@!euss<4A$dHT^l@Ytpa zBi1etoY{{3*V;pRl2w#;-D;L)RNUv*Xy|e45Te|^Y9rSZlROuz_|qVC;Xc^ZlclyZ zFJt#C~|rl?mn*vCi~$rZwQ7u`^MQ;$Vb-j z*)CD)p0!cx{`#W&NRxw4r>LtmEwt=MB~>rCOCe|Ib^B8yH2%+FTXHmCUY9DKZA(6# z%4vzU0cW}VS~4r;)tEd6Opy>$@WmWBpBU5+-1n?^A#+q~>bq+}4_Jvw?LReUs=c^w z$|-71r)EKpJVUV3A)D8Y!#osk5vikyu+v7bidoUe`hE=(avaE=SY|36%_YoiK6TnE zdVzQ$_(EjquUKMpQ{`!I3>z|uT7;A5O>d*#xy%t>S$!G<56KUoefkx@sn;iy3W&!{SP>XbV~pfiku>kf52+9p^^7lM3p`F9aC4zo_nLQGWGa%`Tu~#F5E}F+VnXP z_m{>$V|B{()DM>$xg%`@;Y#(0vF!_nHN@&oJ|0P-i@??2iDGsTqHrJIbBcA@-4%nG zoc(PpC=~}6DA~#fq%JbI4IdHRZoNcOMQ4xuvAISZV$ZhyZpg%uDJuNq2b5Z10Q|JN zGlg9jyKNbt{i*>LOZat6&df65K+`;NoxWo612oR{A>=t8;jv3_q>hDB^bO*4i`GZv zjCS(EBzl4l>Ixq|m#amLWC}!?Gskv&2ysXw=R3)g%<`Pi~Jvu}UI{i4cP*88yv78%%n+3wr&2*Irt289*QBNF|Jwg46tsmuDN&+BK$^;PA&|NU5VW9wyHEBxIrF4-3V`HWIE z;^oSc!EBuPR3D^p9-<68di&52YjW(>An`!j6!0MHx_@cw`OM#RTf&Toa%f33KbWRe zm2Nf9qf&!>{mXY^Fozt+6HisfUc1T2)3A%2m#ra#MdKAZ=8knC!1FjDfC`@!s-=iG zJdB)YShV>ky(gD)diNK2jsqTWG9W`h>nST!&R|+}kh=&0%M`(E3 z46K=(eXd>HstUmL>(RQGuJpAvn0*g{iqK1#>t#xhnxzQdC~BU_X?N#GV2ippLuj z!e#!ZU?xH8cPGpmq&{qL5+S1@QRlWT@|9~E!S44CQaQw`l!9{2uU0_uKh^fW3PDsfJ!G7^} zoxo2g#-SxoGsv9Y&wMfnDofT8KbDdUs--3dhoQ2&TBD9n~A zci?0>`O)}si!fiC_G#Ekw*e08#mic?4TYt{Z2%t{0)Ptl*Uv&tCJm zH>v!>^&T!<_l#!e>&_BCzz>DliqYQ~x+Z4WA?4NL;UzM#BW=-a454h~%n=aEw?YgB zoCn}?D#5{Nq2Avhmf5`ShASgWGP_kb>vZ)R8vA7AM`%5jP=a-Q2~*pKPx zy(*4*Y#ftqrf17dXFo!cL~>+EjU=lKS>A^$#Rw|IVt z0h?g!*iO~3M!8m?1(2k@F*!q}g8|Aqom)^2Lm_)kVSVAapK|WP;Fe*35%n9S3j!EM zyiM`F%8?0ZqSp_;LeA%i-QAgmzB#D;{?*|lYnEfe6ZC7a4@~&5P%k6KMK?`iwN-!B z#r0KVk^r{X^=YQI424U}NgutkYe29ak2mJ2&_)QXb;7S4?{1#^oG);tj*-jk`8MP*&K@V&c z7dFdVdQsS4S@?wYX#8+*f9T82w|Y6BO~m<6w=OS*vFlR<Il+(l$6)HJ{Ib!qq>=U9vhNOZWKAvOXIS=L)K9LMG9^i|;5hl7A5>ze< zA-J^Lgqco4%@eocfp2OR15W1?Ay!pXRPN(bt^9k?KRab%p!n#t)kAP9mw<@C;3WO> zfW7UfGptU|s~*>^1Zzi<#87GHPknldH88s)@b2sgnpd^^-wMqARaXAhnwGa}+KSh5Jh@QTzgGc0(vbew|UqG)hq@tMRyRTcZgN>JD0AGp%m|3Z}%uN0H^ zbN`4ITj~eQTthteG+h7u3Zt#1C94O%(11bmFKQTN=;Kd`j2#I!oC&!!R-(R)rh5** zkp4)e2ZLvn6Y!1{2GC0bYrS?~&$F#0)fZosREx?WYeE@4aKr%!p?kIoLU_}?0@C$l 
z4jR?KSv|;ve$vWMD+f$9U0W&L->ZroT#>|bc5VK`+0FcQ0cLNG!fcK+-BKtdyCZNs zK0T#%f)mH6&7-=>d@k&(|g)uk=R{TVR-hB#<2A zwzlmUB7>$%3H88!F?!zX;hWZIcApd9ngxZu!&|8ZiZ{6znsl*lyw9l@t4i$@nj5YC z2G|0h1O+nX?&8UwaL z|NcGH?zef^rfDiX9FOXSA>y3vmzoo#KszycweG6XvO#)(c2dFx^kC8XAj!jMGxctMj(y3w-tQ9 zf)iEzqGZ(8p!8i4WcpY@dodHZ9&gIt5N6I&@Qk!(5)D;qB9Kl^d~w+6mqneCqSSG7 zh&ovYA}J0$a}C%XgL&*a9;oI>+#TySbD7o3WZrOzBF?Esf4b5%t})^uQf9JLQG;5u9pg=Jfjqf zgXhK0Kp{6q1_VY&MoZ%v|5u9_IcbNXl82(B(W5f-vJ~p4yFPvzxQ&`3?dbd#o4(Pl zPq;^f_OpS_uUmab-rE4r@lSfX%b&s>eD6v-l9mwDC7Qi+5cm~i<_TcK=}qgHMoi{? z+(X>s6~&W)4)hIk_OEc#6VSu3~&+ z@DH;CYK692r{b}>MP_3Alj4Z)9sfvVRXnCktSEcVU}r|~5>{hEY5dsq{+(vO+7B0r zZTz*l=@jNtZT7^`wfBnDmQCFyIApCfWp*zW*QwuGx|EBEM?$%W!x_E3$jwpA7b9x0 z@4I*U1CIHj_AG9|iNvTlhuv)*r31A@uW}9uuFS&<`BEAo@rY(6zHZ{PGC= z&^LnSd_I;Zg6txzs<)WFd@1;6fzIbxS0ZIFl45rqeX^)Uwjp6htI z8O^KJ{qJYwrSCwADkbp>|M#aymOSKq&x1}a)_43AmF#m-1IqNiU`W}=6X2cehQYq^ zn}^#ePzig)yA_u}*c3Na9Ke3`hYQv@R0|)u+b=cF%ANf+p~DF$!No!d=r3k6BX_@W z-Ac~~=$@JYv7+=%RdQAz13qUJ_|TzWgFJzBKF3(bl+z~djrE0Mt&%A?FFk7jxIJFt z0QS1tPF3xuCaA^NLmi8Q9_`0+&z?9GN*T=UQn`x$(c2FLeOP~|wy{ha!W)pZCO?|83j2TC z8>6D6lsLweInW!!n_}xk9VdL0Z5RPpV+P^PikIox;-_PRy{o^bpC9l+LZUTSv0@K( zZRZqmP#YHMj(Wb7+eY7+pBsQ+`uuo~y1)O`QZrok1!A$>$PJ5-g;D{G_4c1rQUb9k z1ZOnl7f9nAx5MdCHGA)Pq0eo4*@8e#Z)8fCxf(0;qe)bGGF%w__7PuVSFt)Yn1)uL zQy~}6fPgN5odwh25fNR=_EfMF@jTWZ>ky2oI8{8j8l*97zWVQWAg{Hauq2SX8Pu5Mn2c)-g zx=e^f@;|N^BgGi^@+mAow`IGQ=oVSitdn*cDsUV95)feg@=88l!oXj$plY**{!|yo z>JOhH>M4k<3O8NalM$OlrMT=Gh{J-GVFf<$`p5=BX5fkwyO1uev zp=9CjlUGl=&z7x48T~YOh$f+sf%;v*6t!w6eMT(?*oOE@Lwk5GM>cdtu?_;~(hRrA z+e8X?i4i~^Sq^cO3gdA2Z}omK;Vl*(W#A7$ct7F$>gmI<9`Xw-B+J#v4A>hDly(s^ zo2C`d-R&39va3;6Wqo1fCd3vtTnWdnTSg_tN|?SYcvZi-Uw;1#AGB6gGZUpr3=iAS zLi!zl><6}>`XjU(VIzhO#D-hWn|(38t8cu9t&J`>D7A7Espi16_iL#+qwD4MLUsRI zFx~*8tw@8Ru-=NE$95d*W-R>F;mTFdmC$?!k(c^sxnj+n6##C7FZ zYu|^XLi`;8R*&;mhvG3t;tTwbHyV%6O=-JPLc}r)sOm87)uaY!l{~~GYYi5~DDt0i z{!oJheN)s*>qpGivgDk~!};oNVa0l%Ml!0dY>8_&YJ*B~VvrR=x59|)lrWES(H zVtGG4lETwNVUVU*fO_jtM4R6aXfK(m%oI9$ z)&!8$#T{N#vz`Cp+b8AVMIH^ll&zKVtu&&le-Lt~ZJ9YvtTHj_eG-~Z&dqDP-2)cn zue47RUfw8l|I91d-@4fUm=tZSVR}5U63ssfKDmT$nkJ&Uzgj?}szC|J4|jH=d++#% zOLweKt220)Iq(qz2YMF=QBOUt_%gg0*}zaJi{mv$BdxdovBvoJ!P*yIswBqsw?eNP z&Fb+7q<}qR#y%|!u4W+T^ydu7i9R(epp5?gwCz8s{iPvq}eON9QbANEz98aWo0KEfR}$TV{U)pa1)bp8%J-G z48+A8D08~QKST-~yBesImsX}6IES7&Z-nJ}#KvT&mR|7Mpi5`Yfqy=m&TNEhe9l7p zv2rnE&(sZG_c^`~DWfzN@A%BXEusy^#1{B!8h?#5+fofDKU{7*a3} z-+bd|HQ^4tza0ac5aXpjyVuSKYhNRPEi0O^dkErQqLy07j{(qI%C|1*a{EfNb2-N|p)VvJwLR9* z&C-UJdJdUZo9SdL9!d`PoHD3NiKka07So$Ts!5M|gQOKOVe z{bE>bWnL)hGTzw@SX=q%cE4%7-}HSWs3G=otRqrNrml$2??lkxrj#uxY+(W079>&Q zW`0ImQO&J9)k=hGFMuG;pYHxn_>eyIatEmUo!qbCT>jiDMR}z&crga@!Bga8L5*3e zgUeDO0@0t=c(NKN)W(*(Z2aJMt}9nsR#aaHSF0(!{2ta21Gy0aJQl+XXibK%1-vFH zC08QR`g^m#>w72UH1dxqVqhJnZRM1Ko4)DYLt}S-M+=p4phI5fs|6pYaD=p-3j20G z(Uz+dyESZZVxKM3@9yC8m}i6>8C7dgH*6kNP$|bqL`>29yZ-Y;>i>ZLkFy5Ua^o%Vrt|*gq_2SzNG33?| z@|xtKv<8t-95?iYqXCZsM8ZrAF|?nA_EYx}-$fcBU^remUE|~9)^6Gid-hYarggr` zd981*RZ*EH3a=E@dF`k)FD|k2=wMMA=0ELjZN!n0K&bVd%gaw(UpOri>V^}Ps*j1c zaDAeAxd8r?#PX`?Vi4>i;mJOHp!O%?rRmc8#Y0+wQ}L1s=hUv9CM-J^1|*V@ig*o0 z1(81a6DC!pp?ewLOR|y14;OGUVL$yeO1Wqm%Gxr}NMUB67R$7kvPK#Gvt1S?_TCaH<$R(oxP-{$ z?yrays3qKUzO<8~<3t55KmX>-ihJ|8U3t%Q5OVKyfBD3dJfL$|!{vtiT_c?pj=p#u2CZpJF}e7NFb>B_ zC{M%)i=Oomoik0(EEY#zT5pmpiu?PFdv#o7Zvb*3vX~bDeggw5jb%p?>Rz?GH&Yz| zT7}}sk1QiRr7jPwIye>-0eqO?PrG*EkN8RxIjb;m)`O>>G`B{0Dw03zSEEw&MEBH; zY)y5cv;K5rIwWrk`&Lnk?5IvMP0=8J5dcjp2Q^8+W2igL1e z-a+x3@=I61)R;MTxY3a6uUvzfRT&a;P}<$D-#tk?j)%h9oKqRD3;4~SS@divy*L)e|J4T8)RX6Fhlj@$sI#J!-03D9eI 
znA|jC&IEC{fewO7_!F7&48CE`BrS&|974XHb8>+c5FabR;;bwgaQ1yFxGXu2(ckR76()wd7Dj`pr!-E8J~Y{rxLb{Yj1d3MeZ^|o z^R)@?O*d=xH+hy7{Lw0`jpgq#;}S9CYPrSH@Zf5$6eB4P*wj83iOgkQh;5d7L)xQC zd-*?E0DAgy^9}NtZMM&{B5i5+>jje!ioQtvwIlfJ!6P&fHdKZqYoC{s_G4e8GyyCU z*yPou&qG~0<7}Xl4>4#IzK5I-rEEMq#=a{!(+giIlJY#)e3|0`KN5GLF;aJ&<(ODK zx(KP${7B>c7mWVIt`z*sK74*IrGB)P#0+qno6?v^gd^~K!m0G}A4;(P*E z0`CgpI48~&sLV4fxVwlI612ajiBWqL`gcK^P@~B34;1=_f=+eee6CF2_;9(J$gCfG z-0|kvSW>q`Lf*hG3a&km=!t;Y9O)(54y7P3>-C;D=v)OP^xd=4_K8mdU(-Cc!fg3K zd(q@zeXBOi)sDK%M=q*EBaTuTWvAC>H}lgBkn-8JVgvAX>c!C-=`E-O0pECC>GQ7J zs(<}B)H9WIm>jDm#*1inEwN90rkLK==~3FByILwUIa!|41kNfxoHp3`aPPL#c_LZB zK88x}QAxpb@IKoic&kmdEOpK0S9TA7AlQZaU0EtUx=FJkHd=(9?f#EMyc%NRa-G}B zt~Z}d!cM2?z&-PP1ta@`jiTNgWn|bF~`s-H}-$vO70NXko z%KnY<%}^}0S){l)X1I-yu1DH8x!KkXPc)X}#D}dOJZ+!VaxI`KRiSjGdmrp@3S9|3 zRo(9m1uG*ucGq z*gnJ`8y|BquPamm`!h;M)s{WwmL(o0AZ$l*?VZ);{X;xXPKAU><--^Kpb+Y&&x}b@ zIcZn@)!&>8VLZ?5pQWw-Urn4RCtg$MFs5S9pGw!eg}R{S7kyyjTd3&BB0uc$+R<0# zRe8IeJBnVoL`KD0R*zvFE|Or^Ra%$NNyL_8LqL9HS1uW!q!xCLMmn*++1;@=B zwlFYQ3alaAuhI2YFhBBMj?^7x&oZ? zS$Pi|_KYOyXmL2hU`EQISI|f-2!O{vu&oNbMac1)sSzC2N5oMC4RCAfWK;=Iq^S+%~q3JpoC{{a9G6+Q^bezTTflhCDh<_lEfh*gqkzC;#_ve zMUgLM;Ony!!Y?>w!S6A6KG4L`8L(KPVJ=E=8@u^pjt>6Xx=Q*-QuQiP;e&t`b=-`e z2&xVPqFV<1Ua6FUe>$06GCYahKU{OTbq3!rMR~O!7~3*aWa5olg7JC0g##ZLpXz~a z0nJ4kfJ;-E2{asK73Si=w%kPd%nB~@sd6#t6WvYFsZ#IB_bNbTKZP_LaRN){*@ixV zLQKug5JWeiwp|h=CN1OV#6Q5tQ_PLr9Jpe}`apxfQ0Q;v6a`{STps|q>U4#w*k$ht zr)GPJnnE~mACeh1I5?HTf9_l!g6{#}c8_b@Q=#ypf6h+1P6gDNX5^p08lvy~ z-_A}#^f0^5(*K;DAJ?1h4S_%EH!9ws!hubmrUz4`h93fecjJ9oRF=QcQ{p# zNB-YF&X~AW1pJ1~8Cq{T70{BMo~evzIa<@Ywbv=<`sTfH);ET8vL4;WFJ6Yj!OFV6 zf5s@&hm4l(gA98K%1x}v^5QxxzgJsogbR^e0PeMPz4e}DBiBsbw5^-3!Tr$93{t_J zrCrx;s&U*FlQ3?VS4s2F5=6N^xw0%^+@R_qenj_`eV%?RWFjNulV|0Bt_w)I)*Cam zu?7~J-E}uFb=wcB#i_SP<7RcIiSSYW8?4=8qcJh&~R&`7M1%#Cy+7<>4VYu!6Q*Cpl-x zi)^BI%FmZ#p*5T5l5?3&aMBvcHJO|tLjEJidr{e0)8FGHuq$Q*Vg4-rY=012!wTjk8&Wt3+_t!hNuFj$A z!P+#7k4x}wy^}wF{3yJ12)OW0v&r5~LPpOZt{0z|KZuvO(}E~I1nrfD%Ki{mVZKyM zr^^6M#5x9{c;E(Gb-w9tmazKuWgQ7N|G*X>o$^c*6Hby0dF63_xj9)gEqgW|JG&S7 zV2M2~496J&K_9zs?gBjY8uudf3F`%o!ieqni0hjQws@MTFGe$FYkv#vrzDaw~Q7=LuG!3>kW2j18No`?`>pCPX(#okA(?!{>-A;7>kHhFG7hZsda= zY($ves&ao&R*t0pZiy7`lOrm`G=_toOM*}L;+Z;`>l!M+PIV=IfPD>M)X<%scg?L^ zxs*F^R2D^D=~FcXBOZTVK`pT~5A{^V(!@SXPD)*2wNl5o{Ni{s#B9h=4g}<4*V#Xv zqXp|w9=!QG@>9NyYm+RzsVG%{G$5oHa>-8_!xOipLmI*Wy_8*JuZ&|Ocx4k3 z@3*Rux)xl=9{TpfN<(k2{M0)`FwVl71ii@@yEzGtyv9gzz`Z$tjj!n_3LDG(#;Ba| zdr=W*-Dwig%25sdKsR}xkJ0qGX8*}>A~p`eKSm0+h#_b?+au=72cRlz79uQ`t%snX ziz%qun!z+2R=#mru3ww+kbeCE*c;nBq&>C z;qS;cQ&taAlc>8_cj-hoG|3E%@&j&s#l$#MjoKU~>Tot!KCk2XLHLX-%IIP9C;T$$ zv*8hVNFSoEjq^66?X^(2??2-;5T<~i&sUxjP51oWo;sP2!|Vr57PKG#vDH3B9V33&Q`VqtbgSb2a%o_wC;G68KxSrIW{;WLJ;#EDZ3!n$ESfdB!s4CNaomf^YG5 zMcJ*T!Z~u+mZ#T-d;5*gj@~sJMr0Qqf~~B3iT?9;ewV058GCwK@Rw{yU6tjIj`kf8 zBU@6b(oIN^Tl&n0^}^mjD!T!;a;~#z+s^W32v2aGVh0*BQ&bLKxdt(SF81fE&Cxkh z6sGEs<+q-pG3E!9mn~irqC_@H2y90i2~D2L@#1$l+G>--qSwMhA?9V$YK*;26QPX~ zlNLbE?EB=E*y%iJ)0edXQ?{d!le(Cy1ru#IURX-SbZ&Pi1cqPrqgn~K=8L70RaMw`u7SzmaLJW zv`KBv_Lf^07e=Y#T@I12DXzqNQK)H|92!zjngFLNr3+es(Nz=saP1_5VK^i!IE{{0`F&eD*Wcgj3UUQa& zWhWjPRlP%AuA17YnqUGIs#9${)%Qu*`Y{q{(7} zw`uzn-7?NyxfWv@fxQ!D2K7JZo0X`KjG3-MmN3=9{Q4%rMCmt3zWTD&ohaD>pD}%_ zu4p4ujFFDmf&gjvH2ERA`ZbmCeM~}Nw8z`?2}r|dZc5?lr7Kco^?H(q5OU@JAp5|jA#-~IUN)& znu-i|m0QB@w?T(q8zYg9Uye`rId`P6`XNuzU+RF+n_ip2rNeADPV|_DQ))0@*r58k z6H!1#HlSVA(?`*Vp58sc3iw(cxI zyfHG*`IPL^AigshW&mj@(sQSaesbJ0IvH|O@sP%ga0%K1axq4p7g$awA#uw*iXg^+ zP?I)Lt7}Y8LE3=5mv{0|0>#o`c#0G~hlp{u0Mw3>wK<5-YmkWY1FW&C7llGb|M?7g 
z<$4YS8P5UjO+egp{&U0B$T;hd-Y`Rcrl^ksLBCpd!7Jxv_1ac|{jv)4^m^<7x>Mvw zlcZ{~p3HGT0jDSW!Vn^og$#j}peNd2=iriYHB!&DWRZ(xO!(p!?_*C4m@yk_0%Cal%K;* zN^}W3U%iwuF|k!Wpfj$4JqL$TEING<^f=AEe8HUmaWRLf(iNt83nf@@nzcI!ooJMU%y3{!n*-D5~NDXVBZggGPWF{Bp|_!KgX=W;7vy;c9+;@fj; zinE^zv7AapQ!!3LH=*Hc)mUAfki3t31&oVVSax=HGDI!14-J8jA8x5sE-EdQ5Eo;a z=Re!!4{g15h4atR3~F(?KS%z`#U{iav9(ew^mZ+Nl?hk^Q(XVS2RC_Nfqhe6;C-A< z?m4Ow$0B&Bu!R}JBU9KT{qL{SuMA#z0tix&&_P{g-$dQ-ehop!58=THg#iLsB zyOUj$g5o#uWR@cPgGK>cUXk~g{^dP0g@!Rj72=KHY4g*&%KObf)!^--<1h3pp|_y= z7Y+zDE}|%RV4Cxka|cLbwdZsJLd2kg-^-r3KP0w`>)?#*3J4KOvJ@#zV*mB@PAiK)U6*UExo_SXSBXCr{{vlX3uqVgyz?mS zs~X8Z(w}qGDTwSq=SQ^`Gz&tb+e}cHM9F$U%`y;RRamGiDNsv-Q#>j^anzy#f7|*< zuzLDgfEc-2iaC2W$pCyp;f%=W2k*va_lDq$%{w>DZu8ug8-eMg!DL*xPqWnE(%(_T zG`k@Lhm`}B{Tip>z>f4}&T)R6^)PmUhcNaU(PO9xZIyhL5yN=8h3rJSNsQr_EhrXVEbQsU?*i=|byD^wv>IXgE9!855*WHFyWYF}HM&C8S9n zbLn?n*FMNWr04kW=ib2M5lwz3R#*EoM>!bZC9V6#ze_Rqv9b_!R#3xA9Um~!k+I0Y z=x?pfxXmTu1llExcD>P>F2BdscPywldn3NDgl(;N(}7l4Ay|5<`*5MNn992k$AUae zKcvA&H7C#UPspX%JqOo19PNWdvK(=N#QXUEz_a%8>qW@)Sz3gL5q0@Uo-eW!_!<-R z363T9R*(_(DQDnYNfHc9B|?r8hOx)tT33tSoZcL+1Uivu2nZt7*>`$wYjodEwfLDn2uBpW_;Iv}A=~>qG3%*!S)Mljpz% z4K4-UkIjxBi(UtRr={nKE1jC@o}pE}{~mLTk|;~3Ll=WV+B`8uET0?B9zvtV0D9N> z1`wF!L6>*MSRMz%!cuEYyyWaA((h>K zv^ISHe(}g-sl@V{KiA^H9Uz>Ukt+jvhnz&w@Ge4R+a+~@wkzvLH$>}2ottToqIbIvh_b0OoxbkVY(mhC%Z(a$${gJ(nPW1qpZJhtcb5#3$$EFQxm&gZNKIlJu#y!2X6MhxJ-$;|6qu8$B*937WIx?$&A}* zVd$R;9L@c96n|HtyOF3qc`tFJAu3iTCpb^xTcay9Y|f8>N0`w_?Vpv)+yo;rI2hj> zs8}mPtQUPe7(@uWA?5omGwgv_5$AhD#Yb_+qenK84E|Aa%aF9vsF8t-SS}SGLnAdi zA-#gStN!ypPJD{Q=7S8?m}e?v-Z@RbwVcYXwo8sz>!0c6sD<&CD@{7{`$5yq0*V5tH_}kZ0M+vqBlRm8pyC80(^&Z6gRxl+W zqtkxHSkBObGuWI(*z@T8>yo1!Y?%7*`H;qM8`^$9qj>I?eblYO7f z%<;bHIkqkl!Q&>=Zdz!(WBww~v(VcSpTdo%_r)VZ9q8YQi0<{i*t#u% znMt_vfKnV8pKa_33;yhOMAcr<)|+#NQZ-~j=iBV~>XjpS}QX!NrQ zJjU8D!??r|QLv{4mpfX*H?WV;dsS%)lNx&{7;y&9&%E9#PA@ySy#irbCanzFVjnAT z%zNT{`X`Kvhf>Ku8T9XYc9#qXi?`*T;s@f3v2i2CVw-$8c%jOU*-GmclRiZ%ya ziMsk*Z1(rXYP&v(yl=?fnssQ81qL1Wd<5-GnZy+Sp( z-ek9K+#WFpMMa0E(S%K2uSLQ7x5^Q_pzT>V{_!OS?^u9JTH*6PI^9A7tZnX70m4Mm#Gr^1(caYX zenERm*|55)v>B&G*Ef?KOQAF#pHQ|#DbExp?{ zmsy0Fc&%B8-li8Qi<#Rw|8^y%?oQr~*32-yueP~b1xXD$amt_qzBR@X+=RhWpse}* zk^Lt;nyaS?Dxc}mjx?d42C8tWbFsoaKJ7%yaElNoek z@a5b{UOJvvBq{L>XDAMw`vdJ9`ScN85i^^s(O-4MV|A3_F2Y8HZ|rPKyCM!#)L_VNB{nu<~ala3jHbEBh7(UIXv263j(jqK;)k%lZ}!|c{knJYcl5o z&Kd9+moxf#mab}-D#Q^~E5I5d|NeJ8f+R)_sQ>hMebrhUxiI;1Mc+%~%d@(H_J{*5 zPaHckoUJPjC73mbsP+T1>XV{g=i4DLnO|6wzMR)7mJT;sZ>U*Dg@<49BqY+{ke<^_ zV5vE5hK4J?gWG^(DX>FIlF%GTg?+!gP1;56tUVXITif;V zRB)m5zFv@Lx%q+aDTCdX$+S1O6T`(uE7q`)RBUVzjyvH_yg@=VP>{Xvba;QhO&nc( zmw+z0G~czxChv+r8hHGkoWMAU-`2NWpD{BMCXnkD_+nF&!^fwDolFl{lW1zIN#(~#nl9FfppD171&8*oLoNu0_ zv^H4a?Ui9BmM~mtZSG0lF4Opkt8r5xR6l^2?MlB6M~RF6R{=gc7c z?3K+awNEcCzkprrYdSBU^NgG%JFsxju~{$eOj7QxT@`^R4=zoA33mS+5<27!)EKtE z1IvD{Y#9i!A$Dhli;Or|qx##&w+JSE#8hY$=4jcIopnUlAn$IOsTTq@&G^33o2`N# zeG~kUU*es>>RoQzJSJGVf9fI-_|RaxnxaG9ZM%lm*zWLaDgsdEb-wcQICMd2hw@(d zx6QwtZnTt~y{ltV_R>qYOPY7|F45qhCv2T>-dbkcBh`zCW~`#X=3jL&s#a)EtJS_e z|Kj#&%VBrKs;P^KE`_WFtd;C6`o1W@nQ@Q_!;9T-oenS&c2K@7uE_a?lfR*tWDQ&CR0|M8yIQOvy@^CkXK z(1|LKpSt*u!0WBYt|`mXuQ~$XbQ64bN%D67tMuBUy%2EAA=>qs(K)$D))kMbXzfg5 ztdw$O18~LpVMhuyPHg_W%k5N;wCwD|eD;sonlH&T2(9w5Bas5V+0s=#RX5J*eW0>& zQYq%2@7{YLO!&L(bk*SSw$CE}mwV-L&;RXSS)QZ*Z8kW~DAKdY_Nse>yo zue|-yN>jlR5yBFt?beI^-9=S#3VxRcPbUAxF>zill)hd+j|q|dv-v3^Wk{l{ne;YXMaVF zNnUO$|G`qsENl*MNb5Vw-Sf)-An=KAs0l2$k`uOtg4PUEP2r1D^p!8;vOXtz*%!wV z-$xFZeTGh;J|4OD<5G1^9qP(I#&QRM7p+0t#2fEz5&=Bqk>j^Sodeooa>zcfv4+FC zR;*u3x?!!1*)9E!aWl$-xNcmMjH&3F-K~9;e+%BUGqyr`ko~(~Ok)+I-@eR~E!p>O z)0wqH7pJ&R 
z((c}#>p%af#>Fbt*a1uo;Or6eKV4&%rRR~CY4UBmL;;-oiJV zD=gJ7+RQ)^F@vpCmbm~(ih1P_=KHajzsPPpMv6}*X z-m?EtV-S5|Ube{nOWIpDGg)+(vV75P#a(w9P0GRQ7JoJjE8*sM@4q^Kq41zz^Mi1X zeZ+@U*3~JPnP{=RtsmaLj0rK_arKE1u5++jyG?#r zp#$V(zCj)bIhm(07Op~mdE07*j(mS%weSt}ufz~%yvM3*LGjWG)>(+#Q}{C8X7u63 zc%XO%0EGZzQLxC1y|0xwxbFo=j4uiQjE?zzyfo;<&6?D`cH(X}=xOVt#zzd(es;V| zpzOT({EESH4C^_OegF=(c5ORm`l4_QVMBpPDJ=b{>#;m7zgw=lt?m1;U503-+gR@X z6U(sSC{w(Vn~=7vBlRf&ryas|MD`?O*8=4tnKlZQnzm#*bf}b-TWkZ^|;(2)^ll3 z>KEUNonuNZ%e83dwuINyA6@%T0gan{P}gI)ek;>=>~;V7^5uSLHMh(1R0FG{gZ|KY zy?8>+ zWY-{Dt}{HmNo`7SiJP8ubElJQTt{(jOrS!WzjZb7Lu4z)+Z-w-K~Q8v$;rLbn1~L% z*`sF7^+3vHkcqLS0qhomU6cKh>5ik-Kjh@dJ=VDqmff8DNzE~Y=JM(M07yi_*_tOQP} z^wb8e?Hj^fqdqJizFgP4hGP*uGNT}o!4a*k11@d=mq~WaZRR`Iy`Ru3FUs=;+Xd$> zZP`Sk72Vy#dHt;gYKAd=J%Y6+hk&=_RPxGh*96cQQ#>SJnhthC_01w7ZQfp@^7YqwB}7( zVi<#i(h@Zy8wG9Qa_6~nI=o9pGEj`U+#@w7a@!NIF!Fj>8kS+!e z&9ZFyhl)=C6wfpZk+XjgPagl(WT+aw&v$R5acNfyKqZF$N~u^cgx5@dDwh!QbIJ6b z)>D*(!6poc5FZ%22srxfz zWnJJyDWD~)uOdml$^Ehe!g}}-`%yTVaaxa&c+c>pEjJEx5B+{Q(V{Um??fuXmpzK8 z`>22%vl1i3nvtqnzKQQB0SKuynBk4N6yQ%j#RZyp%4dg921tj#72~iK%hO(7%eCaj zd50vDtJK1=PhPBe-Kc#ozu(CXhcLw%RdjozWHESh)F?yLap3NRwdy3L!NTaGWd`nKU3kquHR`i#nLFX#^=wP zQD}Q~P_+UxI_2nsw^!t{+r{602X8D}R=2O4Jmh5d>|ELzkXh=1+2a)9d7FGRwhd0z zvWi2Npa*jd94PU}0~zCL|P=RaAUE2g@SV%J78 zj&)zOHQeU-Ds-1|B;hTCom0)N!h7Wx+gFDMmRUsS&B5H9PLcS}Uxfyz%5c%Bk zUp$Kp-~IjCQ=^JD?)2yRjTKkLnI$SqShY21`k52%+vue(9}p2`nn03QVO8bL*)CLplnv|TXdA?Ht4;OaT|41_(NjS7GEU8e~0i(=LEXj1vD zr7z1AC3@dic{SY^;d^t%AbANCR(|TsfXs&K(?v`Xp`=a(ZR@7245uHvGb@KMRWs8(kv)}W|=~;WR+6m-X5j_;(n zVhlHxRhA3SdIC`n!0C$;-D^(>jIgJOwQmrTcn-M4G^T{c6zja9N?OO3G~@*u zR7G&a^htWFK}wy^mJ!FCF5cl^x@xVGgOEuDtT-PA_AoS}UvW32(AHPJ_I(O0bhehjQ^T9-_Nn;RYO`FMZmd+j zx4q(%7I%3mO2cLAkq%Eol0Uh&T%6>E)YIb8-l`8lun0}HeQzUqy#TLQ8H8&NSO zMuj-%1aTPJ)^B_qFehw)W(;s%%Nq#oIvlB8qzyw9DM0lal6}_-PxTXi;zm*DF?!hR zwZ_QW5E8lg*V{|0?@ADFO5l05QEk63$xOX019^z4<_=8#nL!GsaWW_whqEIiS_8Fp zO=H6BAq+HtqQr+qTa6%Xkq(frr_W5?bcVwMcg}FTyH`9uAa$0%&n_5RBF_o(S$)au zAptpZE_*rYDl9%P-g2`xzr(%0{p5j(M~&IhmaES`u(J5dvrewr?wON%_*6KVRvr3T znY2%BHkGj^5sLvd@lqYITTRDA-n?!G;UB4^hXtKm1{e^)cLg*r^RdMlsvNe2>G)UW+?4qoESX(06+A~RKAZ9xLpYjNmK!m#&5!;q2A($1TjL7IUs3uVU{ z{SKTFhfSVA<+ajYNdZyAy2uEu+>IR*p=;Gm?0%Ah4HU9sxI@q3V!U%zr&I^TC3Fl= z+Y=B7+a5hP7RN#^<9!@vU=2!pjy`b?_Z%UYP;j)v7PZFb>-_^Cuj9svpr~l`Y!$V!yeO&A;Kvo=eF?w9x1GeG zn|OW++!F*GW^lNiTmR~tfC%U#c&;l^NWIuAd*wFmn%hmmw7b5ez{)*!J#7y)`$8O( zhD-3zCQF?z=Xz3<%|%i7oak#d%d`x$YcL5pJWkVJUJ+QxLiB5yuz);;YN9j zE(nbYpos!Os|Yr_Zm+!=uW+~9Pw?kws|yG|R~sc_%zIK?veWrwH$JLGanv zQ#W;;?j55&_3JvaKx~K@tD}pN6jhbg-t#<0DnZ$MlUFNZpeanp?3fElY^S6<&!q-u zJL9wni(n3MomB{WI)m+E_5_qmX4BG;-{c8$ zBrgrJNgGBP+oW=259@B^v2~Sww9eH(hptuic!QvOA%H?-+IR>zFxlBR`-@Wz7L$h!gbcVaVA%}Kt}pSF>=gE-1oRqk$yf%(MZP_dctO=fGZh=8 zW|+`n6sBK2el&G#?d$tumxSv^wLqtA>M;A$!reuyq=C_o#>r-$BHvn=k*fi;v!o0lYAx>f@Oqp#1ht$T3{nT@?+3P zC!AiQ|fcc+4pE z<3xi^7(f|ry{y|5u|Ci#0wy~r;GG0tezulCW+B5FgI~-d0+j~UYxEKT8o)J=EI1lN z4}t=83_kraqu?|woa4MFpoww>btA-8DT3g?6-#zYo>6%3>E?<+SC7M5BllwhzyM`*0E{5;EBaUnjOq*S{{%p&L#QAmYdpr?XR8c7)#f8pb@oG6` z3k3s@6sI{d&w-~7IS>S|s2kk%faI{)=(z=f^m<2i@s0XQ54JQz^*SXELF5~gyvzlu zpA?X>7)dM}>fF+@%LXF}FA~5X)j#r3v1O&%38eR@b>#kknTj_ zeKNqAgLzJdxr7ssyNsLKg4BKcg1j*JRoVx_>iqrA@TN1GUU7H~W=l)2jg$Fmh`@)*NS^-?}c~_sR*7y6dB}P+$eB zWH#-EcIT(IMLtyULV8dcvfKP$4!dIr`US$hiVrlzw#pPvb;*_}9wUtR0}!Qc-XYpRP-Qads>sti+sov$HO_2%Dtu zzKX{HkHUYRBq+74O`Q?$Rql!g7Fj{nJang;-=}a7nlt27R*>7K zS~5-2#R#hxTF-P1OXLn}OBVTmk6`vgM>y*$_w8#YoB+ddwAj<m+;Rb+#Usm3!v{jR6u2GP%^R1szV|fkCJ?1Uatl>rcE`h_#<+bbr}QttkH z`Ew-TDxHp(;pTp-0kVVHvWhpPd&>6EtiUN>?NtpAEf=qK=vGQ1H4ox6g-{BH-WMaj 
zEES_YTB!EKRZS~fC|9}V)I0^6k_(F-C@mU(+aCJd(xfi(Ax0i~%}@N+cJW;g46l&- zi*0!lO1s;J-`IJv<9?^!#MkX(z6Sg7DX>Lzm7X|7pf8Hz4Ib5G9$hg1IYU+QmWsu_ zR}h-1uEEjWftgp%kEz2meAkK>D~gs6!(TkyTWhMIO05hk*SgySIqu97_B2bO;Q?feA=BxI?hAV7uRrmR?#f5J`auQ4%yJ>pXuoJdPL< zGa&DBh#CIppVLML*Dl|6Y@9!j22bcf&tW}5K`kJDIpYVAJ&ee5jCNT7h7h=3oLI!m zNb5+!L*D0bJClkDyso}St#5Pf?;Q2W2`Wsz95#1;vcVJQ+K*L$Z68E;Yu?hm6QZpu z+BVD7lf}?eDl%`3F}%mn70t!V1bw#%v)lVSru)9_hc?yQOu}1=*YpEiFt~wKfPmDS zY^6VTz+e6vf-S~6VAQ9o#^zh8;cgRsjGSZcfW55%bJdF!zf>QuF9T_xJ7SR((A%5> z1x48@px0aY$^zYC;-p;~z;A=LIJ0Fy3o%xc5_7Pw5VJE1!OvC{$dGr951;=sI4Gd| z3;2p*MNz$dKp7CC3rKxUS?g(}d!GCqe@*GmS2T4Gai(iMJj|x-tfNQg%$%VOpOWe9 z1fTdQ=rRcD{ItLChNBA0LqA&V9EOJTv?4dTQymA!W}y7lfy_18qF+8+bL8miu(NFI z3Q4BelOzl-%ne{{?z4nN1ZX~Oc&GZtXL4Hu3tOfbI99V-Rhv1%YVRy{J{jn>!%8$E zhPC@=nw$hvpo4)VRbgl?v?+pARUbbEr|5ifp&+Xm=X6n1u?u0`e9m7@+&0{nM-E;| zU~W=muD-R)xx2Gzd$6A|Vq2uV!+Z1@CpakrGD4PZL@mQ<-lv?Y-6BT4bYM8T>>c_{ z*uxH2M;_Jf9L_$fEG>mv;Gp7s-Cm}63)ve`pqYv2KZc`AAWbvpvtgbPipf(pF$zeEYr=h+Mik+*`<-1S1GC*bsZ;ZHPR=}tNI-QW*UQ<>c2bH|+G3a~ zj-=efYe9CPcPNC7JnO0DnSEe9T3IMVdr5^YR)I=7ShMk5z?uRKXL|1%OT6@EF>T|B zswL14=vEft|0G`n61sUDD}=V893 z2(9ZWFE|ey%cMZauQHee#}CHty&C>zpjJT2$`?cbJ=@O=-rb9r-1W0tX3_%Vt<8nC5dhO4pS$8R?Ilv3lc?>4D_@d!FHA|saIA+ zzDb}_82h{UmF(gq`1l+5j1ZU@kM4zvyx@F!R!V zFI}p$80pwDGbnbA%>_vAVT*efw55xR5nIp0fieMm98eeusEM0dPmKAT<-^B*O7pwe zVm~R)StxsJl&Du3X@tq{2)TF}6xf+P?zhsIb*oX<{h4^DM8~`R7iFVC_3hwtcg`ZMK z&?wej6ZOknZ5#_G5ar?%3>kekyXKnhiu?AU;!bOk`(m&`Clac+dzyxeXwGOjc%4Xhv^QJ9G-r(5(ncsZY144-dQ98@(d*Cph>G8d9k4W zbS7`!W4-O3K79R0CGJxzfN_T~pG8|!^-0%|*3=7FrPiU8vZ&L8gb5($qF@2x(~Bz` z4mpv;U^{@NcXu|VK=+z$Df^)EMDM#qf|0c-?D*jD4Rdoe*C~OQeoVadBa6$ebmQI?ZR|I_O`rS9w7W0#!Y@ zFoMpUv4o{Q8YMm~Gc-b-&aSO2p*vvU7}$YBY^fQ2^MzN=ls{yI9C~CT(Aq+4RUNxZqzVNUQ6CIDR~C^9*_Hw?-&yrc&=uWJ zq$&syZcZ|GmV6;HgX0X_Qfmu?wh~i*NSWqIEil;=3>b0B#9p=QT6{~RpfSQLZfXA2 z)%Qx)g~afjH6^w3!{$IV=iGc*`=iHw4BR}j39;NRlSu{y-$Ef(1@j|!IN{_50SU=% z^Y%_Cj62*4a{%$C3?r^|*nL0Xi z_sBzg?`diGG<k};~auPURGG^x*_45h=z3a|Vm zfQS28Izqq>V|x=x!vnLn?85^x6rev&2`*s5fLi#hul~D-L-ML!~A4KpGJ-L$FMAGf~RfQ@ZxRWh%(HS> zn&1j>2fn_oX6hPprMcLbyX)m0o2IMc-33<@g3eQ1%xvV&Rj6fHb|uHueL-^6ZM`_| z196Q{T!)$^@#>7Kv1<9kDWD+&ChX=RUqVkbQQXLMjO#c=?r!sG+H9)JoSrQy%2n)M zTqBb8&^S{)j=u;rC^?Gx#a(TVdFm> zu&aUNRQ~GUcl_tAX<#is>ucrh|GMnIUYr(t?K6w4Ix75sf8dM9yBn|U`XAr&8g}Vx zIu!4I^M8NslYgF`1*_y|7k8ijKlk$FQskyxE`lRvC;oGD|GvEt%((x(!M`I2PR74; z5L{?~7s-F(*55GmH&AbEJb!nRjqUR9Uc7-|{(=QCIR1rO8zA*BSojMT{?8%jFIe~s z7XE^TjS=_@7XCXk@fR%o1q=VlNc{y1f5F0Eu<)M|*jN*PQ^0>S3xC1F|0s6+{{ag> bfi17=3PY=-_V4`%{4uz0a;@^p{b&CR!qN$P literal 7410 zcmeHMc|25Y`#+ZKX>1|e7`rfIM2sQJU}TV;%06Rxtc|fRk--Qtc4v;p81?}U+227@3q|bb*^*nds5F>m>lLf#sL7p zVN*29n%<5coNO%g>#3p=1A4>kt#7Um0QWLE_uUWC@4=pEYjXgIk_CWxA^_~tS@ARg z2!#Q_vKs&(3IRYc_*$zKlHNhYIhuNzn*)k;nhiL_zzZE{9FkCy2=!(SS&GyYq8^g7dD^a0EpQ$(gW?7?WK z5CGuhJ2)ADtJj18fN{Xr){)?7jzM?^1*o`VgFJ955dpymC;%COppyYOf;%`Oz&|hq z5upS5T>?R;57vYYy%5$Y<3Hi_I~|A* zfe?&AE%3K!yu_YEfa z1_go-^tyWlg%WfikOM>iKK}5N;Oq65rNEFs!=eWWJ?Md|s=%QC35JXC{Xbv_J%7M{ zkLwR}$OB~vCu>|tkbmfbTe>IV$lopeFXmtV{Q>w(-Y(FWp!+Y#KiL07IsVoEmzlpa z{|T_f`{L*W`MY1Lf4cQ|-rwX8qC=eVjllUkqI?5zfguO6z|~+#=zr|_C(q5o$SeZbE&t^z-UPYdoCM z*iD|#It@&h-Fgc$gqYPc~of?#Auy+$@EH^?+gx*otNYYmW&x-(;>U&r&X z>2;wvfESST_*AS2jFEw^fdPD$cwm(2zefN6hPUflvfBpF_r2B5v6HhgIdQv7l_&N- zHB?S}6)P)ue4qENYxzKR-Z}MaU+u!UY5%X4JDv40TN&CvzP=7>;TY#sijvVO^}M3R zYm)O6^&9VJhEV z?AD9geLI%ap+kb_VaoU4;9K_N?Ic>KzRg}N)!kp~LB{8@AJ3LJ56*t&Vz07Xl**s9 zz_#cH5;cRi6Rt*oX&LM0gkEE}a*)A&Eb=bdD*d%D7u`(H<^}?`;0?5MAFh12i_xj{ zi+beG_nD_gD@)_9uAw2uMR9F3k2K|cRfxpGZ;C}|b4eYdI2Yrc&6)7on*#QZk|{o+ 
z8N?V6yW#n;XZq@#JOrU^VHA6HPUz)wT@WE#bu`4WUG>vek&(+{KP!da*kY=8;dDq43egltqB{V z67@a$l?Zc(Wy+~u-{)6 zqV{jnU9FtG_$zh+YcT8J8k?tMJux66Emz^`(mMwm!a_3meyA?V_9kLN+j#AIuRBG_ zjawf}9?7;sri~lv;1FO@X%8MFNoOmUu~PGjTRf#etdRWii6GC5wB+lGI4!C(F+ zV^2_&;b$^GLhq?(0jGWKw4O$v<6;z+YoF zpBJpWJU;wj)$$OT2IkgV4IBU5757MG%v8tt=Ka|y;{^xOld54<$yYm>yabRPrvRmX z4HY5XW+i-O+)&baCcvE1di6mG8p7bua~_eN_|)gVq1Y>@I#)n@uwbb|d^D}ZBX(~> zTVcnSDPR;?aT&7!O^yhC{YV3B(5=X8B}m|RSh`#|IWdXqc+|{$#w+?0(Ak*2Yg2No z_O@G{;GKmUs##I3)hQ7zGB#$lV=oY~-pjX{tspg868)v=Ajhahf-N5@nVfHSOi;Xj3uW$BnYbyoZeZv4rRn+`P+}w2u5fm$=lrQfEf5 zLinZd3-+$D+im>O3uF_oALE|cF1%m6Yd&?bFtc!U@bxx}v0R}ttA6L5qt?`2El+mY zX*%t&m_ZY4({(krb(y$b@ae)^2`w@}B$DIy*Pl$ITf9!N#$;~*{9-ARrSy`$)bg=p zu;xTt11&$_$*ai!fxpwEHLd2^xIMxv-^!cx+6vFCh2ZaR&?*}=JGWn+TWWqCX!bDjTl?k^1eL~o$7 zo_Oy#w)`6QjB#nxKxK6Mf;%fpd2`LhB znx7lTES9c?44KkzA~Yhn60WGo3Tt6ozP*heV_+4{GI<-&JT0s9b;`2=-T3~B_J_4g zyJZFVeG;3bMCu6dJnTX;k*@SY>(^I)YGp>Fic(51{dje2TpI;f6OZUt)#mpbkW5b9 z9k>YHP(VLGh4=bs^u9<0=)QHAnAVg4|eOP?}wxgl_y)h3ulBJSJ`&8)e0vSHzr z8A}*ozwk)Js-*2r>DIUqnVG2fPDyVa^+YrT6GeYYLeR-6n(x!qM(Gjd+3!5biezW4 zqOjvtPreGDjU5Yo_&HytbAMxtR-WlK`tFg+gSkt{da>wF_abvEaz+OAh44!^3%sBP z(|cbYLRah^`|Es?1SnCG&%KHR@;^pLueOJ`(&wXI1<6;DGB-|XnNfN8DLcdSEZ2>O z+ir)B5^-6m$1_EilSuCsP_O&(Ba_7K2t_g~@*(9ZQ|bwy8yAmRzeh3hcjcwH%BMU7j}E6tRw=CJg+0Qc3V!o* zZ?$wA?hTot&qNUl#KTL$4=5~U8FD`@t28#>G62YWy_F$P6U)wsZ`@sr_kJHfR~XE< zONsqD?b|eAKRAgCX&MSMf2A%dzA0|drr2}Kvg`1jUOm(=(5SvFY$Z?Lq_O+@)BTWF zqrZ-99&QB6N3+*|e48y*X{Fsj{(AS;&cQj)Bm~VI*>Cod$01yN*>+=uM=8-z)PD6X z-KRH^{n(+triHnt!c#o0Ivf2`J6bMj60XumHef8eV>`47r_zMf7*WE0S&I18x(D-@ zkrzF<`<5jGJkN|wGG%GquAaSfPEXXHwuX82hWc~4H-2wRJ;djO5stW`q^&TS!N)!V zQ!=i(KO3hVE!72d4XQj;3b@yIbegny4``}o0%yfPIkV37EVJmkbDzJ^i%4yO zYh{9C1-;IxQ|KR9sf&I*g3kTN$IP6T7P*QdW)n=bmnw4bLOsYV!shdn-n<{v6n+|N zFG(S}V#jJM$5AYz_HItRa#k+l&XI)tN$+>YY5Y+d${!!6O6v*72DU8b`!7Uf*jVa> z9!&-Euh8UP+!M|1Yq%Eqp(wKMMxGp!NlGe>f$ntl(RO;?V^@vDdF~JBAy!avXx1Va z9NjWue{ONaYs~!QbMe)KdV-5-Pp`K5QsHb96x=IU^l#e8TXsvm6BCc3mA%>N{WN(##JqLsR z(4zO0T>y3J9=x8C$ z_1{3e7~>be636{U;2B@3#;~J;X!?pE)J146q$0tWh+V*@7Wd+nD3%_+SG~(=NG$JZ z-cjKf7jmR zin@JnrH;PbeN!7dA+0ztc|cFL%%l zorqn1Yb-OI^pgh9h>{jo&%yB4#qw9vd>zT2L*4TIkL0T?ls=@@vAz>yXCz-{LU*Y4 zT|5D(4lmxUS*v|5y}`Gea4qJgG?Fh>C>O=ZLe)p>X*FM7pFg`8T0(68dM@R0YOmrEmgn3{x46!-bj2iP z)v(Wx@^HBCeaq4<*i+;?UaUvw6U-83ATxG(rt@*uIgKn3nF=v z<7Cdb-B!)gByURn%*I?E+xiq5`B~D1UZc;1c4=}&i6R~fzc}%>hsNILSluIXI8A1e zY{RvV6&Xt&b|gt>0ndG-KyG;%ck8%Q!R);?h8hq)1LRj(3%_H>$+rX=HC~E9QEZRr z92V8tW}GM!7TIw4Iu}Ht*Kqg*_gz_Q5vLd4#khrMp42unx@TG5^`W=J$;H8bePR3H zrV16|So@ILY4eJE-KVApa3g(L+}61=CGS(W64gHuQ)1|gw+`RK>zE37jn9r+8_A%R zDqz21aMT65W~i``LfTa|FG($&aa%0Hf((B1vvVp6#6J!lMjaE};H!0bJlOJOv~l3> z=d>Miov*BWNwaUBdNDjg#sC@1Q?DeglryoV##imzTZxu9PB?qBhN{8IwL5c&je%jo z9n30O(q>zvL{5#3+1)Oy-_O~2hPs+tQkWG*V&+%^GkqK29d=ThOOZALFA7GY>ZvWf zi&^^ff}~qAy=r89!A>R!bb)ONY%}qOgpZI)B4m$!#)yYC+6GWA719KX$TIlQ9BuY* z^d$@}iM=`znb|Slb+m;3jb?b6>S`jO#lG%HsxsqPuo3Jw%U*V{ue_k6Mt+>MPh9Fl zs|Gi(tZlP7f1}DquA_*A(0<7c62Z=eyrgU*#xYvdxSI=K6c@s=HuuX@T)`G=CHEOCC(&kk)8hwincE_qK# zmva-da(`)-&CJ^2zQAR7zPK_cb*;WO0bU~QY{p`Cz+STRQFIuSjE`+n&5?tg(#o? 
zI#x7fc+6ZpN>XJtZ_9GfwlZ+1xWgNwp7EVX%aMDLJdW5DNEd_NxMPAkk<=au;^Wny zhkICiFIS5R(x8J9R-LVIOhqgDN7{yamJqlmgm^lp4vt0>Uo-?+ymPfHQtU^0X*3&H z9EB0?I564N1CvJS7Y?Zm1XQ$x)LOe<)7vXI`&5wW;`O43XKpfuoq> zuINvJRZ%hPb zhGnSb1yym>3U|RS&9$nk*vDT*)1*rtgW1H`Y^nyU_z;B-ZbEIxg^lHKszP!1MT;7% zB_f4Hch(Ks)TlNkx(0IRBKs|uO+JFZLrPogFFmmg(Fg6;D3Vjc5;8j|+RLqBFMSuv zD94!fq!Fay-lo<`r|N#ct))Tt@uXpMni1>cE~jKo%)_-RA&t+H+hZ)R%(b4&*}B9& zaw;VcCg>uWY6#|X%Y%>YS@f3mV=r&7)IzsCv*uhgHvC0>7f^~Cmv4*3#1>kW4^=rT zX@ByLXIWq?ud@*(+-t*}6IM3*fWT>Qx}k|q=_73umCDDiBR0caZ1O7#3Lyg9E4b_9 zO%F~_Dum=tQYA`6;a0&(8I)R6%&wy9d7~kt% zkCGL%J#H+(En^J2BFigPYi$!%{`$2-P!_0()VW3r4WtG|ElvK2Tdsw_uZk`guVyrbzTjR}UK0!C1O1Og05CPOK-C$zUHTuIjo7yU diff --git a/docs/build/html/_static/mlx_logo_dark.png b/docs/build/html/_static/mlx_logo_dark.png new file mode 100644 index 0000000000000000000000000000000000000000..cda3c1f61326b05c26d7248e6a44816043d0c424 GIT binary patch literal 48758 zcmeFYcT`hb_b%+QaTJjwC@7#H(tGa^3muUvRfv?(d+#}lfJl>$RHceE0qIDjcTlR- zNN7@HLQ5dYzB_o%dC&X1?;YQG$9M04hcOs|y;tU*d#*X>n)6xDiqw9fLUo?u{D~7M zsMJ&ybx)i)l}Y~n^(=UWwW{R{_|X-hZ{($G;mhLY;c8>=1Y_~?cZ0FO{OoN`oba1= z)^4S0XSqNSRZ=;Tb5rP|+S@TInGk0o=jQ=5`|c^hXD8+51q3Ossy zzBlACTG(&9GLPv-p+Msiohlpb{aFU-`*RF77atlmUlcibUlHaIoa7ohaH26DJ9_pZ z<7tMlPy6qz&;GVItX#b0>_07B#)(B=Mo1vh&Ak=bI$1qBq3IGQmJ-b(HEy3m5c6&ka}bFOYKCrLdzZMA#%{(K|wJu-vigGZYZIl8XXEjWYG;pMIlw zULBg48osF+a&&jTm(9cVfUpynPNgZuwK;h{ zNTIcpZP~u`ZNu!DZ4KmL_4{a*#Ho`s?n$pw?ls?$`?!{tc@U{&pe=^-M7c)^s1ka- z$5$nX{Nv7KbDI<|$yKWIEt?uSTNu3eLUE(s7cwRZ<_Awmxw=niT$!lngj;YqmhyRK zt^A5I>Y%Y!)z-OliCyaXXNQ-Tx73a*B}f*-f{7aJhL@t5D7o%Mpzy80H{UuR7mqfr z^PRsB(N7Yuo@w-X5-{$Kh%>7TIe9wg32)w-;N&-Mz{1+q5+>m1>;}5ti4$@VKQ{|2N0=9jCCt{| zMV=MigkWW{x0Yu$6xS5ebh`($vsVr9fawK1(64)MmNo>H%XB6A%*+;#cys_Yq-LIL{*IVQnL$ ztEl{k2=G67Ry!{*HyJ@eUteDVUr_;94_iTDX=!OeArV0l5q@w4zo);8mxUj{i{~w} zh@UbPVV+hV_HJJGt}ZNOnHH9=-d^&otl)i?KlO8V)71P+yo=`_RsiiG=x5<3C@df( z=+d2|)HJpKl0beXTYG1> zL6{TF85HUX>L>hf7_a zfiC9=kT8P^4TMAo=^NU$n zTJu|oTUqm4N=OP@N{Yi^VxW7Ft^QM#nv18Gg^Lx8EDF3LU=PX>wXv}lld!blmk_oR z;ujMVmExBYvl8K#u(Y(Y5VjB%vXBz`n+Pord(bN_oc^vAS(G&>N?bxnOhQr!#xD%B z1ho;DvgEfEwGiRAl(vG2*}$YgU$r_GMV<$lJKAdUtRezGg&$whcCzrYarJPPXVtW@ zV$st3*I0dfXPBOs1=(uCl0s5q;=EjQ7{qws0fA9qxF-d8dm?(^2$XeVQ^aUGBerZb>jNj5m$QmXjB4I5gDS2## zzodJ*+Iaa|c);%1f_4OL1^VZ)tys8zP9gWd5BIf$k>>}r7{8DR|Nm|=!9U(ukeoIC zd}TSo|IH|J#{>SBX29?LyauK(FdGW~l@9+f8mRF9%b!0c8*ZF@nR4^B?4DtorrM9Ett6-~j)>1=7@xbjL~imV{0yLrreiYaQ;UtL#2^&RjlydJ^~1$a7G9ayqcxeqX^> z967wB&z(JZt>%EibMh*9r)Vfsdh|4?)`{N*qHw>G|2QEALKxr=!-L}wD$e7Ne=h!G z5D;qkzu^qOhtnRr)W12{KNtT;#QkHC|H2@*+t2+iS^Z(Ue=hzRu>Tn3#DDD!uNRhX z9sAU0x0B>gJE>pDAAhWa|6Kh4YmhEYX68F3z#f7G5vJAA5w0XqKkOt5;Tw!+ke8YV zfbb>cL&ZU$d)Ipn@%G5yco8_9M5NRqLo(P}pKGRzsgahUq=E!Igbo870C;4T?Qr(A z*#WS>18`KY4ixVmAXQNS61-=9w7+#BD7r@yf!CB?%TK&Wyz8l^z`CG%?Tz~j_+uah z->#<>&vnIf_lruueNmPHp0Hs<;5V)9QDF{NY@cEShzP(lW9b|a8X5VbR#WJpdKP-l z+>Z4+33W4}jtPSx?N#(ch@+5rwyTxNyoP-cU~z1fWa}J^k-s9;vGqjG|9uyqZiRk@{8uqYlII zY4f9Za3F>)q^RljCvExnm|uJi<@ucw84RopKR0IVySqUZxSi(a_E~^LOGC3uO!!Nd z=|kOYXR7QI2Gb3O*kj>aHIpydu2em4Jt^(5GBCXal|&)kSFhI~w#70jHl#=IE46;!uFOBJGC8L8@XOahj%4q1Qf3y0r zT3MXG2}ww}&hw$2iGjK^@91>zE8R~-NqsDHVdaR2Dwh|A5hUZKu3ZhuW%Kdm5-=05 z&G$0cqde8e0KB&!T~5#tVAtV6z2Ge`eMpLtAZft;jY<1;h%asuO^ag_59gd6BKR<+ ziyM3cOS=&cQ#qxYHO3>jQ>RWXomFP*&2m_US1e8fu}0P95MQY;RH31vmUMObM!J_d z4-B7<4Ptea?rV1Fm!<&*J%Aex#~|I|>dQQCjbBPA#1a-ztK9 zY{24?J2)y_i1Wza>P5~*i^*~`ERI$S*Tm)i-v41ja5^G_c_}a*U|2$gFGKUEq<+02 zt*5sBy=SgPGI69UN9v7M3ZMDi+@!B##d^!lp2HBdCk@H3#O!1LxEk6PZ2cfUblk6-7=wZ4Dkxg{ZN~d8*oxbYO2}k}60d z;CEmXU#eA;5=^SRir+do@3_`upn+TB{-7_}No5FpEST4oHp=K414?RL9}#yOS8S%o 
z&m;6a6Ilyl;TvWBYBU%6Zv(fTv4~WRW7Fx;{P@Wn_L}d7dmEFhoXU^pl?9+RLPLBX zDXnxw22th}?lP2@*)>S#@i}rOHoe1aQ}LpRrT3YbRDLt%grv`#Q=ZoI#YJNZKsW;& zm=6|_lKT3V>Gi^<_O+T#wXgORunql}M*Z-rl9nqfP^X+ij+J&%bS9Maoq)c*8qTlb&| z7V_~Zk8X2pjTepWgt+W)I$6mib)nd!7~|phVt|<=wCk)j8J0pR)ahkFU{W@)W4kX2J$-tt}TN0_hwUz9LRw{W99e1 zcP$~`pn$Ck*M1Gbk7M<6uNRznp(aa`u|2755!p>cgUL%ApGJ|Fx9tiHvT&2;fo1nI zrQ*F>Uos3eF2gHOll~LcQl@-D zzx~w-V~j){_al*7h5baRV0KjAcqGIfrP#1HVX8*YGSl@vwD{Bfo@whxzlvGK5 zKLd~uC1@cXc+UM#Mt&;lKxjWb^C`}ZzJ z*HTa!wh+zKv2?MW!)guw02sREBlc@v+{(ebk)=~}O59zEVXyO>J^4%W-9hs{oNcCm;XtNzqT)5qs8qg`bF_yH8E^|i#U`(DlZ|HH z&Xe%!nD@Kd&T~PzX31YXSWSJ}92ZtmwrU^tD3YUx%^rAFS@+UN6MnQiW&vh#&{5dq z7L3E!JzHbufVJYHoRqjaXIk++rJ7Ce@E*T4PAB9zkhy4 z?XBv@eAvQC-o1#Pl^ciLmszGR)Wh8iP{3{xgdO&I;?t+wTRVs&jH66^Y708t48HA_ zJL!TT?puc@wq4A~O~2f={YGqO_<0H7ieji112nAwzKqiuxoOWkwoe3J@<#Bcec`qz zxDMdMGXvI(*?O;<)_U2Gycd@P_~`K6D$KolTEsUP={xOHwMf3~COQrs?Yh%|>?}`1 zfykGj_|e0~($Du@b_{t*Ix5%MA&u)QOH>{$`!yB4XXqbk9}&<2gtPZo#n%o!BWOR? zI?qV>O9V>{EW4HpT*_ks_%(;W66?6W>gXlhx?#D;n9b(haJYDbQ0q;h z`c`I*sX&kd609!_W=Lwq9(6fkhvly~7QSg38F9N9R>AkQ?@hgdX&b~e<@pv5ZNIDf zn<;LE);xuExIriG6p^n)$s7)c>pDrY08I>teP{($6Y0~{uYLfrH$lE;lk4oI_={3LsPodB@7q8z~V!>JlpQ)*rUz0!#IjAuGf_6!K~-JNO$Gl!1doeklsl`Z zgl@iuhT(KB_kwv>mG1fTo&y9Bj9-yNOr;UfFhcquQhfXHl+|*K>*q4t;1 zhu2-G>g*#>9c?GOrY~EMN5iqboc-Xk-g~`=ynv+wW--uyX*tKGJ&LZ8ClxIT5jR45 z&?Xt0S@l7+T)nR2n=WzB11I>)#`CNrbPiU(Ca?0WO9V|!6re(`Sf&H8=!z;Q?(Y2E zw&-i3OPQWS-1t4_44fR>Ao#5xvhvl5@Twy3Tffz$OsczSAo?&z zM@MSu;Z69J3|L zjia*Z<_+rTkKUmhv(_p?dkHtp(S}hJO(NZQNF@!jGQAKY)~$c&s#x7ob>q3T+13YZ z&Tmj-T52fI`|7@fS+a()kcDS`j{x&7Bw-cOkE|nQ<-cBFy2#Jwps_`h#Do|0hCVe4 zDXRyH3#ykET2W=8c`q>35Q3X=mG!x`QSEBm;gmB-AmlH7K5Pt&W?rS1_1Sz`loKMu zoi3i`5XmOt+Z-xe#q*ImDcjJIzbP;bpOd^t`t_#hL38GzrcuJ1WPWQtQ^KuSIrSk~ zv|K{IhKtdMx4M(ynv1}pC>vI@166-z7+R>BxLX{IPs&V&LjR>QiYud((1YUVxu zP{h73sm~m6=|moNnl}*l_sGiF1wPo)RxoNjE!R9ck4as&>6NA9PM4)G@530DKT^9W zejg5;z$3$Onnq8jN@>Aa?GtJbCv-pS%aSBpFjV+f>X6$s;)+mc&ZEGsnk6_rYBKPg%}zOF*e&;h zgu;7a?j-g()dDxSmo~EQwS#a<%I>8hoy!e|U27dtj8+RfF03T~I*~3ga2iAoI%w5i zT9`C?Mcz;Q?IGzfPJ)!-+QMDYa>zsWlfyPH&GEMVXWLgAis0C|Yo-gn#u8Bpg3jl6 zz8mD=_)45(O6(h*i*tLM>IyyM_WRAB)61rPeu z#U06!v;83hf0tDdZk@fNkv6;OpHlj+6ORpANSyhWi^9+gn%`#?@A&iyoS6cgcgwAb zjt%Oh&PH!X_LPc_pf$t#qMQ}D8$Bd5;oRzjTsLCFTSd}dis{pA0P$PAh|L|$@QD)dIvkbYJ#|%$l zhQ~ACTz&!Z$1Xi)KI!GLH*PqNJ*4I&Jt0CXlTyN~o6KxT?%5JeR~%Y?ue>`QzVV6w z?Vah0Iv&2gcWQ()lyp6r(w;p<3>s+5L|`rwx_Y)CaxzCYlPd!Zm!`;sp17C0M(4bT zn_h8Dm8QtIuf$1s(TX!PJ)Iv(V$$6(&r#Oc_h_VDrrRP?y*KD*qyL@a`86 zOecM!p}45`x_$$gKzqTO!1dFCBiO9_O6eFJ6i`g4q{p1%l}ll2Vm=DpPOPMDVwIPb zQr&U<^(iJL7DCmP6TWQQixM<%yLj}*xZ0-Ien-o7U=Mv6ILMIb1*=DLA>b3QHJ0K` zLAe)BrykdK=i5Sc?{-%(#w%yyZfQ0KPV!6{pBU`B!kb@F5l29K5T+%#cm|tEx79u# zu9slpUWmheYKa;lyMvxkOjah?9fi9+FIpNuUH3rNTn2z?^- zOY)2j)R6&LFR6$JLCAO`@hXScIRx|_!n8oEnEle8tr~xylbc)CX52yD5*%Ba$=-iS zse_f9PkJCOqbG)*KiX3Reb+eY{!V_zi>@_3{iF^qV*+n^bHdVZ@%}q2yD8#l_q3Y2;gLL3{#u5spIVPMBa`5)kv7u~1FB}gZ1JtmZo z7rJVk=3#glut(9iy$1H8&NH=YrN->p<>!P9xlI;uIUkH9dtKyWclzo%Mllk0@> zekyjDmZTxrl>cQC$(AzX3hhokMP)1KolZGSnw8<5Uy%r7yigWLT$+qpMcI8^^yIMJ zGxMGE1`)0cScJ--K+4;vagceAVR@kRxuRM~2JgUVus$nBMXAHx0F&|D?()2_M<}n) z;xHThbbR_-xcNt+usq#H9A{QgRnSfb(&>=`pUDSRj(tss-97kr1EiONIp`BvlQ_z} ze*;sD{spF#V_aMCHVGxDvuDrt05`fTT&t(RB3i$bc;K2e*94)5+d^jjI#G{TZG2aI zV+5=3k~rb5z@zqs-yA~{y0$K^t0Z+5L?Nu*t7Fw(30s9{dhScdw2+zN7 zNS$;b@;5By-O_dw`qD$ql*?Tl@sM2wY(v=;OW!J@q0)-3bWA&v{cC1{6iLi~UcfB) zabZCeP-9%ZR9ZdcaQ`BoX)T|>OI7P6oOs>$2kdtHRG`J$o9RZpViQl^vSRJ#v#GY> z&fyWe4Qhr<@Ygx8nuj$G<2+wfd%-TCVlBZ0=_kHP#&g~+4N4+kPD0^x^jmZVR;^jV z-#unfEWn&VuDy=ydRJ5AQJ`AGwHrtIt+Ub0g1Ord!KpDc+sjja1T=WA_MPL*{QP|b 
z7|{^y1!3y;)L*Dp(~^{puUQzgreCg2)Q+F{NZEeIV4XUCP<~gB>vq*OHvnbQjo7T} zw?5om%Qrq#_Ne9s0qsvnf3Q@5Zn7Y!`M2qnYG^p~DRUDqkQ1hg}*r z%A0UBV8c@qQth{GlGbr&j)s7CD_KeDQ`}K{un|{%SF~XeipDW0Rq@z8(x$S_2C*vn zh0|EXB!*EaYchXh`1%8VO{Q+_k9O*Y#z#bVT9UDphL5jA-Bn@xvtovj&h@i*FGoW( zB2+d>rk6Ll(g_NYr$;309SmwB>kkD;XoDyLg{!wD{@YI#vS##6f=r%A$Z9F?y<+L_3ZN9^7=C4!? z0nEGM#2Wry4-c?xGhDsqBK5}E{0k-`L9>48hR4OB3w>LuuuiBh5(mPfxtZ=Oukww; zX8jgpmVmUbiZ>kQ@RYIK+=((%&PrK>yW&$HrDv6SXSqKJQ7*2UwP8{-MVT7ZSoX}G z;&-^kAdY$QN5jjJ@bzYY5!<26X|s)z5U=F>*>VNDIUw|lo~hG1OF?mMS1faJ*2kni z82e+RBxTCWS+9!8nu6Ha#rz%3*}!8t6(iaAp?I_haDNW1#y53=@aQ|A&966y<@O@1 zQS2#g_zw(xIs-#gRj=PgcRKG{_U?E#tZvV`2~LAfuz1`%o7`C( zwD0ij;15_yuJ9vM3zwJSohE6_;gXc{(L+Clpe5TvaNY$}2f*7~4b`oZ6ElZJoc zCWFu6Uo9foSLNS~#VQ=y;TNcfIe~7G5dPI4rSX%@ZyUi7)Ek)N7n!{If$z#e3Ck!U zj+nc*%ksi{Eg&gc5JXWDK@>$`>q_pD>ID!*siA;?6wFD)qXW?%d#P@{W7x$1e}_$MIv@aHXZd?x9LJox6aK{& z&`-N7&~*-AXQ3UuF;Z?MeMQpedeS1oz;Hf?i+ej!|)FnWT0yaVNGjTv>wzZgN~-Az~`L$#YDok$u`2| z-w~8^n1@MCne0B>4Xb_RG9M6*gCbqW3(er!3yNk=vwjG{4dSJ{HrxKvdl~Q9?(v%T zq-A@)rBQR*_KR9#QBp(SD_C7N>EE_oz*XyZ#&IZCIjMQasWq&um+%ttaU3MW%3iSX zyE-P8342@?rhA0tmL2C0?&uz#x2E_)C#ihFcEZyWHPMU%@lKXGb)KvXEI_wRh-Sv% z+2i;XFb~2gO*ML#q!~9NiX3kwG{wP;9fR7U+<$-|!4Z@rWO%JcWFlgg`&EQSConACfCFw=P z$*>H=4^t)(U*!e~K@w<>rRJTXTU+Yi!x>#bt>i*tiheNok(&*fYfMa+6TikK@2W=i zIFIBuWDr-->Ydx#igl_Z$jp(~4B~KGb)pV^Nc&YvThDxbOsYWHgfMq-aIgnPiGE^b zhuMVyf>+wvA&2&7MX7AB+5Ml86GjI35sAE@nQnr#I%;(71k&>ZR*Zmj|;bC-ZTV zd?p=PzbB}1{L%wd#v-4wrwuleZhv$NAh=JpSOxk-@X!cT4$16puvF)9S2gLS5I)xb z$a=Xg^{x|>I!IF>S%&f=By*9*4WW9Gj*^ZOeA2`Z_?ja^1OpJsne#GB)>q`Yy_CkJ zr+WR-!xj@Yo1q#{>G2icu<9+m^IloOYkLDiWFlujf8Ffek>n2Y6oEMEO`!W`$VX1| zbYF5@dw(G9&#=FkHF`c8NiW&ewz{QA*zu`E(sVr?KWUs7zpXg!Loha~=kP7>y9~T< zznBJKbw(+6Xa` z2KFmwtUeO}yF)Yy*Xc%pHKnC9n{R)P_A0)shJyMnD2ceEI!4T^$^FLW6!PuXD*Q7p zK@NU_VQ8fG-%%3@BmJnIWqccFU8_<%D#v@m7+dBV9JR#z#55JDPKwhb$RIB{qV_1( z*Rum9da~sM*`?j?vOAw?V}Ds`=bwLk4VTguK`MOP;=P(I(Z?4pA5M}FxMHItY zW~s|(nf&i9Qtx`EdN!MR^s;*llv;u7;0q{P)^f0k#8<48HRp?#ZWJ4tT2%*R>%-`BnpG_KoVN^^HerXmw;TX`T@=D< z)fW5t9<{KMy;O-V>lvV&?(&UR15=k1)+JS9yy|SFrsr*&@rd%6IH1}zM7b@ddUsP3 z3lBm~Hr}!SM(zaHH^=!+m$vGVK9X+-1g&=$hp~%-*jpX^MfcDQNmNZ5VMC*q@mc)wY-FpM$M1ui96ezFDEX%6lQh z?Rw)Gan++egRlZ~su;+|_dH(Aw_r2POevl6k1GsVIMd=}!yS7R8G&7QadXqY$b{=d zpGy~jN=m+=jfEgp-yYJ4O(}K-0}ZRh`-xxjK}=;e&cF*{Xf^rSD1b&;8U-iTFD`vR zht!Ep+J2#mvbIEabOn(t2oL~2jGYgJ+rAueOKD!cK>cb%&PPZpwyv$t7SF&anEajxRbqJ2-AImRK+JLC6$B^-=*!GTO zSi6A2>_ZFdK4A&oPjdf+Z@T}jy z7jTb+0-)10l4chaY2}E!qak{9RNbL3($ZA}hAgE>r9z}OHJ0%or*pp@tVEo%B$-?Mc{F6^wLLp_YSq-V5T~y`SDfcjTZd=WH79r2_C5Z(> zcb^Ng2Gbr|clISOEnJlanN>?4Mzz3_MKbCd4*+n^}ywrfAd(L;kG6~Ew;r#x4Ol4&1Ey@}5Ocq^J& z;lvH-wIs1j_Eie?{@T#6fP0h$Dhn(>eoWcbxxMyl)ONh79Q36vOCmaw4ER-?#yHNr zehY4CpQ6HGx|#c{hvhT!Lmpj)r!&JV&#nrk+tA)gZ)tWTS!ND06(g^5FxFMRf+iTJ z;6ND3x;FtcbNhR^lB5`R2U+b~Q3@Saz9W`-C4PRywV0_?(~4(HVkWgMguoLbz=6o+ zts88ifhsDl1Ru?axX#PbxL+WWw~hhz-w?GLLYfTzL}T43j;VV0yr4=)!xUi6j4LX^ zKHE$0i&R$Fiv;%6FifgkiEZz&A{ka`7>Jwci81648jkiQ;B5y|L2xMs?``z{@e?NT zowv#%q=R`+>QnLI)iTLC{n^hZOstn|Tusi?IO4S?Mn3!CvO4IKCl;kY%R&yn#4Wwe zi)AHia!x}CoDoow!iK>1@W)(6?^{@|QC5aiMP3c?=JKlGjBN7Z&@I!F$1Wb6 z@~f5iG2M%4Ng$Gw3r+*0Yx7OhS|^L&Kz|IY<&QNvPiJ2}*g^+%K5gupR(y{VR_gGD z5Z4AF>{q%~8%2fihT(t0Ch03+7n&q#4wt_MU(4bIv9mLyw_8O=Ds8e<8#!7Z6*z<~ zMvT~WvNi1$q?cMgSE`cN)%Xp*pDB&qa)k#DC;%npjp~IDvZw2Z;Q)zPWi!ZdmW9eT z8H7##15^@Fd(XZ`zZFLwu8vdcNu?(htQ;hM+3bi`CFopp&-7;EU}XsN z9heo2o5KmJ5!ObF`orkWzwQhNuO$@p4%M><0v}_6A>@2KxNR@`!%m9V=;{?)wHIez z&}q;(6JKR4d`Y>R@7<7m+q^=-;CTUZ3;_@;N59wU5nPxJJVPbR1s(q zCEfiVd!)SP1BK-Q2DU1PJhbd^1di#{h{S86e4h*PGDymF=Uy zO)tMd-R(OCpR28b+}^? 
zEeOU^=8AJA9g_SYnDQm-<9BmMu(OyID74qJT1TbTBRD}fS-EE0H}Fcdi6osP3|x>t zOv}>%NKMy4j2)S3n-iS!M7iB~YJ`jT79ZoU9AR5*`uX1IHn%6qhxeR3sfm}*zjQh~ zi%7%WAvAbuy+jA21M^|j1Y9?S15l?&6w>;y9l3lZFE{Di@kn`AS>J8d6y-wM_(JOS zR}BxpuQfzf!c(-KYFgY^L@fnHTgy4T-zk>;szWK8`NCl&t=w4; z?Y^+SQV5p88`k2e(uN$5X^1Qyt69E>N)#NGjEOq$^NQJ29(AlsW~v?1>~%ZiY=7aO zGMgSj2(r54KIKGqte>^K91SFN09}%+J;g=Y(>25UBVnn1WA+gbSBSp6Br61x4csrX zbo=6FkoE`r#0+GgNc=YakRt_|bhldX*;AF^F(4XO26hOh>gGOEpKe3&hF!QCAi(+S z9t8`!<*2+Fi{d_ zddcV@xSE~A8?NKT$2vBK6HIO^Do!9P&G)zF&>T<3e7RSKyFU|3W0bccO?hGi9)TJW zvdMHdPYBIgQlv!B*gV~i9EHVSrNIhLh4_lxa2$?+G`(Gt3N3QLdW%KF-d<|sD;ZDO z`8%ExjN;UxI0Tc9>EY2)wJmoKLbtnOieR=_4*7k6NIi?Dh$!r~$p;I!?yv7&F~WPR ze>WhARDM%1k}0~ZUsG8hW5eekB6KioaSkQ;sNPLKXSo!_C3iaLfM(`w-tHTLd<|1)cn!)mb`{Hd zNO!rsj!xaehgyM$2PHzI82TfnowYkmy(4(8Yshy6&*A&Vw*BDLN^cIk&HqHZ`VhcE zC%v&`ee5rwMMhhy2XhGE8k{}V3z-_7mME@iHh9W@rUT6c?|gBEZJ8@fQx;LSjFPwVPi~b#5#}RDkJtPgCaRi@q)1h@UCb8NE7|n=h;y1EAF9JL z*QDck;yYGiCsi*d@M#j1qRN@Et3GRO{SF5t>ML;K;ogO-ph^CnbK+VR>h>4s>T{o>VLaJ|W9wnYIl z*QB{C4nam#94&Rcg&*b6W~8u!n*&VTt672bHzCzZQFLlhD0FKcXhk7*QBhcMS3u;k zoz%v(6J4ssm3NU6ozLNl#eQ5m_5Mm3WqY2sNxWbL!AJ7ds)FzyBVK7B|JFzvJ-c&k z9L#0&Qx%XztJ}5*vz~FAQ`IVL>RmZ_P9Q2JR^0@Ikg-WJdT_+TVIJB_$W6s?Y3Tn;VJ_}rd+_B!0y zC8gOaKDWVh;W<*8bkO2knCaGbZfQ-Z)TlC(P;#5V!Pn|Eu}9%11lFbTb)-X|?fYF$ z1X#7=nr5}sb@R^Li((96zf5sR2I(NN-k(#y945S z^@!h|IBxXjvv(#2~Knq&}Pt_f0zqT_eyIMP~#H@N0pC}*^O{Mk+7j=@vA%FCGH z)63bUUdW157!o$c_n_qi_duR08pSN-cH2B)jop8(Aaw~z+CnakZso+<6I>90X^0P{ zy^P{1;;F@KcwVF{2etw@JO-|n$lm~om@3O%$RDW5UDKLeykt5K%;rqfEx|`%Ccfc0 zr~*c)+5JIkL0>!NM*Tbd2zu|#ShYP3(L_@Zogdd9li*@RUpQeA*Oe29!ie~8KLV!s zjNab{83|Kq2Z9bB$lwEEE|Bp*?y^D|*-VJ{PZzZCs9v3V{E&Yrb(Jz__AI^pb7qsB z3~{R)hzvB~D2m`n1z94!0l&WEiE^oLxlBG(p(VeU6hy#uCo{wEfvgKz%ZG!AFC7#@ zV#9JfADm`t2gOBr78V$K4^*_9!#9vh?AVIBS(?vA`uds!$KcN8D_sZ>-1%?;JP^>N zAXSNqdqj5N*=jZs6VYhgl-@b~bqTK5AJo7+ z+aQh2?i?;P{}{l8@;xAWa;(0Y3)mQX> z##oo~!d#>G#?zf^qrz8gt;WUbG#nZOHEW+ZH0o$dkVfyx4InG3+i-E`7eO-)r-jrH zK&)B@a^T_jU&&Ppqd{^!vC3!AFxBz%^z>ata)bckn z22}hQJTs%9;qc_m5lFmPZm`YS(>hX{XsO50H+6Myr8C&SuZzKlWrh$;LkKD%&l;CA z*DyE2in&p7T7(#CIWCHeOpjSW2QBIUU}-U<>$gM(erDXRH~$6v$iILl%}Fz4;O0Vr zpV-2cB}WS6;Y1yIYm`Tm6vIZk?mPRXh4Sq;9HTlxMw08{0OCs*SPu)yF*5tz_SquI zNl$*Mvi+6#CDtWZ^(XWbaaeQy-5P{lk`#QaU|r*zH7(--aU)w&OdJ*GPVlXBNv0?5 za-KawWqXRuqM7-hEE=AlEE+8oJrU&g#s?p@FjeC|`T=X1_~8fvI)q89 zqSD-hk~N2bm>8T@Ur&I=pIm^eka-}v)T}zZqiDg&p}UyOsevBRrMg@(w_Q-(*V3nm zvog(asJ6v~0xlc}AWnF-8@+u5Ssv3{mPO((;-|a#>*jUa2#f)QNu=LBbN}V!(3bA? 
z7({4y1#rD`oqR$IB#vKgshbbbFbDZ`Kg9B83&p2K$5*;Mm@b<2I(HO;w40r)v2fmK z@KBSwxZE$1IQdwhCohf+kE#YS425$~P+dpMU5-18ZuWY(b4$$3izV!mj0A?#pu$|f z78Ce`G$r?Tydc$_r`l$a2TG#4MG~Bzn@b`9^rf>nS$)FUAcO%sc!CYwAO6%+YFhVT zA3Y-&Z>sP(M3gc4tGL=vjOe%M122N35J{X5ervFWqV$K64_^n`oj?s>;^lS*MA_uP z8N@#ssoHw{X8!ZtH65v6<$@%p`I)$~UB*nF?@5Hnf`P-l{PAF(F%kDM0DzEswR&bw zBA=yl0pYad!zI{$C<)7Arm@$B)J9F#-q9)6CwcQL{C+8t+_2-`SaWqw4a6VD*jKqT zf@hoFkoFdElLTCxxD>?*c(iF`cR6f_GZLAIqwC&&t^hi_eq;XY0+06+F~uKdoS^5A zFeSK#*Mt}4vpaUvi?}R8H=NL&)`1>B$`KB31i28;JG?oxviZoQl{KuZ6b9rbFT`p% zmA*e+#k1(j_49y}E`G8M$5(ttc^ki)lS-|CyUmx-mM~1HkD2uwZ##%C@Lk|jFMKtk z`Eb#Tj!!)$h^`Cd?&PvVXm5tZp&%q2Ef%bbKosoJ5m{xPLX=h>)lty1LzVFF2P~$MdhCxuu*vULm z5Lls!(2U2sD7TyC4ico}rnEmi4!(!2v!Y^T<;{HK7&h2GE25`|@|7lmeHClQ4W#1J zQ8$TK{~1-1k48?Bhr#`@)bg>&nFDw)duk83-l!d}q@zWak)Od_i0Q^2Apz3o7>xpO z+7>0(7;7`}lY_Vu%C@URS=fl8>^_E!4_i94iJKD{4plF1()I4EYNDI%g-D^jdq1vi z8C5^vGo!UA*R5^CsB|!!1&PMjI8G+aie+l13i4waRhBdo?hTu*#!*po9-njiJ{}OH zCb`HwyFO!ZAQj?@M7;Az)l+;Ry?hzJ?TauyjyEJ>MeCjlIX-A|73`G2=F2H) zpH7mJM~_4$PIA8PTeLOSp>IW?VkbjkPUNgr)iGiEhOB7;1+cU<3Mz`Yx-D({U6 z3kl(6W_Wuf6lqhFTSp%Q_&3hp9-4BiWJWxOsCYh;Au8KmeucW*cOxNf zIC#K<7Iyv!o!BFm$(6$W4pi$yiHOMq>;4Dz{DemYzW7?w>mp2qF3xGHN@?D2Yfl{k zfTf+dgzu=WQGXg3YY12LytPW3h0m*pSK;?!$?z2EsAw|>p1KH9d_J2UAmIojrf5iA zl*9B`S|gfKL!v~-a68=l;TdW4wW$B3PQ2OJ5D{|zjOxU%hx7+*ByGx^@w&3|-J={C?-zt6 zPKf=Zi5p;6@|c39CQAU}G{80$W4MAkuLLRl#!rY!fVhgB!_6+7ZrlKqeu6L1B0hs3 zyS@oBd<;N_PxS1)$d#P4g<54d-eo82TqL)O4S>*yclGjyA(mql&)Ip!lK-YtMl^f+D0g~#=c8n!dxmr z{@_^aA7egP$Wi)W?dzxkC}60s0U(fkdq5fZd zy?0oX+t)3+w{Bz$B8n6R0hJC?q=g=lCSAI86{JIyUK2Y40@9@mNRcYN39(S5OO4bh zp|==nfbhO&#ou?n^PPL2d;jHu=b5av-nHf&bBr;m++1qQ&HUgf8}Qt{ryh372zwq{ zlozW*evvShMzJ-3M6NC5yAg=5N$&8D)#dA9LNTRe5mpM7V#1Zv)^MM zlgVGZLV^FhDy=a2|-M$RRL+vO^veWlUj_}R<~4roGf3Ds{ePQIpx<#Rl2NaJRnn= z9UN8MuFSnAKjC`vVd^IF4v8;jo()UZFg!>X{s?Pkyv-SuM?+Q(JhLrgVBv| zi=*2;eI2^Cq8b-ZPkWU#@+bldP&uFgWh+Iww2Mp8ZyaaBdQw2sK@D2PLQ(2849<%> zmv0;7f6iG`6XLBJ{a2M?xQc{seKa`J$?dhXm2M9=4H4nhPPDDq$KPKx->m{H&-8$z zc$=ahixdNDm?gUu%gR0cOTIVeIRk)I4qE0*nT$yCbvyDF{rH=x_6ob9gBVdzjxQBLF#Vr0eI`2<=u4q!P*LJf~9 zT@^F{8l9tom$*+r`s-=|jn!+p?hZ`idO+T1spvSz0k_WO!7KN2I^C8{zO1Xe=E6< z6N7%PKjq=dG2_6f{bA?0g+kL{p>TB`2T5#?E9t% zb)n^yPHX>;ca75(qcpvo9;3g>_Uo|DSm|9MQ5U)JoAk$PYXgQUgweA0=y_cCVYZcm zE*IC;nS=T7QRJ_HiK{T)>};Qphd*7%cvjb7vG%z)MG`RUv-rA$y0g&(!ZFyKKRB0+ z3cS(2c){R~$Enlh2^FV9=U@2Oy_QyXHs*`}F-79)QjL$Fq178$txcZp`$EdDHG}@; z1A?n}2l|0&T=3^`u0(MxCH@%@?R>xOo-@)K+HK&c%S|FxYmKWmv`3%)nz}QU(Xb;1 z-R#L~MogJ4?;+uBDMx^L)b$LFl6n2rC-!HSerZ7NrKh)nO<+*8M*> zx&eXcX8p|(e8%uwc3F(`ts^6D^w*2@&Iwh-$nBePfN7N1*nYY%5>?q{l{`e%3-WnL z40C$@eV1Jcv*&KYgC{o`jpJP_2(h2&#VOh|`;*P!Sk+H?>#kQxPd~0V-e+sW zV=-9DiUpQE=(tHc3Gt4+1G$O9i}-c&UmZg~oBZxQw3hwdiC<2gX>7dv)t6gKw3*VAGuZ4g}*u zm*-o=S|{LPvS9^Xa~GkA^*F_C=Ih_(nhxf`yi++`lNzsaH3457i-pbQ;Q~(csI@hq zwReQ_NyvjGo*=Lm%|E=%SC6;>Lk?>uvb7%|t^y!t0%4;WLC(+V5^WSDpO%1;X;3o~ zEqM_h7b{-~2RbcYTj3m_(^53Ro~t=af`5-B0F>OH=`_%(xgr?--`#}1DfOjDSP8xw zp0}{d5FcPuX=D%!K7)~tmy$0zgz`?FbDZy9$?ggQZ+{mf`u9_v1$%bb0(;O`gbWwP z0*X+@7H6d}IM*?ZcS5Opz^eILoW^KdFYnL~8E2!rl&ELATB5hx&lqSw_t7$Hfb2}K zz`;GDs?i$moba$=eNSGjie}a07d~_C9*3#Fr>c?AErh#8HTgmw1?6Ttm3De#>SEBq zZ0uAAE!S&K#Q>0v?BVWMdNx_!lGzeZ6>nLu8?`kM<$Y z1??hX6BY2*4Pwr)gi#|}J&}Jb8|Wp05cGU!#YL&0-=D)l!A<$suSa{BpiF0C!kS^1 zC8na^fmvf0uXlHU_jyKPfvxI*$lZg1M}(m20TmJMBLWXq6_mFwS$6wJ9W&Sdu@1Vg zvNp6RRfZwR&f)HRi`d!NF}M2Cx?&<;7|p*3wDp(l;e*N~h++}&YfT@9=|#w(setWg z&|y&lf*~sA)Cz4EF3yA2Za~1i^c%rQ3PL@&^kvf`r+&g9RnxT;TjzVujJ^3q|4_os zk&2{0nl55?QyXaO4a_}`W_)b}q*6Kq7r4A9-;=+u(FnKq37BryhSO|mv}1~xWewJ} zDTBdVeTaGR<(I7;TAbY{#iEKm3{Gz|nkvA%p`_>FU~$?m*%X7iJeq2iL%q5qsdpr$ 
z%o-sGjoEnDh1-|i$<6gD#QDm!bS!VZ_PS7Pt(?8F;nqm1(Dq55p_M+nWoTAxor~H( ze9jV!IXu|mWpR;fw%~Ma&YPIPwYKM5KgU_h#UGuU{!I8WRxq(Ybf-Ze$tKPt$CQ4B!yV**#+Q^@sS$czFx zreDv!9;$ejeA&w@<#xS{$M2>)Uv%_&l^3f{kPyJ=$GYB&?>!JeWYWo=ANd~;6aLI{ zF0D|g+ADpE(ZNmynh%gUiZ%`ragQHE-TCvI^F8-L2PLT=J8RIH=WYcShc+7jh=?=B zK(;Ypao%41PYv!+7}9@D@W0WdKdhwW(15mb?Vi&HLlpMNpNITM(BRL*!eY{1Xr;BR z$B*lhNATKoP&Uix=npZNs3<9zhWR6<14Ug$(Z`@xm z_UpF;{ci@k{??uQ1fy&coy@Qej;H=?1ME?k-tZbfT~y6u?ms!QoZ$OK1bMCw(Z1o1=`GW%{{ zEY-a$Jk2_pG8Ie_?z(O2uW7LmBrPh3#-3ti#BGuj;#5ZakClH}0Hp;GTsW0gVB_Lg zj(P^Bn7=jIVhHW1HDcl5!EVih4rRa zx9e^g&jTV`T1a*^S?nFnpoGQ8A`g862DIjE)ZhMGa~>#i(7&;|=IE#ytDwL;Mlu{T z`W{14q&c z=85I}E-@gqVC2~71Mt0dKlWMFDMG6{H7fgx)(5^xcXFH2Dbt=$=}(eCG%yo=Qd?Qx zAh1^smK8d&z*MT?_4Uu=$cb8?k-@uhVm)i_kodxmd#Ua}J*WiJI~1r1NZ{T4DdS|& z886KMLiE~(E12Et?9qdRkljY-`I3s(70fSV5X>6cGdR5L-r*iX<#Z5b)LX+kSF-?CRp`;U9y@!lkGEx~h`lfOUyUJ|dj!SUl@@qJ4x&P>w7sccX=v#B zRs0;JFau&2cn>08)KAiP0seJMkB=r$fB$!gfZz zf2-2s4h}dja0$KhTuH*Tw4*kedlhOhwlkQ&x4m3kZc6_fAD2rG+P^-0Pv@&|8NJEb zVToYOzkJoTH(#eFqZ(O05uJ?SLNH>0()kGs5`tJc;L5K z6qqu}ywUe|t1^2nbYJhHs4N#&3lTAIP=Rar~OT!PMOT@(dsO%*m)LAk@BYu)(!-vVw)aCmWIGwLr5J>?2) ztc#x47 zZvn5rz5K_71qjkku(1is6@IbL`JO$K%qc|N%!tL)RC)-!o4mrZpnU!vXT>@uROBrY zfa&biuAFELrj%@ESGF6Yxbb>9@i9;cO>>d9)evw>iOrN+| zH|doS=G3pp*4ed))f`alsOIi9HI+#-W>0HFFp4&{FY@0knsck$7H9Xoo!W0|bW($z z4b$;zIf<*3md06>Y|A*>-!9O!BdOr;s2lL3FU&sP=*l`pGlvwlLI!-10@hBXIR)&Q zAyBd`>DWcNUb7ikC4!qMo=SE2EnMA?v)7`$FnF&&gDIYY!eR!5Yp=`4C~OMK_jo>y zznYiOgW1|L8)?yr&+U6cZuztxwruB~lLC9O;2Mgg{2uB}4ai7AC-F#CAZsxdlfL!9 zphci$#z#(=zi}RLs(jXOV4fuLr!P5i9d!9!AG#qC^zbq@x>v7G;bzI5k>XdpEog|| zS)rSZ^Wg$85HQ;ZdosVym+8UI5D>nX#8o!B>wyskc(nvO*Q409o~MQg4#qpr$?PMp z`BeWTgb93|v(zN9&R;uwGVe$=cklR)IoglI+Rf{^g^_0{#oDVem7ak)!(^q)Y1eiu z;m2xyAWRfDn;w)J^bjcAnsn8YN?W)E?(h(B#^;|#bGZ9Zrf1!i#GTI zqNRB6CidMUGL2=eThiLeJyV$6839K%qm5EUP@kXBfOhhfl|AMwjQ@RIvc`NLkwa!M zE!icSP_OgjwZ1Ml3uuPpn?zFG+`LE}KKZ7h&f??T>eYFg>xeh^$Le-1BKx9m z@DU6=Bd|;U8{SWz_^f1FZKcF!1;WJ+(zeaMmK~O=SbI{`TF=IhO0}$@g;X7GcxD#g z@KX~|;)40gdTLwEh&4hgl)azL_s~7sCr~y)%pU3{8S!@d_ zp79mlk-bmM=ZWjr{Cyz7Nl$cEl&}h=>2O13;CE^dnJ@%MFdJXSYZd@6`F8*UD}vSHcTU{0Sasyc0`QM!Y#X58}T)c6~jkAIUrK4s*)3W_@I3`>D6XTr?Il%3{$4?&bzT5C%sdpPTb~ z!FXqhq9P(WQzSbSH*4gLaUk_0!mrG-ttS|#z^^Qt=Wx)jikNcJC{PGWA}`3SWh zxP_P6Hv3=Nx9p^>Dx-byW~+nHQ53KYx?U-EK+^DD$_eL-`lQ;9X@ zrJ-bSqE}nG{rN#|%Yj!B)(=Uvqw5#^0~(6`DJ>n@0Y4vm0~j*JdvrR><1Kh!P`a*pg>Ft*eHL%_rDkH%t63kIIKW^FpQagD|5>i5 z;lJJD;iFg75=2f-H~AYdJmY&1HRs|~^_VYU{zkn}emZa3RE9|f!lNrwRlkn&`BCxz z<^r&5v-V!YNDGBkysT%wjCQ5~R({+b^45cmTKd&!-$UK&Zw^cfsyI_Co4@iyf5pWN z^9=*Hv|I;Q{#c+Wm5If6k}d50YI{N3{;kC{D3U~cCPuNwgh7JKx>u9xUrb&Euf?KSvfyV8&&w}P>P|0azE8*^O~Yi+BjSJH?oHA z?p)~-i71Me5PhAE4_f>*vHDu=bX=5pYlCDbGL`)G>(iEI%S8%@IGUs2j=3^?2)Y1g zS;cJ5=i6Pnx3Dgg+Iw8T{)9HM-_qAv95LTB%VHsu4KE9tg$_%++CsLf-K1P2WQPuQ zd4vDsVI{a5#AOP}C8+x$B7@%=W2Q4#Hk&DfyK7YkV}m(D!q6QGXx=7ZkJeEvO3(~J z9P6_WVgh}dSF+CK%b;r5;{xx`=%=3KgLyp26GG^W!#2T#Ky3wJIlLNFPs`n@Yui%E zd(53efVN&->Dqd6w~99=sG4QyhKOB6tvi&Ce;BiI@xbx5}dsEkd*I9y@RcJXP;svEsgn^{e9@JY86cer^4N z7n=2R$B4?G;$c_Hp5vi5_#n+}M72@%uK$ukBC#;K+VR(y=Qa(@8RsjeE~0ef){=wL z*1x!vJ6trx4p9Pw>IeyzKXV_EpVr)Kr`tPWo=M1dp#W7B+l&F5&4b4meR>Czlps3- zW3Ym!?N&t!S-+LXyq-eT3jOOyMp~Np@%?%Ux~bCRTTbHYle)!4X>U^hVQ-oh{5Fq$ ziS!%!Xn6{Horv^jCT|fVusGmH7gl#MY{basBXh>mnw~HB$pNLq1dkP`>4U3XFO7B) zegplXJ4+1ryVGy*xsC#n=WG!F= z0r#q96U$9XHKyMl2wKb((7G6;-jrKo-NJ8sUB*sOpbri_$Edr3vlN4$+Ca|sd9&>j zeWgP>lo5mpi5b9#_iOAKpWPgzugSn4VTe*^oeQn800Ldh?h+4^~}n#kb(ud z!OZ0N&RAWg;}z$4IJpnL)O8DyDLpg8gOI*IvoTMa8y1AsW(_5wkBzk5G8XoI&-Fe> zgo=ON@s zk5hgu21TkCVPbs1T=Zfer%T~y6ZGqa-&6Tj-?`P))Fd4aPSrQ0S-Xx7IWrPuED6h7 
z{L5|&4l3HlgjeVr4H@yHW6TlCYeqX;IgOMbOx6a+8W*T&^>CGEWC7Yc{ zI|`CQVoV)V`@vXLOt5CBz@Wee-HFTKB-fhI^gJO0uwU0P6>V#X&7@QBc!+&BtOslJlpgOT(aO zZtQ+Q+e#8_@~2I)f<73|c495l&Muct>B-4%inn~Jug^QdNOqn%g$fpt@_xOF|LYli{|CD^W zSfn??SHrmJVVu*VMBqNxA~txx0=qAm#d{#ETK=cpd&)PhE7 z^Y=u23Airp#Yr62Z;M7`;X-EaPw)CtrL@GL*r;jdTi6;S%r(=Keeps$3v~H#3iRUjF30!+7ghd%g?w-Th15 zO0fkro#H)I3*@~G<$P|~bQZr81&MO`j_0Rz{|e`{DAD8MvJ51P0 z6A68{OgJHWAkpg1X7VY~vkazJUr!|tc+8v-vzOUZaUj{s(dXVR2s9d%;G&_=70Pa; z#Acb*EQMTY6Tbv(&N_RRkqVhn8BeOG=bbv!o{f!neS0wB}5kX@ShmXi>0`V#bZ zeX7v}IBHm=60-F`ZP=QRTCb#(H@gK;Aa~@^_PPD9+|aFwox~}cQdcON4a)$KjW_=l zmUq9x>Q(uLC5I^nNmTyGyG`vy;Wr#p5qqh6CW9aM83ql*SJ>2(C)IG?QQ<-)`d4s5 zI%@Vt;ZF~#^$Gcvr4Kh#?(fRLY+DC1V|-8M>$Qa?(2bg!nmQ`b`&zhF%DH<0y<06* z|8;wL_&%tmY8BB)C^D&}*r;Jd7!Hoc7X~|tz%UP0P%MsBEv@_R#+rvBd;M#d* z#f5#Tef~w&7Y?PO{_nR&w<_w%`dLpb!*@|DhvK&cqP*;R4|_At&N~aHmdQNntqJTY zyUwX>e_o$v)VG3hYs)@iasIe|ln}#ktynJi$q3{la$#0*rWYI8@xwIS)U2n5ZWOZPC$zpA>~w%$;9m`cr$6F=iN8vNTb zB80AmH-)C=DbOC4sLX3WK;VA=ZMv^%9{& zbMk5m<`3$G{C_}GZQQphM^r;lq~)h}!_VVl5Z!ZVyKe1fP|9K`MeOB$#mm#I?tVW zzT41$VLe1KEX6w&fwGgPmy(V^Wm)=0_37Wb)iru`*BWUEo>gPzl%9??nzGahYlZ7N-QeM9g{4>DXa2mwZaj8B2uOm5;&J_=Jt4| zH%&$tgcr$cXg2LHcrSNl7wy}4KPLs_#WLDZbj^>5;NMux-6+R18lyY}yDW_MglAJ& zFw18`e2gE|Mb@8a&5yVfZ*6cxLjfnYEpXelD#B|aC)y`9L`X@#PiEqpA2n~SU5AbIR692|@P{`e!g!9DcIke_;u&zxdl z(3a4TcRtx}o!Fty`)YUb<)%?flHqhmOq%swGt;~;-;@(wx}-MKDCut+Eq{|*6E2f< zUtzvq-kRGK5fiu|C^u=0TG#`K)VKd2QY`-$BIN`SDd9jM2dAE`8W6 zG2qla2GX6_z(cPd1)8-TJ{I|qrE<-7Ksfy zHb>7{UXL6jjm_13GRs8cx)0egiP0FP+mQ5ZPv_Z|>+C6Ol;70Sud%Y+fg`WvQ z-zFxj1*`2+<}uoTlSJ+TlWEXp&OQy14}U-b)3qNL2`zw(0u?f63)OyV@Um>U1ym41<4G;1>c_!B6Jewi1@0+gJ#r#Tdm`x3 zgQ`K!>d%}Q>?E9MVvq#B#mYiB2HaaIXsD102Eis$MUu%o`evJ`DiVh*+M;I_BTV}H z)%r`GnAbl23L-(zCh#5ReKwHBz%eVRukGXE1Jav#TnR(FsISA|*3(qLzH%!`6;;%U zMBNp{euPstG0*@k0-R|LR;i#g!UpqH4|w?V=KS=K+bS@5UoR9Q&Kcn=`ve!hd^-?1 z`FL=aU{SVL4eE%KUmx7*v{yfbXW6hnRv%`0cMDOy>XWF^V%%P4Q4$n7c5RJ8V^IJ9T0I5b_g?Jllm&d@KM@hukPWZ|s9{aW~t9y2I^ z4SU?dR>85otzu%N9Pn+7fmnL~F(>BiIu@7(v~Vo~TRgLfE#x>E7z;IiB+o{%h}~I< zqwevyYf2^aJIGP;Uro*6=kq_GGRCz2+fvESkSi(=Ff0*6(-Bd*vdp7YM^={%%w0)W zm6?}3X$ss8$1H}MJ^4CdU??sMgex8k7G)s^%VQS-+L-StVFcr6$rGRWrd2h-je6BPU(&tB4cl@qN#sP>D z0D|9LLEe`HAVc9k>?K>2W{dN|G41T6(}XgaHudAtyXfAd8Hr$3;}7pD{+%A_lb9~x zIi(W|f8&Yut?A>2$y)m!Th3*2kR4S)`uZ8zdC5Ph*XI8~y@LK5^(wZk_saAD)){~H zKDJ(95%BJK0ciQYcR$-DRH|gncS2#|zp$^uRVc zXj&1L|M}>66PLAgW}U(!Cil;KLJ(co^7oG4peerF<<*p&CvHS51xoP z+CU9Q-T(l*hJ=Tz_{YC|0{~c2*A;0ax59JsgfK3%V<-DOiDx}|Ev+cZAvc14KBn#P z$Sbq1?*>i=V?U@hwp7282~`jnAO55|qO>}1q~$PiV}%ttoA7Gp9mlT)HqC9VF)4>x za~n#re~4tj_fqtFh3fYmu|;6YJ73i!UIA8BkK)+v!?#s!w~weKGl#!U0gt;2r|2)eNftPVC6IV;J&?`&(QEj%`~RS3O#c^ZR_fwTZR2hhB9Gqx zCWu!Zfor2b27?9n8sBf>qpAUryAa6-?!W>=-;*x2c#(>3C_S^tTLq9y{_bHE)`AVN?sU0sT28e%$A042Wn&~jSa_z;1-{*K_=RkK89 zVVR!4_=&_e0(A}`?fQY>B<4xA_af#PiMu$k(z1uLDuk}aTdjH@?TM@Wbv91zw7hyh z5`OaT4yp6rNY#GfJ+F{c+N^~UTjFQV(zj6P3HRDL_~;TW_>Zd=UvX1iCJ9KsKAdSL zc588?M~Vp9I+Q4}gUXAczg2n9(Kw%TH{i@WyhCF4#T0yggS{%Hqv9l>A$9nLgh!d*L?zTuVg566Mv>YDZ9E}au+Ilx6}FI@p6H)aJ< zvq$Vld(Q&fP5Kt?6?B;K()0vM6!A69;6Mrdp-waD)@BVhh<+&XJiLx<5e1apv|G*5 zF)`g)4-9!?FvTP@(-$v(q$fP74*LHC*3dHRGcDF!m|4=_7cmdlvA{xrDxIW`d)mZ8 zdvNj6K2a3vh~}6tL*Pq8$BnU+$qU=EIN+ktsObEG;Dark+JKUW_%|BnXFFGRa zi9nzZ6a&)yYuXy0$4g_3GK3rFPd57oh#tPFo=2zT=gEzi>w8_Cj*Pay4}kaQzy^99 z=?)*=4-C9*UV8g7&MwT8Zvaz4vM9Z6^z>AjJ+#Ax%^?>5L7M@Uvt}BYz0NneSFlum z>QU8&3Q72cb+#jluEn)l-j=C?3nWvT+BK_@yBP^-dVR0Fx{^5HWj}11FGnkQAgb=K zCR=f1kighy$E*sEK8&+r&gc_DbWR%cHs8;3YL-Y_J>NN8A&s=lS|>ZQHV9rqC$(%3 z>28kWk?y{SxXTd0b{)YOprnDpf;vnB@n!8b_)#4PyVcd<0>hN+Y8Dy)G*Zry3JuZ_ 
zQJ=V(HaoqJVW8%sf4WvhrD&v>Sw>w9mnDIR^SCph0z!9n)|Zfnmh}W;0A*Q0KUvgT z)7$8Q>{|!vj+=oCSN35$ zW(@H)sq6SP#e3^L$bymFzE;ealhnsuAmKYf=4SOi+uWx3A-kXZnstr0o>S6D;7*$F z-=l#!Y0@x3U#UBiJ2Iwu(WyfA$TNYmyp9~*AV)=FK!dB>vay{VBxjGr;`ah%DhR(s zYv-9HGL!OM3)LxTU9Z^sxz1WLnFS#!IW(y1ZhDBjg!bE;>2-BEL^NG@y$fB-XZ7@- zEUS0g6u&9ireFA~9`}ubA+>XNo%W6t8sz>-VkL9NPKBf>zH)sj&{0t$S(gLD)f5!=dIL<{HEE%X zwD$vjOXS?Bg`KO%p@lM(TxLCo!$N+_nvdCop>)cx@eT^%w7|p)xQ`O#X?y$U?2T_0 z@!NiPIeTFbTFly$imuWY4SOLzJ~7C^?7Bq99u+YA-Ld?M8>KLj9Zm-b5Y^MOV*Oyd z$*)!(%B6zwr!1R%UpnUfEDh=dd5@y`qS`Q9(HvX$t*%|=UXF?`I49m(uJSe6?+mJQ zux+;@m|{NzZ0AkI`j9XKXQ#U@bnPHUHcfHwdgD@VRIcjzVH?P@=EExy9X->LGGF!q zC!7SD>%0C}-)bCTR9`d!!_Fc=3X?cjIrsjHHvtL#AcUbNsn}N4J|N>fq&$6z+0-ul z#)y7!MO*8$<>E}7|73Al2p)m-6WLSPk`fBaod~%&(X=f~A7$^!sJ#7p?MG$a=XLvO zGS63@THWD-On3pkz|TaiURuna-yGon$9N~;Qlv>0PkV1! z{7SOsj@=@|`6On*mqf-P;d$}rHN>T*qL-Ec@qA4}HLsh;gS-62=Fy95M5Vd<6Wk#y z#ZFSZS-zJVPjTu|*aN?>n~i?kt#F}$u9%w20XdJIMU+7q1_+y{$)6y6Kw}Wd(zuh} zX=h+o=Cco`2d)n*)l4}I<{Z{__-Q9L=ZF{&MMp_1hJWtalbj7_?Os|s{=Ak1K%x0j ze2e)wf!miden=FjmW5qlRiw1RR3pVVt0kbiql?L&wwYJMuNT`dESxGjq~RMoyLSPY_T&$k)-J&* zKje5fO&Xu^?=e8c(`7FpAgP1TYgl+j`=Uf!iWIU$K+DFFT|lp zGyE=TzcbOQrfYT*l@hi)F3?%y?jzgZgo^@<#xh%1;q=ggXuA+ zBm5`{e#|uvl>WclDxSv0#dXwSMrV<`OAfUw=LG`(<_Lr)C1|Sk|HRS`n&Se68U9xO z7S>WIv}z)K&H+ut{O*U^saadEwEQ5EB>p5V1&hy9KMg((IHUp{=3Lz4+I?Vm@Mm01 zJ~a;7{&WE_(2uX{=4Z?F4G&zukyf}y)79#Vee4@tMWxu;t5|)b$E4N!;6kPjs60`N z@go$rhxO5=XlB8(Enj7k;BvlQvs9SN)Q8p^6O1N?zE8m{FdPp`FuS2I5*KLjq%o^_ zzK%OGiX~xtETuS^3zRMR6vl*c%cqsWJ8t9E0;O#vi^~vSij6SnKJr}_l5-!7?xQF- zKTxmJt=hXH*6i7U{EG|b$7odauj?LMz}iu5b+KS*CC3Of`U>khj0g9Tz2~Rzx1{

7-0D2G9p^*sC+>`p!Z+iabccQ!;xRr)sMq5mO-TKkX_4t;I^S7Pb=O^ z&h?l+&?uS2$71GwX?=UnW^>&e1oYM+5!>^c_|~OWH#ZaS!=p4Ycy>9%D@mL_>Scw@ z+96d`yxE>C&OQj4y+S$FW*SV+ZQ-ZxRfp}6|P{`w!tQ(#yRdY^qd$BEW;YUQ1_ zrmJ9A?=j4;3-m2XdDEHVdxw4A`xZASAch-u!Ta;Z17Dvqmb-c$21H@j{csxew;w;L zE~c>19W8r~$8CxY?2~QJ^!VRs5;eX%1MIR$6HsS;+HTlJ^IofF4TgnK0bEh2xq{Q2 zhed!kTu3m)(6TAcS6!N* z`zkFYtY_P5U32?Y(?$WF(FovIqtEGASE)pKSDhezKeE9l1 z(`a$u`icIv#Yde{AXDhF^F}Jg^_#8{q=5AaA@C~Q z%J1)KD$H`zKip%27t+1@(I*}~sWw&~?`?I5G}d;kKf-${ZW~LfE_DgR`|CA%7xbkB zWzn2v=Pd`{>{jSdXt@I4&d}(Y3}EPAY)vFmV>-v_H)7-sTtfw{EuKNg1NfPWb)S9e zK}HSp_&O=})BbNR0L&UFlBQGBu2IzWM9RoEx67Xmcsa3_iFuL+QqE(hn+in&yZ~GI zQo73IROBvbH3Czz`Q7#?Z7aI|$X!PnoX@ub7B<*j_gs$kKwDLmu}f3b>%F2_e!PeR znsYh?|A);0PMp~+F0ArxM;6?HZK=BoKRVF_n$7LKZ8jFj7D#lO?hj~&VNW{ADz3WH zlq;NLZ^qlmGQjkKym4E}uWF8*9ad)vfvm@Tlk%#ZD;jF%n1ap$_6_&ZO~4N}oHc%6 z-n_ZZmhoMz;`~;6{OJZqF7MslQ{APw`}_9B*C|b`tWKK%?;67%i7>%HgO-4$%!@~V z`@WC`pKx=4t|iek_y9MPZdKa8o>JyCj_2APwiHU#?uFA+mtOOmJ>ffaaPc3}Jia&Z zU%*;^2cl&AY7P(|c2O(qsoiXK$|R=gVKn88xj1=KRcgQy~vcxV2wshZ`x*^W0j7}D}a849*xFm*1?Xm16e+gs6Onx z*Jzwaga0Sb>%{-$c{S~!(R3vQ5*Z9NOr~l%o4ZR7_facBAg?8z5=e6d zc3HW@oAP&;Ww>=D6D~nn#lhCN0zQrS`!jbEn<&GMuWu$;61!|gPU=Qu39Fo&a|woH zv!P|+oh3{Fhz*dvj>&I-SJQT+>!&k9DVU|iM8@D&ANYLJ?_A1w13P*C!NpKtM#{n5 zr*s}zXI%3B>j-Cz<8jV*w2k=Y)-T7)a4{Kve~!ag=u`#6j@u5<;6a`EjUSc!Tu zS7s5PfB$~qIf-2(@6Cju95$=R{{-XNg9I+-)11LS?t1q(E*6wTb`Y_HSuDWJT%YxE zgt18GxAVm6BBq-zULjE~JQ@*1wf4h#tIm~sXefyTxNfoC?~LKBRSN&lyIWN@whoH; z&o!=q6eJl>jq3k}Tn+wDWFNQ+xW2gGrB(rcyM!pQc(v2Yaw{LK zo4W31h>((50SKibg*Xk%!7KReCaHMfu$++w7=q32pPs!?@Euj`M;$3%7y(0g}T zqp)T@yW+wSwezBST}b`2r4!p2$MSBqvm#hZ*J z$PU-s#wjM(>Xlz4g2XZap8|MdMQ0bcb7UC1`fU~6M$dvA*q z^5u+kWGQXg5uuD$#oIn@@{r9s3OkCyzuOP~Gm#EJSl$5i#1$O%yn+GeyBlPz_)=!G z_h!%zJGHSrnTP?tRq;{-NSVTE4Q7D^!olio(~M6dA0U7I^r37Hob-PKUY`{t$UTQ=cfJWY*o)@ z%AJ#n`&1>R&|8ajVvQM_bMB6$=B5|Iem>UB#Q7PPC0L^@Z%6HNt5c{AN&}UB#~{#| zc^@Vyr&}W5K6#`i=~F!t_Q-GD)py(RqtT{Z^i`>k!*K%7+fgxbzwYj5!~5B3wHSF4 zchKCg6HYFiTPVQIRHgxG;T|47!rQju@Y(?7ya!c0x0H%3Jf#iR_ao>NoL?lCp&A{gS6V=XwMEw0ph z?OPaAfRe`Cp9qy3E&79QlQ%@e_RUC(%W{90+m!OWenuEJ#6i3V&xL9U{;%4aO=}(A zC@TKlUll8Fy%Auk5ShR!GN2z&dyjyijw=NmiRoH5xM~*R`G=#mM`j4K@N|XFVpU1` zoKe@On|_b^KHzVS%8;&@thth9Se|T4uh7dz@Aatj81drmO=L8j<$+zQ#{_`^kN2X$ zAi|z#C2U@aMO32zZ^;Do^cB(_o9Ye7?ZyYU9wb|M#nk8D^zTvXLqBt+63GUKD05?L zwDwMqqmIoYs2K7bumUYMJ4`tX6AV?7s5WZRJ%a%I7bm4F<<~{;Yf+p!P*qf#-EOwB zHKmU)m$IV}yHKA~5D@e6c~kD8zHl+W6aV8E<#PXeuZzzj?|61;UqwJq8C8D#@cBrc zjJrOHH)}eg<%EHqR6O|jUmSb+#OHhF_y$i-GAG41PjYNsO5{Ri7gQid$QNsrbf^J) zo(yWqv|9Klp~9-MDo5L@^FQF^dry0?T6akt8*iU3bOB@DCGfW!LUK5BFE@v(-0C2_ zk1d9@aeF^uKy*u~77wYGALVb8frD5-_tKg`RY89RmNM8l3}w}R#})p&lTkDgsJNxK=5W{G z;FtI%ji*-0m^PFhr9gb8*0F2oozI4NRg&0imZli2wT$%lxxZcX`4!zJpkz<@k}C$0 zd2MFq3PSpp69rIuyVMw!Mz3KmG9yO*<+#WTUlNb{)Ay76}}V{`aW-ZQLcs}88D|!iI~?qhKXX25^h|vvm43G3ACp7 z8<u781cO8QC( zPt13aU8Xw$ai^t1CZ-hr$b!+aJh1H;$H0c1Bwgh#)D<9#iNO_9c+lqv2x)}bJP^Pe z)7R(PW13a8+|)8}t=kq{Ut)#Jy9 z9J8?L(Y!n+bbTSyIv55l3~1h#6c{cC2V2 z>{?VDjS|r((z3zv@KqnSVIJplLA|vX|88mx9+!#w0 zH%>X5`a6H<{WBmB9gx!>bGwq1?&j-j+6uJg-SzM#0wcmt!E@;LP7l^@7O{KbT9WxZ zH;ID_tI9fECn(xBeLD+Y%V>m8$#tgS-Wk7kS`0tk;#*2{*BV~clBAOd3ct{cka(R0S3j{@-V8?A>M&8(*x%#`8asQ=a8bw)M0ZCl0T!H$B0M}&BkrZj1y zfI(DLkYWR*U!>Ql^qQlffHVOC1pyJ2CPY98Jx36vcZd)IN`!zkLkWq5^NoN&jAVbu5IhrlTJ2Cpc~{%Y~hbQ|*_#WblX0BI!!g$Jqy*EZv0`e1sBC#pNHVJvxIl z+50picRNb3f(~s6H8RDL)o>PWELT#^BM>&G$zTLB^Ul}T+NE>C8wfL55o=R5v3G1v z{)_rEMNaGs1ox5}q-lIC->Xq0C3Uc-^w~TMQtvUamdmqQ;W@~L|04rkmM{a~E}fi* z$eH4wmYfO`ro~D&h4$dnm2+rE6dALAyJ&gwlx)=vV42}67bt^7DIyzTnGqK~!a|aT zL?qjNimrnTiV$)qwIw&yzP1IN;T?dtcr0` 
zSJ#l*fd%1hUC~6kdQFLq*UYzX7eqKAZS#`e@J&fP4SEY(rs0PyY( zLFZ~eKi0mJbNnN#N>?xUIcQrs@Q$bzP4LH8YP^n{D$c@oIn=%Pcy(V}Vt7=FNrzUc zs&h5sC@f1(X!w>|^;%hO6r~1~C+u)JwdwQu{qK+`iDI9RaiWfHAfxZ7c>QwG`*B+g zK`c{okQsHFqRbpPIsy)!HYB=hf-|RH;o~Fx4ARRCR?lN-g#jSseV_J=GI7N0GJ(CV9$~($KG)ZKPx5$pBD~%dyBeO z1WSFc1ud;NAY!&e11aTZ$7Iy9PN;qy$2+CbPFSVMQ=;dG?2+__e0my09yl`y6r2Pj z%UGmQW{&xDLp*F?a_a(sUZq=>?;d+%^KW7 zB1UGuq5>ctP4_E$5{PF-99PdxE-<%eyRuo@8ZIbzHw7Q2YuaBf3U|?qf1CzW=FpHlNi?jbMWkM$YC3kEC zxudj@&kW{@9}8(B0+=?gvm^BnsNgp*PdmP60V{3nD^J>=k^E@wgZPm0>hcqPjKNJ9K}{!hazg%Kcf#cU&>8*mE!J z!pE1ZEe|jU!N~DI;Lzql#9I#>B9_!SfsrVO1CUURjl!eGj7{oicKtg_meeO_ne_)l zT7Go)wqfq-)(4!<~R@4WI|^4|El^>Gzs45x@ffRlF~Vn9zxY6#NcyI-~@dx;4b!I{x2+Q4fF zK=v!z%c;#0?K`@f7u|JjWVfD-fS2KLdB8pJT0fOGcT>0}KQ< zvtjQ_XBc>a>34b5bTxH&$SM7a^dYIrsGQ_-N=(|l;sE&+Zd*;%*AI z(i!Xq8|wlj^fdgu%YvwcYzEOBTiyCtE;u6iuKSUb1{jqnzmy_-?#m7AN5uhA#KLcW z^bI-@F(XCBCA{}D)MyM!f+OeoC}yba^042O^zBj@PzFQyr0n)q^-;WJ5c!HHs9pep zS*^;y*dya<_q)i_;76(C+Y`~2?jJC`Em0Utj@PFR9M?qnulNza zyhLP73a|9-ouoHXcjqXlzG~s_D~sSRJMra}b-?jt&U(8aXjx0CM9t^VvCVI>mk}00 z*KlpoIbdw)_%IwLbHGVr?HzN?) zvY96NE@5yW{es=^4KpaQ_{5fIg5!dnFElfpAkyC@hYH7;SJve9F=bWQ(8}`w(4CLK zE~M_BYL~GTZ7GadYP~>jifg}js#nEAa#(ux`DkA0+=+Ie$r+2mT7h&sk>KO!b;EmkNkp2n ztA8o@$iVIX+qgbfsFfUz@|brs4-`hjXhkyjkF5ueWGlj)~SuD}F1>s%%-T zFDeJbYM>(pVdTC?*!SW=ds3kgnqTG9yMUoqRLWbSLM69wOXX@4kYsw=Yf z)nm)HEZ04l!@J5fO%&ag%c>ZtKvpc2m#P7$_$LquT8P8WjiVA?Gu)}Iu>cv|L81hg z`E|^T9?QFb_NEubuO1MrRGrqP_R`K>tiBHCO-?8(=z7g?VJ3?{XU&2g>cz9Q59;a% zBh&c)S5OC$gb2BA#y%0t+e{yV0l8*2zle|>=Vouw2OLnlHfiyn506nts9kg(N_&OT z%cQ;PLwwB>PLQ#NfSs)2EGtQfQ@d665ir9-knucMqF$fsHCEd=MCd_}%eKg753ioN zqOfuoC|yy)pqOfY3zSR(ZCr33x-H;UHJzU#-YIo-P0tPpSto;(q_2PD8^HFnht>Y{ za1L9Xjnwq^s%5Q9v!jdX#Hinl$Ds<$o6bsT{^%{zkW8ns(jP>eH1Td)HO^fEIoLq)*bQDvnjo%t*`vVWHCeHh=|j8E`BE z9dq$%->cPdeU{v)k)oCWkMu1ei{($o6$aIndomC5`?x-IahGl4Uv8oYvuRm*J|^8& zZarZP!$K1;=}){A^~KIQ53gTchy|(hLkk_ioi#elPB1b*4DK7FTp6FH^ZzN4T;0@T z_ThLMJCjN4UPHEmxkmBi#s{CTkO(L$r?AA?RBq-r=Idi@^?M97gg&#vm(HkukBi$X zQ*|l&`1H;2%3AjBKQFCW(Q|D#ryg$#oL2-VD+|dg%K**% z!l$Al>f^Y25rp;v`8Di$TGqxMo5>&Aphh{i56OUi~UO7uy? 
zcJzu1k}iaM#8wgVd{F_zSB#PccPZ(r@2=+zHP6sqGUW!jDEJ|$qofJ!K3t14=WrUQ z4FGYtplb+!?aW@icOt@K-Ma@v)$Wv;_JrBDrzqZOtaU2z`DOsLoO*cAdU==>W|ppm$F7uin2fx zY&@|L%~A}!qa8$BKX<0NTQ1&X;|2~-!ceX6z4xJwh|?MLXw^JCqn@yUv2%NAZj;s5 zCypp3b11)fI>I0c{;aQMGo_}YX(L%|E^)CNO;>HdzS?9;57L!rBHw^Ug_5of#oF1$QVVgy?(ehiJl_bHDk@;<7{$HE2OgvYYcsVcP^&Z9Rsf6F4KX9EjTyOQn_sgY_HD#$KS}R3XtT(m>3TPrX9S1yDCd{Hpr?Of&Y|~| zm1JrxL19skdAH*Hl9bMPKX*9DcgIuzXJ#H3Kq z3SzJIC@%k&Knfeg9vdK+b^E%X*irxp54mwUEX_DTpy6(IYqUV$Hp=B^2a@NLRM`{8 zN`E_gS(Ve2ff9iDkD<3oP9vu^&MTm=aSC&UmLz2i^5w;=FRgXe;%75ckM(oMd7>N= zW(B67Q4aY{j*pR~$8EI4?>d zF=F{Qos`ictm(Y0J|XMXT{P|7nd%AuYDgp%OQy|%Ogh)82WdNmf} zsTvApmjwksDb#-?9Z6@(5}pXLA&zS&@Pm3UEQG#E}W)RPV62jennUt&e)H@c1_{En77}#3`1M7+jxx5k*H(>I-l>s)$ z53`8*lseqGb4D@EX=JNIFsgcizW$jut#>CHLa+TVm}{v1BlDw{SB;NYG`}Fq7)M}N zutqp5_&X(>vmKmSG7y!U4W4FK^UEzYBB-vfwyV$}Y{wB(mORU&>$y$d{i8bmRcn$- zm!Ws}W*H!v{4-lKrT=1)ec&u3c*4Hbvz6j`jVtGD0DnhbI&z8O-f@x`Hib*4L6_*8 zV3vs0c{lJqP63)yE@XaFRq>PLzKpF!k`Z_PlR*P7IC&ttiE{s96aE}7itOiu4W4KFH zeN0RABQM|fI*vz?OlD@e?KC{H)^ zff|czk>QJQD=ZUyd!!ug{#_vI*ft$|?d*_I>F7B1pr|zEh>0PPUWc)}tV}9DXE( z?GOg`05{`JUZD~X`Z{R}m(2i= zVdjgit`>Su88V?gr>W11{vrzj#2*S+_}t;^J@YTBN&^fM!jc#pn5q z!?a2RMT%a2PK$B`R_5z0b9LR4)542tXs#$Ssc59=feTwlwTN;Ze(Gj> z6+eY*WGuv_%VwpsGslUB+(I5Z4K}TVzh`r0b(_~deEJ1>yL6Kad6PK49(jBA^UI$T z@?TmEyPqN}>faXF&o6&Y$Umda|9dfP(Mw)iQ>WhjA0h1Lm;V;Ne@@7MJR$$J%YcD(XU`2=U{r&RuN0^EKXcG2`J-2`Jh3EboBtmFu M>YXh({g1o<0|C9e`Tzg` literal 0 HcmV?d00001 diff --git a/docs/build/html/cpp/ops.html b/docs/build/html/cpp/ops.html index 0c9ffa373..0f5b651d5 100644 --- a/docs/build/html/cpp/ops.html +++ b/docs/build/html/cpp/ops.html @@ -9,7 +9,7 @@ - Operations — MLX 0.3.0 documentation + Operations — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@

  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • + mlx.core.atleast_1d
  • + mlx.core.atleast_2d
  • + mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
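The hunks above register the new mlx.core.atleast_1d, mlx.core.atleast_2d, and mlx.core.atleast_3d pages in the sidebar. As a quick illustration of what those entries document, here is a minimal sketch, assuming the functions mirror their NumPy namesakes and promote an array to at least N dimensions:

    import mlx.core as mx

    x = mx.array(1.0)                 # 0-d array
    print(mx.atleast_1d(x).shape)     # shape becomes (1,)
    print(mx.atleast_2d(x).shape)     # shape becomes (1, 1)
    print(mx.atleast_3d(x).shape)     # shape becomes (1, 1, 1)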
  • diff --git a/docs/build/html/dev/extensions.html b/docs/build/html/dev/extensions.html index 011c76768..ffbe58941 100644 --- a/docs/build/html/dev/extensions.html +++ b/docs/build/html/dev/extensions.html @@ -9,7 +9,7 @@ - Developer Documentation — MLX 0.3.0 documentation + Developer Documentation — MLX 0.5.0 documentation @@ -133,8 +133,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -232,6 +232,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -239,6 +242,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -435,19 +439,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -501,9 +517,11 @@
  • Schedulers
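The same sidebar hunk also picks up the new mlx.core.conv_general page. A hedged sketch of a 2-D use; the keyword names (stride, padding) and the channels-last input / (out_channels, kH, kW, in_channels) weight layout are assumptions drawn by analogy with mx.conv2d, not taken from this patch:

    import mlx.core as mx

    x = mx.random.normal((1, 16, 16, 3))   # NHWC input, as with mx.conv2d (assumed layout)
    w = mx.random.normal((8, 3, 3, 3))     # (C_out, kH, kW, C_in) filters (assumed layout)
    y = mx.conv_general(x, w, stride=2, padding=1)
    print(y.shape)                         # roughly (1, 8, 8, 8) with these settings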
  • diff --git a/docs/build/html/examples/linear_regression.html b/docs/build/html/examples/linear_regression.html index e9d23dd82..bc68a0590 100644 --- a/docs/build/html/examples/linear_regression.html +++ b/docs/build/html/examples/linear_regression.html @@ -9,7 +9,7 @@ - Linear Regression — MLX 0.3.0 documentation + Linear Regression — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
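This page's sidebar also gains the new mlx.nn.Upsample layer. A small sketch, assuming a channels-last input and that scale_factor and mode are the constructor arguments (as the new layer listing suggests):

    import mlx.core as mx
    import mlx.nn as nn

    x = mx.random.normal((1, 2, 2, 1))          # (N, H, W, C), channels last (assumed)
    up = nn.Upsample(scale_factor=2, mode="nearest")
    print(up(x).shape)                          # expected (1, 4, 4, 1)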
  • diff --git a/docs/build/html/examples/llama-inference.html b/docs/build/html/examples/llama-inference.html index 3394836db..1a15bf7ea 100644 --- a/docs/build/html/examples/llama-inference.html +++ b/docs/build/html/examples/llama-inference.html @@ -9,7 +9,7 @@ - LLM inference — MLX 0.3.0 documentation + LLM inference — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
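The Functions section above now links the elementwise activations added in this release (elu, hardswish, leaky_relu, log_sigmoid, log_softmax, relu6, sigmoid, softmax, softplus, tanh). A short sketch, assuming they are plain functions on arrays following the pattern of the existing nn.relu:

    import mlx.core as mx
    import mlx.nn as nn

    x = mx.array([-2.0, -0.5, 0.0, 0.5, 2.0])
    print(nn.relu6(x))          # values clamped to [0, 6]
    print(nn.hardswish(x))
    print(nn.log_sigmoid(x))
    print(nn.log_softmax(x))    # normalizes over the last axis by default (assumed)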
  • diff --git a/docs/build/html/examples/mlp.html b/docs/build/html/examples/mlp.html index 3ddeba318..c6c86236a 100644 --- a/docs/build/html/examples/mlp.html +++ b/docs/build/html/examples/mlp.html @@ -9,7 +9,7 @@ - Multi-Layer Perceptron — MLX 0.3.0 documentation + Multi-Layer Perceptron — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
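The Schedulers entry above now also lists mlx.optimizers.join_schedules and mlx.optimizers.linear_schedule. A hedged sketch of a warmup-then-decay schedule, assuming the call signatures follow the existing cosine_decay/step_decay pattern and that a schedule can be passed as an optimizer's learning_rate:

    import mlx.optimizers as optim

    warmup = optim.linear_schedule(0.0, 1e-3, 100)     # ramp over 100 steps (assumed argument order)
    decay = optim.cosine_decay(1e-3, 1000)
    lr = optim.join_schedules([warmup, decay], [100])  # switch from warmup to decay at step 100
    optimizer = optim.SGD(learning_rate=lr)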
  • diff --git a/docs/build/html/genindex.html b/docs/build/html/genindex.html index 16a3f31d9..ab8bba375 100644 --- a/docs/build/html/genindex.html +++ b/docs/build/html/genindex.html @@ -8,7 +8,7 @@ - Index — MLX 0.3.0 documentation + Index — MLX 0.5.0 documentation @@ -131,8 +131,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -230,6 +230,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -237,6 +240,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -433,19 +437,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -499,9 +515,11 @@
  • Schedulers
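The gated activation nn.glu added in this release is also listed above; unlike the elementwise functions it halves one axis. A minimal sketch, assuming it splits the last axis by default into a value half and a gate half:

    import mlx.core as mx
    import mlx.nn as nn

    x = mx.random.normal((2, 8))
    y = nn.glu(x)              # gated linear unit over the last axis (assumed default)
    print(y.shape)             # expected (2, 4)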
  • @@ -729,10 +747,10 @@ document.write(`
  • apply_to_modules() (mlx.nn.Module method)
  • - - +
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
  • diff --git a/docs/build/html/python/transforms.html b/docs/build/html/python/transforms.html index 7816ad111..a2a7edddb 100644 --- a/docs/build/html/python/transforms.html +++ b/docs/build/html/python/transforms.html @@ -9,7 +9,7 @@ - Transforms — MLX 0.3.0 documentation + Transforms — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
  • diff --git a/docs/build/html/python/tree_utils.html b/docs/build/html/python/tree_utils.html index 9a29abae0..eda06d5cc 100644 --- a/docs/build/html/python/tree_utils.html +++ b/docs/build/html/python/tree_utils.html @@ -9,7 +9,7 @@ - Tree Utils — MLX 0.3.0 documentation + Tree Utils — MLX 0.5.0 documentation @@ -47,7 +47,7 @@ - + @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
  • @@ -725,12 +743,12 @@ return python trees will be using the default python

    previous
-   mlx.optimizers.cosine_decay
+   mlx.optimizers.step_decay

    - Search - MLX 0.3.0 documentation + Search - MLX 0.5.0 documentation @@ -133,8 +133,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -232,6 +232,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -239,6 +242,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -435,19 +439,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -501,9 +517,11 @@
  • Schedulers
  • diff --git a/docs/build/html/searchindex.js b/docs/build/html/searchindex.js index 926c17c46..580f62bc0 100644 --- a/docs/build/html/searchindex.js +++ b/docs/build/html/searchindex.js @@ -1 +1 @@ -Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", 
"python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", 
"python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.stream", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", "python/_autosummary/stream_class", "python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/linalg", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", "python/nn/_autosummary/mlx.nn.AvgPool1d", "python/nn/_autosummary/mlx.nn.AvgPool2d", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.MaxPool1d", "python/nn/_autosummary/mlx.nn.MaxPool2d", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", "python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", "python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", 
"python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", "python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizers", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta", "python/optimizers/_autosummary/mlx.optimizers.Adafactor", "python/optimizers/_autosummary/mlx.optimizers.Adagrad", "python/optimizers/_autosummary/mlx.optimizers.Adam", "python/optimizers/_autosummary/mlx.optimizers.AdamW", "python/optimizers/_autosummary/mlx.optimizers.Adamax", "python/optimizers/_autosummary/mlx.optimizers.Lion", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update", "python/optimizers/_autosummary/mlx.optimizers.RMSprop", "python/optimizers/_autosummary/mlx.optimizers.SGD", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay", "python/optimizers/_autosummary/mlx.optimizers.step_decay", "python/optimizers/common_optimizers", "python/optimizers/optimizer", "python/optimizers/schedulers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/function_transforms", "usage/indexing", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "filenames": ["cpp/ops.rst", "dev/extensions.rst", 
"examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", 
"python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", "python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.norm.rst", "python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", 
"python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.stream.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/_autosummary/stream_class.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/linalg.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.AvgPool1d.rst", "python/nn/_autosummary/mlx.nn.AvgPool2d.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.MaxPool1d.rst", "python/nn/_autosummary/mlx.nn.MaxPool2d.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", 
"python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", "python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", "python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", "python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizers.rst", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta.rst", "python/optimizers/_autosummary/mlx.optimizers.Adafactor.rst", "python/optimizers/_autosummary/mlx.optimizers.Adagrad.rst", "python/optimizers/_autosummary/mlx.optimizers.Adam.rst", "python/optimizers/_autosummary/mlx.optimizers.AdamW.rst", "python/optimizers/_autosummary/mlx.optimizers.Adamax.rst", 
"python/optimizers/_autosummary/mlx.optimizers.Lion.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.rst", "python/optimizers/_autosummary/mlx.optimizers.RMSprop.rst", "python/optimizers/_autosummary/mlx.optimizers.SGD.rst", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.step_decay.rst", "python/optimizers/common_optimizers.rst", "python/optimizers/optimizer.rst", "python/optimizers/schedulers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.divide", "mlx.core.divmod", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.inner", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.norm", "mlx.core.linalg.qr", 
"mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.repeat", "mlx.core.reshape", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.stream", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.value_and_grad", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_unflatten", "mlx.core.Stream", "Array", "Data Types", "Devices and Streams", "FFT", "Linear Algebra", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.AvgPool1d", "mlx.nn.AvgPool2d", "mlx.nn.BatchNorm", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GroupNorm", "mlx.nn.InstanceNorm", "mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.MaxPool1d", "mlx.nn.MaxPool2d", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softshrink", "mlx.nn.Step", "mlx.nn.Transformer", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", 
"mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.selu", "mlx.nn.silu", "mlx.nn.softshrink", "mlx.nn.step", "Functions", "Initializers", "Layers", "Loss Functions", "Module", "Operations", "Optimizers", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.optimizers.cosine_decay", "mlx.optimizers.exponential_decay", "mlx.optimizers.step_decay", "Common Optimizers", "Optimizer", "Schedulers", "Random", "Transforms", "Tree Utils", "Compilation", "Function Transforms", "Indexing Arrays", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 209, 293, 296, 298, 316, 318, 320, 321, 322, 323, 324, 325, 326, 327, 328], "provid": [1, 3, 72, 102, 187, 192, 201, 209, 228, 233, 235, 244, 245, 246, 249, 259, 292, 296, 327, 329], "open": [1, 6, 14, 150, 154], "flexibl": [1, 5, 246], "which": [1, 3, 4, 5, 6, 14, 32, 63, 74, 82, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 102, 107, 108, 109, 110, 111, 114, 115, 117, 143, 146, 147, 156, 157, 160, 161, 162, 163, 164, 176, 177, 183, 192, 194, 195, 212, 217, 218, 220, 226, 228, 232, 252, 273, 276, 280, 283, 293, 306, 307, 318, 321, 322, 323, 324, 328, 329], "user": [1, 3, 209], "mai": [1, 114, 217, 322, 323], "add": [1, 3, 84, 122, 140, 143, 214, 215, 322, 328], "special": 1, "without": [1, 3, 5, 178, 247, 292, 320, 321, 324, 325, 328], "much": [1, 3, 211, 212, 225, 226, 321, 324], "hassl": 1, "while": [1, 3, 6, 157, 252, 324, 325], "librari": [1, 6, 209], "suppli": 1, "effici": [1, 3, 5, 217, 252, 324, 326], "can": [1, 3, 5, 6, 10, 14, 46, 57, 63, 74, 75, 76, 77, 79, 82, 103, 104, 112, 113, 114, 122, 129, 132, 134, 145, 146, 150, 153, 154, 161, 180, 192, 209, 212, 219, 226, 232, 244, 254, 273, 293, 296, 298, 306, 307, 318, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329], "compos": [1, 5, 209, 321, 322, 326], "ani": [1, 3, 5, 14, 200, 201, 202, 209, 220, 228, 229, 232, 240, 249, 259, 293, 320, 321, 322, 324, 326, 327, 328], "number": [1, 14, 51, 63, 66, 72, 85, 102, 105, 111, 116, 140, 143, 144, 146, 149, 152, 154, 156, 158, 187, 189, 192, 194, 195, 209, 213, 214, 215, 217, 218, 221, 222, 247, 248, 259, 261, 262, 263, 264, 312, 318, 321, 322, 329], "applic": [1, 6], "aris": [1, 325], "case": [1, 3, 88, 91, 92, 94, 95, 96, 97, 98, 115, 127, 157, 176, 212, 217, 226, 253, 258, 283, 288, 290, 291, 306, 307, 321, 322, 326, 327, 328, 329], "where": [1, 4, 85, 143, 192, 195, 211, 212, 213, 214, 215, 216, 217, 218, 220, 221, 222, 223, 224, 225, 226, 232, 248, 250, 253, 255, 258, 263, 264, 268, 269, 270, 274, 280, 286, 288, 289, 291, 307, 322, 323], "new": [1, 4, 60, 74, 133, 136, 157, 177, 188, 201, 247, 296, 298, 309, 321, 323, 324, 325], "function": [1, 2, 3, 4, 5, 12, 63, 77, 80, 81, 102, 111, 114, 115, 127, 167, 192, 194, 195, 199, 201, 209, 220, 227, 229, 233, 244, 248, 254, 257, 258, 259, 268, 269, 270, 285, 290, 291, 293, 298, 307, 318, 320, 323, 324, 325, 327], "highli": 
[1, 6], "optim": [1, 2, 4, 5, 245, 321, 322, 324], "ar": [1, 2, 3, 4, 5, 6, 12, 14, 59, 60, 62, 63, 67, 74, 82, 85, 87, 88, 90, 91, 93, 94, 96, 97, 98, 102, 107, 108, 109, 110, 111, 114, 115, 117, 127, 139, 140, 141, 143, 144, 145, 146, 147, 150, 153, 154, 163, 164, 176, 177, 183, 192, 194, 195, 200, 201, 213, 214, 215, 216, 217, 218, 221, 222, 223, 224, 235, 247, 249, 271, 273, 274, 292, 296, 305, 307, 320, 321, 322, 323, 324, 325, 326, 327, 328], "need": [1, 3, 4, 5, 59, 143, 209, 245, 246, 256, 259, 318, 322, 324, 325, 326, 328], "For": [1, 3, 6, 114, 143, 202, 209, 213, 217, 228, 233, 241, 244, 249, 252, 256, 261, 262, 263, 264, 293, 318, 321, 322, 323, 324, 325, 326, 327, 328], "you": [1, 3, 4, 5, 6, 209, 256, 259, 293, 318, 321, 322, 323, 325, 327, 328], "design": [1, 2, 5, 318, 328], "your": [1, 3, 6, 296, 322, 324], "own": [1, 6, 325], "link": [1, 6], "top": [1, 224], "core": [1, 2, 3, 4, 209, 211, 212, 213, 222, 225, 226, 235, 238, 242, 260, 261, 262, 263, 264, 265, 266, 267, 271, 273, 280, 293, 296, 298, 321, 325, 326], "we": [1, 2, 3, 4, 72, 143, 144, 209, 219, 254, 303, 305, 318, 320, 321, 322, 324, 328], "inner": [1, 321], "work": [1, 3, 6, 321, 322, 323, 324], "go": [1, 3, 322], "over": [1, 3, 4, 11, 13, 21, 22, 23, 24, 65, 66, 88, 91, 94, 97, 106, 114, 116, 126, 128, 130, 131, 141, 142, 159, 171, 172, 181, 187, 193, 213, 214, 215, 221, 223, 250, 273, 312, 322], "simpl": [1, 3, 4, 209, 219, 292, 321, 322, 324], "learn": [1, 2, 4, 5, 213, 221, 222, 223, 248, 250, 299, 300, 301, 302, 303, 304, 305, 310, 311], "step": [1, 3, 4, 14, 209, 300, 307, 312, 314, 321], "involv": [1, 298, 321], "ad": [1, 2, 6, 222, 296, 299, 300, 301, 302, 303, 304, 310, 324, 327], "let": [1, 2, 3, 321, 322, 324, 325], "s": [1, 2, 3, 4, 34, 43, 63, 71, 72, 87, 88, 90, 91, 93, 94, 96, 97, 102, 114, 117, 130, 139, 143, 146, 158, 161, 162, 179, 192, 193, 195, 199, 209, 212, 226, 232, 233, 235, 239, 240, 244, 298, 307, 308, 318, 321, 322, 324, 325, 326, 327, 328], "sai": [1, 3, 293, 324], "would": [1, 3, 323, 324, 325, 328], "like": [1, 3, 5, 138, 198, 218, 279, 307, 309, 321, 322, 324, 325, 326, 328], "an": [1, 3, 4, 6, 8, 11, 13, 25, 60, 65, 66, 82, 85, 98, 101, 105, 114, 117, 128, 131, 133, 137, 138, 140, 142, 143, 144, 156, 157, 158, 173, 176, 182, 183, 184, 187, 189, 195, 197, 198, 200, 201, 209, 211, 212, 216, 221, 223, 224, 225, 226, 228, 247, 248, 249, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 286, 293, 299, 309, 313, 316, 318, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329], "take": [1, 3, 4, 63, 102, 111, 129, 132, 138, 144, 184, 192, 194, 195, 198, 247, 318, 322, 323, 327, 328, 329], "two": [1, 10, 12, 59, 74, 76, 79, 87, 90, 96, 103, 104, 112, 113, 115, 122, 127, 129, 132, 134, 139, 182, 212, 226, 249, 272, 321, 322, 323, 328], "arrai": [1, 3, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 76, 77, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 209, 213, 228, 235, 238, 242, 248, 260, 261, 262, 263, 264, 265, 
266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 291, 293, 296, 311, 312, 313, 314, 321, 322, 324, 325, 326, 328], "x": [1, 2, 3, 4, 80, 105, 114, 144, 147, 158, 163, 167, 190, 191, 196, 201, 209, 211, 212, 213, 220, 221, 222, 223, 224, 225, 226, 227, 228, 248, 250, 251, 253, 255, 256, 258, 268, 269, 270, 283, 285, 286, 287, 288, 289, 290, 291, 296, 298, 305, 321, 322, 323, 324, 325, 326, 328], "y": [1, 2, 3, 4, 196, 209, 213, 217, 221, 222, 223, 224, 250, 275, 280, 283, 298, 301, 321, 322, 324, 325], "scale": [1, 3, 72, 143, 144, 149, 217, 218, 247, 252, 253, 256, 288, 300], "them": [1, 3, 209, 233, 244, 328], "both": [1, 10, 76, 77, 79, 103, 104, 112, 113, 114, 122, 129, 132, 134, 146, 180, 211, 212, 222, 225, 226, 298, 321, 322, 326, 328], "some": [1, 2, 3, 4, 233, 244, 307, 321, 322, 324], "coeffici": [1, 299, 300, 302, 303, 304, 305], "alpha": [1, 143, 253, 284, 286, 288, 303, 310], "beta": [1, 72, 143, 213, 221, 222, 223, 283, 302, 303, 304, 305], "respect": [1, 2, 4, 102, 143, 192, 201, 209, 213, 220, 221, 222, 223, 296, 322, 326], "togeth": [1, 4, 143, 201], "get": [1, 2, 4, 6, 66, 70, 71, 148, 209, 321, 322, 324, 328], "z": [1, 321, 324], "well": [1, 3, 209, 233, 244, 247, 324], "veri": [1, 3, 247, 324, 328], "easili": 1, "do": [1, 3, 6, 209, 234, 244, 293, 296, 303, 321, 322, 324], "just": [1, 4, 321, 323], "write": [1, 3, 209, 325], "out": [1, 6, 211, 212, 217, 218, 225, 226, 241, 321, 322, 323], "follow": [1, 3, 4, 5, 6, 14, 67, 72, 114, 143, 209, 269, 270, 277, 299, 300, 301, 302, 303, 304, 305, 311, 318, 321, 322, 328], "import": [1, 2, 3, 4, 6, 114, 163, 192, 200, 201, 202, 209, 211, 212, 213, 222, 225, 226, 235, 271, 273, 280, 293, 296, 321, 322, 323, 324, 325, 326], "mx": [1, 2, 3, 4, 98, 114, 115, 117, 163, 192, 209, 211, 212, 213, 222, 225, 226, 228, 235, 239, 251, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 277, 280, 287, 293, 296, 298, 318, 321, 322, 323, 324, 325, 326, 327, 328, 329], "def": [1, 2, 3, 4, 192, 209, 296, 321, 322, 323, 324, 325, 328], "simple_axpbi": 1, "float": [1, 12, 14, 56, 100, 101, 114, 144, 145, 149, 150, 153, 154, 205, 213, 216, 217, 218, 221, 222, 223, 228, 250, 252, 256, 258, 259, 260, 261, 262, 263, 264, 266, 267, 272, 273, 274, 276, 280, 283, 284, 290, 291, 299, 300, 301, 302, 303, 304, 305, 310, 311, 312, 313, 314], "return": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 36, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 209, 230, 232, 234, 236, 237, 238, 242, 249, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 293, 296, 306, 320, 321, 322, 323, 324, 325, 327, 328], "thi": [1, 3, 4, 6, 11, 12, 13, 14, 21, 22, 23, 24, 78, 111, 114, 115, 122, 126, 127, 128, 130, 131, 141, 142, 146, 166, 171, 172, 173, 181, 183, 193, 209, 216, 217, 218, 229, 230, 232, 233, 236, 237, 238, 242, 244, 245, 246, 247, 249, 258, 261, 262, 263, 264, 269, 
270, 279, 291, 296, 307, 320, 321, 322, 324, 325, 327], "perform": [1, 3, 5, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 127, 144, 158, 171, 183, 209, 221, 259, 263, 264, 321, 323, 324, 328], "leav": [1, 82, 201], "differenti": [1, 5], "howev": [1, 209, 220, 221, 307, 318, 321, 324, 325], "vector": [1, 2, 5, 106, 111, 114, 183, 194, 195, 219, 273, 326], "math": [1, 3, 284, 321], "often": [1, 218], "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 114, 144, 200, 325], "same": [1, 3, 6, 12, 59, 60, 63, 66, 67, 92, 95, 96, 97, 102, 111, 140, 146, 158, 194, 196, 209, 212, 213, 216, 221, 222, 226, 249, 260, 261, 262, 263, 264, 265, 266, 267, 273, 284, 296, 306, 318, 321, 323, 328], "realli": 1, "part": [1, 322, 323], "doe": [1, 3, 6, 209, 321, 323, 324, 325], "fast": [1, 220, 270, 328], "so": [1, 3, 6, 102, 192, 216, 298, 321, 324, 328], "decid": [1, 201, 232], "want": [1, 3, 322, 328], "reli": 1, "acceler": [1, 213], "framework": [1, 5], "continu": [1, 322], "impos": 1, "our": [1, 3, 4, 254, 299, 300, 301, 302, 304, 305], "assumpt": 1, "also": [1, 3, 4, 5, 6, 10, 75, 76, 77, 79, 88, 91, 94, 97, 103, 104, 112, 113, 122, 129, 132, 134, 143, 180, 199, 209, 232, 245, 247, 249, 253, 255, 268, 288, 289, 292, 298, 321, 322, 323, 324, 325, 326, 329], "assum": [1, 3, 115, 201, 209, 211, 212, 221, 225, 226], "how": [1, 3, 4, 209, 211, 212, 214, 215, 219, 225, 226, 306, 321, 323, 328], "gradient": [1, 2, 4, 102, 178, 192, 199, 209, 233, 245, 249, 259, 279, 296, 298, 299, 300, 302, 303, 304, 305, 306, 309, 311, 321, 322, 323, 324, 325, 326], "ins": 1, "what": [1, 3, 201], "coincid": 1, "right": [1, 6, 143, 211, 212, 220, 225, 226, 269, 270, 274, 276, 284], "place": [1, 3, 158, 324, 325], "cours": [1, 322], "The": [1, 3, 4, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 34, 43, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 161, 162, 167, 168, 169, 170, 171, 172, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 205, 211, 212, 213, 214, 215, 216, 217, 218, 219, 221, 222, 223, 224, 225, 226, 229, 235, 239, 240, 245, 246, 247, 249, 250, 252, 254, 256, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 291, 293, 296, 298, 299, 300, 301, 302, 303, 304, 305, 308, 310, 311, 312, 316, 321, 322, 323, 324, 325, 326, 327, 328, 329], "structur": [1, 306, 322], "from": [1, 3, 4, 5, 72, 74, 93, 94, 96, 97, 101, 114, 117, 127, 138, 143, 145, 146, 147, 148, 150, 153, 163, 176, 178, 180, 183, 184, 196, 198, 200, 201, 202, 209, 224, 233, 235, 247, 261, 262, 263, 264, 266, 267, 274, 283, 293, 320, 321, 322, 324, 325, 326, 327, 328], "frontend": 1, "api": [1, 322], "redirect": 1, "when": [1, 3, 5, 6, 114, 117, 214, 215, 263, 264, 277, 283, 296, 318, 321, 328], "appropri": [1, 321], "fallback": 1, "metal": 1, "vjp": [1, 326], "jvp": [1, 326], "In": [1, 3, 4, 127, 143, 201, 209, 217, 221, 296, 299, 301, 302, 304, 305, 306, 320, 321, 322, 324, 327, 328], "one": [1, 3, 6, 56, 62, 66, 84, 85, 114, 120, 127, 144, 146, 176, 180, 244, 273, 328], "sentenc": 
1, "comput": [1, 2, 3, 4, 5, 6, 72, 102, 111, 114, 122, 130, 139, 143, 171, 178, 187, 192, 193, 194, 199, 209, 213, 221, 222, 223, 233, 245, 249, 250, 252, 259, 261, 262, 263, 264, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 298, 299, 300, 302, 303, 304, 305, 309, 321, 322, 326, 328], "graph": [1, 3, 4, 5, 322], "rule": 1, "evalu": [1, 3, 4, 5, 82, 111, 194, 209, 231, 241, 296, 298, 321, 326], "said": [1, 3], "start": [1, 2, 3, 5, 6, 14, 116, 173, 321, 323, 328], "discuss": 1, "more": [1, 4, 8, 56, 74, 127, 161, 162, 209, 213, 217, 252, 256, 259, 261, 262, 263, 264, 318, 321, 322, 323, 326, 328], "detail": [1, 8, 209, 217, 252, 256, 261, 262, 263, 264, 299, 301, 302, 304, 305, 323, 326], "thei": [1, 2, 3, 12, 67, 254, 275, 296, 305, 320, 321, 324, 326, 327, 328], "c": [1, 3, 114, 205, 211, 212, 213, 214, 215, 217, 218, 222, 225, 226, 325, 326, 328], "scalar": [1, 10, 12, 25, 36, 56, 59, 60, 62, 76, 77, 79, 100, 101, 102, 103, 104, 112, 113, 114, 116, 122, 123, 124, 125, 127, 129, 132, 134, 140, 150, 153, 154, 161, 180, 192, 196, 199, 284, 322, 324, 326], "sum": [1, 2, 10, 106, 114, 126, 171, 187, 209, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 323, 325], "element": [1, 9, 10, 15, 16, 17, 18, 19, 20, 23, 51, 61, 68, 69, 72, 76, 77, 79, 80, 81, 83, 85, 99, 100, 103, 104, 107, 108, 109, 110, 112, 113, 118, 119, 120, 121, 122, 123, 124, 125, 129, 132, 134, 135, 141, 143, 144, 155, 156, 159, 167, 168, 169, 170, 174, 175, 180, 183, 185, 186, 192, 196, 216, 217, 218, 227, 248, 252, 255, 285, 286, 289, 321, 322], "wise": [1, 9, 10, 15, 16, 17, 18, 19, 20, 61, 68, 69, 76, 77, 79, 80, 81, 83, 99, 100, 103, 104, 112, 113, 118, 119, 120, 121, 122, 123, 124, 125, 129, 132, 134, 135, 155, 159, 167, 168, 169, 170, 174, 175, 180, 185, 186, 217, 218, 227, 248, 255, 285, 286, 289, 321], "numpi": [1, 3, 4, 5, 10, 12, 14, 60, 76, 77, 79, 103, 104, 112, 113, 122, 127, 129, 132, 134, 180, 324, 326, 327], "style": [1, 10, 12, 76, 77, 79, 103, 104, 112, 113, 122, 127, 129, 132, 134, 180], "broadcast": [1, 10, 12, 60, 62, 76, 77, 79, 101, 103, 104, 112, 113, 122, 127, 129, 132, 134, 145, 146, 153, 154, 180, 184, 196, 247], "between": [1, 5, 62, 98, 259, 272, 275, 276, 279, 324, 328], "input": [1, 2, 3, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 73, 74, 76, 77, 79, 80, 81, 83, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 104, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 138, 139, 140, 141, 142, 143, 144, 152, 155, 156, 157, 158, 159, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 188, 190, 191, 192, 193, 195, 196, 198, 211, 212, 213, 214, 215, 217, 218, 219, 221, 222, 223, 224, 225, 226, 247, 249, 250, 252, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 274, 275, 276, 277, 279, 280, 282, 284, 291, 293, 321, 322, 323, 326, 327], "upcast": 1, "const": [1, 274], "factor": [1, 115, 273, 313, 314], "streamordevic": 1, "stream": [1, 5, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 69, 71, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 
116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 193, 196, 197, 198, 328], "schedul": [1, 298, 312, 313, 314, 316, 328], "itself": [1, 307], "call": [1, 3, 4, 26, 100, 209, 219, 233, 244, 254, 296, 298, 307, 321, 322, 324], "other": [1, 3, 5, 114, 209, 234, 296, 305, 321, 323, 324, 326], "within": [1, 23], "simplest": [1, 209], "wai": [1, 3, 6, 209, 321, 322, 323], "about": [1, 3, 4, 324, 328], "term": [1, 274, 299, 300, 301, 302, 303, 304, 310], "exist": [1, 3, 233, 244], "auto": [1, 6], "ax": [1, 11, 13, 21, 22, 57, 84, 87, 88, 90, 91, 93, 94, 96, 97, 98, 106, 114, 126, 128, 130, 131, 140, 142, 171, 176, 181, 182, 187, 188, 193, 322], "multipli": [1, 143, 144, 216, 256], "earlier": 1, "goal": 1, "themselv": [1, 321], "contain": [1, 3, 23, 24, 49, 63, 74, 92, 93, 94, 114, 123, 124, 125, 143, 173, 196, 209, 232, 234, 235, 240, 259, 280, 293, 296, 321, 322], "act": [1, 279], "data": [1, 4, 5, 8, 14, 85, 95, 96, 101, 105, 116, 137, 153, 189, 197, 218, 260, 261, 262, 263, 264, 265, 266, 267, 321, 323, 325], "nor": [1, 102, 192], "rather": [1, 322, 328], "easi": [1, 209], "interfac": 1, "block": [1, 3, 259], "A": [1, 3, 5, 6, 7, 49, 59, 63, 102, 111, 114, 115, 117, 126, 127, 143, 145, 146, 147, 149, 150, 153, 154, 173, 177, 179, 192, 194, 195, 199, 200, 201, 202, 203, 209, 213, 217, 221, 222, 223, 227, 232, 236, 237, 245, 246, 250, 254, 256, 259, 261, 262, 264, 270, 284, 285, 296, 298, 302, 304, 306, 307, 309, 321, 322, 324, 325], "It": [1, 3, 6, 102, 166, 192, 209, 246, 249, 306, 316, 325, 327], "creat": [1, 3, 6, 85, 105, 179, 209, 296, 298, 321, 323, 325], "output": [1, 3, 6, 11, 12, 13, 14, 23, 60, 63, 85, 92, 95, 96, 97, 101, 102, 105, 114, 116, 126, 128, 130, 131, 137, 138, 141, 142, 145, 146, 147, 149, 150, 153, 154, 163, 164, 171, 176, 181, 184, 189, 192, 193, 194, 195, 196, 197, 198, 211, 212, 213, 214, 215, 222, 224, 225, 226, 247, 249, 258, 259, 261, 262, 263, 264, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 291, 293, 321, 322, 323, 324, 325, 326, 327, 328], "given": [1, 11, 13, 23, 60, 62, 64, 72, 74, 82, 84, 86, 87, 88, 89, 90, 91, 95, 96, 97, 101, 114, 126, 128, 130, 131, 136, 142, 150, 158, 166, 171, 173, 181, 189, 190, 191, 193, 203, 211, 212, 216, 225, 226, 232, 247, 272, 274, 280], "set": [1, 3, 4, 6, 75, 78, 165, 166, 179, 220, 224, 231, 233, 240, 241, 244, 245, 249, 252, 258, 272, 284, 291, 296, 300, 307, 318, 322, 324], "further": [1, 6, 322], "class": [1, 3, 4, 7, 8, 25, 203, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 273, 296, 299, 300, 301, 302, 303, 304, 305, 310, 311, 316], "under": [1, 114], "These": [1, 63, 184, 273, 328], "word": 1, "bit": [1, 72, 143, 144, 205, 228, 249], "abstract": 1, "back": [1, 3, 325], "give": [1, 3, 4, 23, 321], "ourselv": 1, "concret": [1, 224, 324, 328], "imag": [1, 215, 217, 218], "public": [1, 209], "explicit": [1, 307, 318, 325], "alpha_": 1, "beta_": 1, "must": [1, 6, 62, 101, 114, 145, 146, 150, 153, 154, 196, 325], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 209, 293, 321, 322, 326], "avoid": [1, 321], "unnecessari": [1, 3], "alloc": [1, 296], "respons": 1, "space": [1, 116, 282], "void": 1, "eval_cpu": 1, 
"std": [1, 266], "overrid": [1, 78], "eval_gpu": 1, "jacobian": [1, 111, 194, 326], "product": [1, 106, 111, 127, 139, 142, 187, 194, 247, 326], "primal": [1, 111, 194], "tangent": [1, 19, 20, 111, 185, 186], "int": [1, 3, 4, 7, 11, 13, 14, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 47, 49, 52, 55, 56, 58, 60, 64, 65, 66, 72, 73, 74, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 101, 102, 105, 114, 116, 126, 128, 130, 131, 133, 137, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 171, 172, 173, 176, 177, 181, 182, 183, 184, 187, 188, 189, 190, 191, 192, 193, 195, 197, 203, 209, 211, 212, 213, 214, 215, 219, 221, 222, 223, 224, 225, 226, 247, 249, 250, 252, 256, 259, 272, 273, 277, 282, 284, 296, 312, 314], "argnum": [1, 102, 192, 322], "cotan": 1, "across": [1, 221], "pair": [1, 140, 235, 252], "repres": [1, 3, 280, 284, 325], "axi": [1, 3, 4, 11, 13, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 52, 55, 58, 64, 74, 84, 86, 89, 92, 93, 94, 95, 96, 97, 98, 114, 126, 128, 130, 131, 133, 140, 141, 142, 146, 156, 171, 172, 173, 176, 177, 181, 182, 183, 184, 188, 193, 195, 211, 212, 225, 226, 272, 273, 277, 282, 284, 323], "correspond": [1, 11, 13, 56, 62, 72, 74, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 126, 128, 131, 142, 181, 187, 195, 201, 322], "dimens": [1, 3, 11, 13, 21, 22, 43, 49, 56, 66, 74, 84, 93, 94, 96, 97, 98, 106, 114, 115, 126, 127, 128, 130, 131, 142, 143, 146, 152, 181, 184, 187, 188, 193, 213, 214, 215, 217, 218, 221, 222, 223, 247, 250, 252, 259, 273, 321, 322], "vmap": [1, 322, 324, 326], "print": [1, 2, 3, 4, 6, 200, 201, 202, 209, 318, 321, 322, 323, 324, 325, 326], "ostream": 1, "os": [1, 6], "equival": [1, 26, 46, 57, 77, 100, 183, 220, 246, 248, 249, 257], "check": [1, 6, 59, 235, 322, 323], "bool": [1, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 56, 58, 59, 114, 117, 126, 128, 130, 131, 142, 144, 145, 150, 153, 154, 181, 193, 213, 214, 215, 221, 222, 223, 224, 228, 232, 233, 235, 241, 244, 247, 249, 252, 256, 259, 271, 274, 300, 311], "is_equival": 1, "privat": 1, "fall": 1, "eval": [1, 2, 3, 4, 209, 296, 298, 321, 322, 324, 326], "deriv": [1, 322, 324], "base": [1, 114, 119, 121, 252, 259, 296, 298, 304, 316, 318, 321, 323], "abov": [1, 3, 6, 143, 190, 209, 303, 322, 323, 324, 328], "demonstr": [1, 325], "treat": [1, 93, 94, 96, 97, 183, 321], "paramet": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 76, 77, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 229, 232, 233, 235, 240, 241, 244, 245, 246, 247, 248, 249, 250, 252, 254, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 291, 292, 293, 296, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 309, 310, 311, 312, 313, 314, 316, 321, 322, 
324], "produc": [1, 63, 247, 293], "through": [1, 178, 259, 305, 321, 322, 325], "construct": [1, 4, 73, 101, 137, 197], "its": [1, 6, 127, 141, 152, 189, 199, 202, 209, 249, 302, 303, 304, 325, 328], "type": [1, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 200, 209, 259, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 321, 323], "shape": [1, 3, 4, 46, 59, 60, 65, 66, 74, 86, 89, 92, 95, 96, 97, 101, 111, 127, 137, 138, 145, 146, 147, 149, 150, 153, 154, 157, 184, 194, 196, 197, 198, 209, 211, 212, 213, 214, 215, 217, 218, 222, 224, 225, 226, 235, 260, 261, 262, 263, 264, 265, 266, 267, 273, 284, 298, 321, 322, 323, 326, 328], "pass": [1, 3, 4, 46, 57, 139, 140, 192, 199, 200, 201, 209, 233, 244, 245, 246, 249, 254, 321, 324], "re": [1, 4, 6, 293], "now": [1, 3, 6, 249, 321, 325], "promot": 1, "dtype": [1, 3, 14, 25, 32, 56, 85, 98, 101, 105, 114, 115, 116, 137, 147, 149, 150, 153, 154, 189, 197, 205, 260, 261, 262, 263, 264, 265, 266, 267, 271, 273, 280, 312, 313, 314, 321, 322, 323, 325, 326, 327], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 14, 85, 105, 114, 115, 116, 137, 147, 149, 153, 154, 189, 197, 205, 260, 261, 262, 263, 264, 265, 266, 267, 271, 273, 280, 312, 313, 314, 321, 322, 323, 324, 325, 326, 327], "non": [1, 6, 227, 242, 285, 296], "point": [1, 2, 3, 6, 100, 144, 205], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 32, 95, 96, 97, 117, 228, 325], "up": [1, 3, 249, 321], "determin": [1, 74, 239, 327], "x_cast": 1, "astyp": [1, 3, 228, 325], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 14, 47, 52, 58, 64, 65, 66, 73, 74, 85, 98, 102, 114, 115, 140, 145, 149, 154, 156, 158, 173, 177, 189, 190, 191, 192, 193, 195, 200, 209, 211, 212, 213, 214, 215, 216, 217, 218, 220, 221, 222, 223, 225, 226, 248, 251, 252, 253, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 273, 275, 276, 280, 283, 284, 286, 287, 288, 290, 291, 293, 296, 299, 300, 302, 303, 304, 305, 307, 310, 311, 312, 313, 314, 318, 321, 322, 323, 324, 325, 326, 327], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 209, 321], "resolv": 1, "No": [1, 3], "happen": [1, 3, 259, 298, 321, 324], "alon": [1, 325], "effect": [1, 217, 321, 324], "onli": [1, 3, 5, 6, 59, 65, 66, 114, 143, 205, 209, 232, 233, 235, 241, 244, 245, 246, 296, 321, 322, 327, 328], "execut": [1, 6, 325, 328], "depend": [1, 2, 56, 114, 323, 327, 328], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 120, 
121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 193, 196, 197, 198, 203, 328, 329], "specifi": [1, 14, 32, 66, 74, 93, 94, 101, 102, 114, 116, 133, 137, 146, 156, 182, 183, 184, 187, 188, 192, 195, 197, 213, 258, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 291, 322, 328], "memori": [1, 5, 259, 296, 300, 321, 324, 325], "ha": [1, 3, 4, 5, 56, 63, 74, 92, 93, 95, 96, 97, 102, 146, 213, 224, 296, 298, 321, 323, 324, 326, 328], "been": [1, 3, 324], "try": [1, 6], "naiv": [1, 322], "gener": [1, 2, 14, 85, 93, 94, 116, 145, 149, 150, 153, 154, 259, 318, 321, 323, 324, 329], "version": [1, 6, 72, 122, 126, 143, 171, 195, 318, 322, 323], "declar": 1, "member": [1, 209, 238, 242], "method": [1, 3, 7, 8, 25, 203, 209, 239, 296, 299, 300, 301, 302, 303, 304, 305, 307, 310, 311, 316], "each": [1, 49, 72, 82, 127, 140, 143, 144, 146, 156, 163, 164, 173, 188, 195, 196, 217, 218, 219, 221, 252, 259, 271, 273, 318, 321, 324], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 63, 209, 321], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 80, 144, 192, 209, 211, 225, 299, 300, 301, 302, 303, 304, 305, 310, 311, 321, 322, 328], "readi": 1, "fill": [1, 101, 138, 189, 198, 260, 261, 262, 263, 264, 266, 267], "malloc_or_wait": 1, "synchron": [1, 321], "avail": [1, 2, 3, 4, 6, 8, 205, 328], "There": [1, 209, 321], "wait": [1, 3], "here": [1, 3, 321, 322, 324, 327, 328], "request": 1, "pressur": 1, "condit": [1, 196, 328], "set_data": 1, "nbyte": 1, "collect": [1, 201, 320], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 49, 66, 72, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 101, 105, 114, 143, 144, 146, 157, 173, 176, 209, 211, 212, 214, 215, 219, 222, 225, 226, 249, 300, 324, 325], "map": [1, 4, 117, 201, 219, 228], "linear": [1, 3, 4, 5, 201, 209, 220, 235, 249, 251, 253, 255, 268, 269, 270, 287, 288, 289, 293, 296, 307, 321], "indic": [1, 12, 21, 22, 23, 24, 102, 107, 108, 109, 110, 173, 183, 184, 192, 241, 243, 273, 280, 323], "offset": [1, 3, 74], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 65, 66, 211, 212, 214, 215, 225, 226, 252, 323], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 11, 12, 13, 14, 21, 22, 23, 24, 59, 63, 64, 65, 66, 70, 71, 72, 73, 74, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 102, 105, 114, 115, 116, 117, 126, 128, 130, 131, 137, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 153, 154, 156, 157, 158, 165, 166, 172, 173, 176, 177, 179, 181, 187, 188, 189, 190, 191, 192, 193, 195, 197, 205, 211, 212, 213, 214, 215, 222, 224, 225, 226, 228, 233, 235, 241, 244, 247, 248, 249, 252, 256, 257, 259, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 296, 299, 300, 301, 302, 303, 304, 305, 310, 311, 318, 320, 321, 322, 325, 327, 329], "row": [1, 85, 105, 143, 189], "major": 1, "henc": [1, 143, 321], "doesn": [1, 209], "addit": [1, 3, 10, 117, 213, 221, 223, 247, 250, 296, 322], "abl": [1, 143], "all": [1, 4, 6, 12, 23, 66, 85, 88, 91, 94, 97, 127, 140, 141, 176, 209, 228, 229, 233, 236, 237, 238, 242, 244, 247, 249, 256, 259, 293, 296, 316, 318, 321, 323, 324, 326, 329], "incom": 1, "accordingli": 1, "dispatch": 1, 
"float16": [1, 117, 205, 228, 324, 325], "bfloat16": [1, 325], "complex64": 1, "throw": 1, "error": [1, 6, 80, 81, 173, 220, 249, 268, 269, 270, 279, 281, 322, 325], "encount": [1, 322], "unexpect": [1, 14], "regist": [1, 4], "op": [1, 139, 233, 324], "assert": 1, "2": [1, 2, 3, 4, 66, 73, 74, 80, 87, 90, 92, 93, 94, 95, 96, 97, 98, 114, 115, 121, 127, 143, 152, 187, 189, 190, 191, 205, 209, 211, 212, 215, 220, 225, 226, 250, 256, 260, 261, 262, 263, 264, 265, 266, 267, 269, 273, 274, 276, 283, 284, 293, 296, 299, 301, 302, 303, 307, 310, 321, 322, 323, 324, 325, 326, 327, 328], "1": [1, 3, 4, 14, 23, 24, 65, 66, 73, 74, 86, 87, 89, 90, 92, 93, 94, 95, 96, 97, 98, 106, 114, 115, 127, 139, 141, 143, 146, 149, 154, 167, 172, 183, 192, 205, 209, 211, 212, 213, 214, 215, 216, 217, 218, 220, 221, 222, 223, 224, 225, 226, 248, 250, 252, 253, 256, 258, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 276, 277, 279, 280, 282, 283, 284, 288, 291, 293, 296, 298, 299, 300, 301, 302, 303, 304, 305, 307, 310, 311, 312, 313, 314, 321, 322, 323, 325, 326, 327, 328], "correct": [1, 6, 302, 303, 304, 323, 324], "els": [1, 3, 209, 233, 324], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 12, 65, 66, 98, 115, 117, 127, 143, 322, 323, 325, 327], "have": [1, 3, 6, 12, 59, 93, 94, 96, 97, 127, 146, 200, 247, 254, 305, 307, 320, 321, 323, 324, 328], "rememb": 1, "3": [1, 3, 6, 98, 114, 115, 262, 264, 300, 305, 318, 321, 323, 325, 326], "complic": 1, "keep": [1, 11, 13, 21, 22, 126, 128, 130, 131, 142, 181, 193, 209, 232, 322, 324], "mind": [1, 3], "half": [1, 14, 150, 154, 252, 324], "precis": [1, 3, 209, 220, 306, 321], "direct": [1, 3, 230, 305, 328], "fix": [1, 3, 6, 324], "possibl": [1, 3, 127, 173, 219, 321, 323, 328], "due": 1, "transpos": [1, 3, 26, 144], "aren": 1, "guarante": 1, "fit": [1, 143, 328], "requir": [1, 3, 209, 324, 325], "column": [1, 85, 105, 143], "inplac": 1, "expect": [1, 3, 214, 215, 216, 217, 218, 256, 259, 274, 321, 323], "answer": 1, "copi": [1, 3, 5, 141, 172, 325], "simpli": [1, 3, 6, 251, 287, 296, 321, 322], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 74, 98, 102, 123, 125, 127, 141, 152, 182, 187, 192, 200, 209, 212, 221, 226, 272, 280, 300, 302, 303, 304, 307, 321, 322, 325, 328], "mode": [1, 67, 231, 241, 243, 263, 264], "i": [1, 3, 111, 114, 209, 214, 215, 217, 218, 233, 279, 303, 321, 322], "e": [1, 4, 6, 80, 111, 167, 213, 214, 215, 217, 218, 221, 222, 223, 233, 250, 292, 298, 301, 321, 324, 329], "match": [1, 6, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 235, 273, 323, 325], "transposit": 1, "data_s": 1, "items": 1, "flag": [1, 321, 325], "copy_inplac": 1, "copytyp": 1, "n": [1, 3, 25, 65, 66, 85, 86, 88, 89, 91, 92, 95, 97, 105, 189, 193, 211, 212, 213, 214, 215, 217, 218, 225, 226, 279, 284], "incx": 1, "inci": 1, "great": 1, "But": [1, 328], "criteria": 1, "luckili": [1, 324], "alwai": [1, 200, 322], "With": 1, "final": [1, 2, 3, 4], "singl": [1, 4, 82, 111, 117, 140, 194, 212, 226, 321, 323, 327], "row_contigu": 1, "col_contigu": 1, "common": [1, 298, 321, 324], "hit": 1, "mileston": 1, "enough": [1, 324], "run": [1, 3, 4, 5, 6, 7, 139, 203, 213, 228, 299, 300, 302, 303, 304, 321, 324, 328, 329], "If": [1, 3, 6, 11, 12, 13, 14, 21, 22, 23, 24, 56, 59, 62, 64, 67, 73, 74, 82, 95, 96, 97, 100, 101, 102, 114, 117, 126, 127, 128, 130, 131, 137, 140, 141, 142, 146, 156, 171, 172, 173, 181, 183, 184, 187, 192, 193, 195, 197, 201, 213, 214, 215, 221, 223, 224, 233, 235, 244, 
249, 252, 254, 256, 271, 273, 284, 300, 321, 322, 324, 327, 328, 329], "plan": [1, 321], "stop": [1, 3, 14, 116, 178, 322, 323], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 328], "silicon": [1, 3, 5, 6, 328], "address": 1, "shade": 1, "languag": [1, 205], "kernel": [1, 65, 66, 211, 225, 321, 323], "written": 1, "help": [1, 3, 321, 328], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6, 322], "cpp": 1, "algorithm": [1, 305], "launch": [1, 323], "exactli": [1, 3, 235, 322], "mani": [1, 173, 214, 215, 219, 321, 324], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 63, 201, 213, 228, 235, 240, 246, 298, 300, 303, 305, 306, 307, 311, 312, 313, 314, 321, 324], "assign": [1, 296], "axpby_gener": 1, "buffer": [1, 325], "constant": [1, 3, 6, 140, 209, 213, 221, 223, 250, 274, 284, 310, 312, 321, 325], "4": [1, 3, 72, 98, 114, 143, 144, 163, 205, 211, 212, 213, 222, 225, 226, 249, 259, 261, 262, 263, 271, 321, 323, 326, 328], "5": [1, 2, 3, 6, 114, 145, 211, 213, 216, 217, 218, 222, 225, 257, 260, 263, 264, 283, 290, 293, 310, 312, 313, 321, 322, 323], "x_stride": 1, "6": [1, 3, 114, 163, 259, 262, 269, 270, 274, 284, 310, 321, 323, 326], "y_stride": 1, "7": [1, 3, 114, 143, 323], "ndim": [1, 98, 114], "8": [1, 3, 6, 114, 143, 205, 212, 222, 226, 259, 272, 299, 300, 301, 302, 303, 304, 310, 321, 323, 326, 328], "uint": 1, "index": [1, 5, 7, 23, 84, 85, 102, 141, 183, 184, 192, 203], "thread_position_in_grid": 1, "convert": [1, 56, 98, 249, 324, 325, 326], "instanti": [1, 4, 324], "uniqu": [1, 318], "host": 1, "name": [1, 117, 143, 144, 161, 162, 163, 164, 209, 221, 232, 235, 237, 323, 327], "identifi": [1, 200, 320], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "compil": [1, 5, 6, 75, 78, 322, 324], "mlx_ext": 1, "metallib": [1, 6], "see": [1, 3, 4, 6, 8, 27, 28, 29, 30, 31, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 114, 161, 162, 209, 213, 217, 220, 231, 248, 249, 252, 253, 256, 257, 261, 262, 263, 264, 268, 269, 270, 288, 321, 322, 323, 326, 328], "later": [1, 6], "co": [1, 256, 322], "locat": [1, 245, 246, 328], "share": [1, 5, 72, 143, 144], "register_librari": 1, "potenti": 1, "path": [1, 6, 163, 164, 235], "tri": 1, "load": [1, 4, 5, 235], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 25, 36, 56, 145, 150, 153, 154, 195, 200, 201, 217, 320], "why": [1, 3], "packag": [1, 2, 4, 293], "process": [1, 3, 67, 201, 218, 219, 259, 320], "logic": [1, 123, 124, 125], "grid": 1, "shown": 1, "below": [1, 6, 114, 189, 191, 205, 324], "prepar": [1, 3], "carri": 1, "should": [1, 2, 3, 4, 6, 74, 111, 143, 184, 192, 194, 200, 209, 214, 215, 217, 218, 241, 247, 254, 273, 275, 280, 296, 320, 321, 322, 324, 325, 329], "d": [1, 3, 73, 74, 106, 114, 127, 139, 183, 189, 190, 191, 202, 218, 299, 302, 304, 328], "ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 127, 136, 166, 209, 312, 313, 314, 321, 324, 326, 328], "sure": [1, 3, 6, 209, 321], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 67, 102, 114, 117, 160, 161, 162, 163, 164, 192, 200, 202, 228, 229, 232, 233, 235, 237, 239, 244, 263, 264, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284], "encod": [1, 252, 256, 259, 273], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 209], "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": [1, 106, 280, 322], "than": [1, 3, 56, 67, 74, 77, 103, 
104, 112, 113, 127, 201, 252, 258, 280, 283, 291, 300, 305, 321, 322, 328], "max": [1, 114, 129, 225, 226, 248, 272, 274, 275, 280, 284, 286, 300, 304, 321, 322, 328], "allow": [1, 209, 246, 296, 316, 323, 326], "tgp_size": 1, "min": [1, 114, 132, 248, 286], "maxtotalthreadsperthreadgroup": 1, "3d": [1, 213, 218], "mtl": 1, "group_dim": 1, "grid_dim": 1, "divid": [1, 100, 143], "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 324, 326], "thing": [1, 3], "note": [1, 3, 6, 12, 65, 66, 93, 94, 114, 143, 146, 209, 325, 327], "befor": [1, 3, 6, 23, 141, 232, 259, 307, 323, 324], "move": [1, 133, 328], "track": [1, 209, 213], "activ": [1, 6, 217, 227, 258, 259, 285, 290, 291, 292, 321], "command": [1, 6], "instead": [1, 6, 209, 246, 256, 322, 324], "end_encod": 1, "end": [1, 74, 143, 212, 226, 253, 258, 276, 283, 288, 290, 291], "until": [1, 324, 326], "limit": [1, 62, 323], "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 163, 164, 324], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3, 322], "far": [1, 298], "built": [1, 6, 324], "includ": [1, 229, 240, 249, 274, 321, 322, 323, 326, 327, 329], "forward": [1, 192, 321, 324], "diff": 1, "push": 1, "along": [1, 21, 22, 63, 64, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 114, 156, 171, 173, 177, 183, 184, 187, 209], "similarli": [1, 6, 127, 322, 324], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 188, 256], "arg": [1, 3, 8, 46, 57, 82, 163, 164], "push_back": 1, "fulli": [1, 5, 321, 325, 328], "overal": 1, "directori": [1, 3, 6], "extens": [1, 117, 205, 239, 327], "h": [1, 65, 66, 114, 212, 213, 215, 217, 218, 226, 322, 324], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 25, 203, 209, 296], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6, 321], "hold": [1, 3, 8, 114, 321], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 296, 305, 325, 328], "compon": [1, 3], "etc": [1, 143, 209], "pybind11_modul": 1, "m": [1, 6, 85, 114, 189, 211, 212, 225, 226, 299], "doc": [1, 4], "sampl": [1, 2, 3, 116, 145, 146, 147, 150, 153, 154, 261, 262, 263, 264, 266, 267, 274, 280, 284, 318, 321], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 162, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 195, 196, 197, 198, 200, 201, 203, 211, 212, 220, 225, 226, 228, 232, 233, 244, 247, 256, 259, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 300, 316, 323], "r": [1, 3, 115, 192, 217], "pbdoc": 1, "most": [1, 146, 209, 309, 321, 322, 323, 324], "complex": [1, 93, 94, 95, 96, 97, 145, 150, 153, 154, 200, 209, 246, 321, 322], "bell": 1, "whistl": 1, "liter": [1, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284], "string": [1, 325, 327], "modul": [1, 3, 4, 199, 249, 254, 259, 293, 309, 320, 321, 324], "ensur": [1, 6, 279], "caster": 1, "find_packag": 1, 
"config": 1, "add_librari": 1, "sourc": [1, 133, 188], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, "target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 192, 271, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 321], "destin": [1, 133], "automat": [1, 5, 117, 326, 327, 328], "practic": [1, 321], "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 321], "describ": [1, 324], "util": [1, 3, 5, 6, 163, 209], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 205], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, "package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 59, 114, 117, 126, 128, 130, 131, 142, 181, 193, 196, 200, 201, 205, 221, 222, 224, 233, 235, 244, 247, 249, 252, 256, 259, 271, 274, 300, 311, 325], "python_requir": 1, "even": [1, 3, 321, 324, 325], "though": [1, 3, 321, 324, 325], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 23, 98, 100, 141, 143, 213, 221, 223, 247, 259, 283, 321, 328], "plai": [1, 3], "ones": [1, 3, 138, 163, 189, 245, 246, 249, 323], "b": [1, 3, 10, 12, 59, 76, 77, 79, 100, 103, 104, 106, 112, 113, 114, 122, 123, 125, 127, 129, 132, 134, 139, 143, 180, 187, 192, 224, 322, 323, 324, 325, 326, 327, 328], "f": [1, 2, 4, 114, 209, 303, 321, 325], "item": [1, 2, 3, 4, 201, 324, 325, 326], "true": [1, 2, 3, 12, 59, 114, 117, 144, 171, 196, 200, 201, 205, 209, 213, 214, 215, 221, 222, 223, 224, 232, 233, 235, 241, 244, 249, 252, 256, 259, 271, 279, 300], "quick": [1, 5], "benchmark": [1, 321], "compar": [1, 59, 321], "time": [1, 3, 6, 209, 211, 212, 225, 226, 321, 322, 324, 328], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 259, 328], "random": [1, 2, 3, 4, 5, 211, 212, 213, 222, 225, 226, 235, 241, 321, 322, 328, 329], "normal": [1, 2, 3, 153, 209, 211, 212, 213, 221, 222, 223, 225, 226, 250, 259, 261, 263, 325, 328], "bench": 1, "warm": [1, 321], "rang": [1, 2, 3, 4, 6, 14, 98, 116, 262, 264, 269, 270, 298, 312, 313, 314, 318, 321, 322, 324, 328], "100": [1, 2, 3, 321, 322, 324, 328], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4, 321], "custom": [1, 259], "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 299, 300, 301, 302, 303, 304, 310, 321], "awai": [1, 3], "good": [1, 6, 321, 328], "nn": [1, 3, 4, 163, 201, 209, 293, 296, 298, 307, 309, 321, 324], "grad": [1, 2, 4, 192, 298, 306, 321, 322, 323, 324, 326], "full": [1, 4, 46, 57, 67, 171, 245, 246, 274, 321, 324], "implement": [2, 4, 114, 219, 232, 247, 252, 254, 256, 258, 259, 291, 299, 300, 301, 302, 304, 305, 306, 316, 321, 322, 325], "basic": [2, 158, 322], "model": [2, 4, 5, 163, 199, 201, 209, 228, 231, 233, 235, 239, 241, 243, 244, 245, 247, 259, 293, 296, 298, 306, 307, 309, 321, 324], "problem": [2, 4, 209], "metadata": [2, 117, 161, 162], "num_featur": [2, 213], "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 201, 318, 321, 324], "sgd": [2, 4, 298, 305, 307, 312, 313, 314, 321], "lr": [2, 305], "01": [2, 303], "rate": [2, 299, 300, 301, 302, 303, 304, 305, 310, 311], "ll": [2, 4, 276, 321, 322], "synthet": 2, 
"dataset": [2, 324], "matrix": [2, 72, 73, 85, 105, 114, 115, 127, 143, 144, 249, 265, 293], "ground": [2, 3, 273, 283], "truth": [2, 273, 283], "w_star": 2, "valu": [2, 3, 9, 12, 14, 21, 22, 36, 56, 59, 62, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 101, 114, 116, 140, 145, 146, 147, 149, 150, 153, 154, 161, 183, 184, 192, 195, 199, 200, 201, 205, 212, 216, 217, 218, 222, 224, 226, 232, 247, 248, 257, 258, 259, 260, 271, 272, 273, 274, 275, 276, 278, 279, 280, 281, 282, 283, 291, 296, 300, 303, 312, 313, 314, 322], "gaussian": [2, 220, 268, 269, 270, 274], "nois": 2, "exampl": [2, 3, 4, 14, 98, 114, 115, 179, 183, 209, 211, 212, 213, 222, 225, 226, 233, 235, 241, 244, 260, 261, 262, 263, 264, 265, 266, 267, 271, 273, 280, 293, 298, 307, 312, 313, 314, 318, 322, 323, 324, 325, 326, 327], "noisi": 2, "label": [2, 273, 280], "ep": [2, 213, 221, 222, 223, 250, 272, 274, 284, 299, 300, 301, 302, 303, 304, 310], "1e": [2, 4, 12, 213, 221, 222, 223, 250, 272, 274, 284, 299, 300, 301, 302, 303, 304, 307, 310, 312, 313, 314], "us": [2, 3, 4, 5, 6, 14, 72, 75, 77, 98, 114, 115, 127, 143, 144, 156, 157, 200, 209, 212, 217, 219, 220, 224, 226, 228, 232, 239, 245, 246, 247, 249, 252, 256, 259, 263, 264, 269, 270, 272, 293, 296, 298, 299, 300, 302, 303, 304, 305, 306, 307, 316, 318, 320, 321, 322, 323, 326, 328], "weight": [2, 65, 66, 201, 209, 235, 239, 249, 271, 273, 296, 300, 303, 305, 307, 311, 322, 324], "squar": [2, 3, 105, 159, 174, 192, 201, 209, 250, 281, 283, 299, 300, 302, 303, 304, 322, 325], "loss": [2, 4, 192, 209, 298, 321, 322, 324], "loss_fn": [2, 4, 298, 321, 322], "w": [2, 66, 72, 143, 144, 192, 212, 213, 215, 217, 218, 224, 226, 311, 322], "mean": [2, 3, 4, 149, 192, 209, 213, 221, 233, 250, 266, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 321, 322, 325], "grad_fn": [2, 321, 322], "initi": [2, 3, 209, 213, 221, 222, 223, 224, 248, 250, 260, 261, 262, 263, 264, 265, 266, 267, 296, 307, 312, 313, 314, 321, 324], "randomli": [2, 3, 216, 217, 218], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 209, 312, 313, 314, 318, 321, 324, 328], "verifi": [2, 6], "close": [2, 5, 6, 12], "error_norm": 2, "5f": 2, "someth": [2, 3, 323], "00005": 2, "00364": 2, "complet": [2, 3, 6, 245, 246, 322, 328], "logist": [2, 167, 255, 269, 270, 289], "github": [2, 4, 6, 321], "repo": [2, 4, 6, 321], "enabl": [3, 6, 78, 311], "larg": [3, 209, 247, 279, 321, 324], "ish": 3, "transform": [3, 5, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 199, 209, 213, 221, 223, 224, 232, 233, 244, 249, 252, 323], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 23, 113, 141, 252, 283], "200": 3, "line": [3, 324, 325], "python": [3, 36, 49, 56, 82, 200, 201, 202, 296, 306, 307, 309, 320, 322, 325], "neural": [3, 5, 219, 227, 261, 262, 285, 293, 296, 310], "network": [3, 5, 213, 217, 219, 261, 262, 293, 296, 310], "build": [3, 5, 263, 296, 321], "concis": 3, "architectur": [3, 6, 209, 246, 328], "notabl": [3, 5], "rope": [3, 209], "posit": [3, 23, 74, 98, 102, 110, 133, 141, 192, 201, 209, 214, 215, 247, 252, 256, 274, 284], "option": [3, 11, 13, 14, 21, 22, 23, 24, 25, 30, 31, 63, 64, 65, 66, 67, 72, 73, 74, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 101, 102, 105, 109, 110, 114, 115, 116, 117, 126, 128, 130, 131, 137, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 153, 154, 156, 157, 162, 171, 172, 173, 176, 177, 181, 183, 184, 187, 188, 189, 190, 191, 192, 193, 195, 197, 200, 201, 211, 212, 213, 214, 215, 224, 225, 226, 228, 232, 233, 235, 244, 247, 
249, 252, 256, 259, 260, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 299, 300, 301, 302, 303, 304, 305, 307, 310, 311, 318, 321, 327, 329], "kei": [3, 145, 146, 147, 149, 150, 152, 153, 154, 200, 201, 232, 233, 244, 247, 307, 318, 320, 322], "cach": [3, 321], "concaten": 3, "project": [3, 247], "llamaattent": 3, "self": [3, 4, 7, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 56, 57, 58, 203, 209, 227, 285, 296], "dim": [3, 219, 221, 222, 223, 247, 250, 252, 256, 259], "num_head": [3, 247, 259], "super": [3, 4, 209, 296], "tradit": [3, 217, 218, 252], "query_proj": 3, "bia": [3, 72, 143, 144, 201, 209, 214, 215, 224, 233, 235, 244, 247, 249, 302, 303, 304, 307, 322], "key_proj": 3, "value_proj": 3, "out_proj": [3, 296], "__call__": [3, 4, 209, 296], "queri": [3, 247], "mask": [3, 241, 247, 323], "extract": [3, 73, 74, 209, 232, 296], "l": [3, 4, 209, 211, 213, 214, 225, 283], "reshap": [3, 114, 323], "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 80, 213, 221, 222, 223, 224, 250, 256, 261, 262, 263, 264, 299, 301, 302, 303, 310, 321], "score": [3, 280], "softmax": [3, 273], "values_hat": 3, "rm": [3, 6, 300], "swiglu": 3, "rmsnorm": [3, 209], "llamaencoderlay": 3, "mlp_dim": [3, 259], "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 255, 269, 270, 289], "instanc": [3, 143, 202, 209, 222, 228, 229, 230, 233, 236, 237, 244, 246, 254, 296, 325], "embed": [3, 209, 252, 256, 272], "emb": [3, 219, 256], "token": [3, 219], "num_lay": [3, 4, 298], "vocab_s": 3, "norm": [3, 221, 284, 304, 305], "multiheadattent": [3, 209], "create_additive_causal_mask": 3, "list": [3, 8, 11, 13, 25, 28, 29, 39, 40, 41, 42, 44, 52, 55, 56, 58, 60, 63, 64, 82, 84, 87, 88, 90, 91, 93, 94, 96, 97, 101, 102, 111, 114, 126, 128, 130, 131, 137, 140, 142, 145, 146, 147, 149, 150, 153, 154, 157, 161, 171, 173, 176, 177, 181, 187, 188, 192, 193, 194, 197, 200, 202, 209, 233, 235, 236, 237, 238, 242, 244, 245, 246, 296, 302, 303, 304, 305, 320, 321, 322, 324], "still": [3, 6, 114, 321, 324], "consid": [3, 12, 59, 200, 201, 221, 320], "train": [3, 4, 209, 213, 216, 217, 218, 231, 233, 244, 261, 262], "ignor": [3, 62, 63, 82, 300], "whatsoev": 3, "rest": [3, 201, 252], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 318], "temp": 3, "causal": 3, "save": [3, 5, 117, 143, 161, 162, 163, 164, 239, 324], "append": [3, 127, 321, 324], "store": 3, "per": [3, 4, 72, 143, 144, 213, 221, 222, 223, 250, 316, 321, 324], "care": [3, 324], "last": [3, 24, 56, 88, 91, 93, 94, 96, 97, 98, 106, 115, 127, 146, 172, 187, 214, 215, 217, 218, 221, 325], "logit": [3, 146, 271, 273, 321], "next": [3, 4], "categor": 3, "lazili": [3, 209], "noth": [3, 209, 324], "yet": [3, 114, 209, 296, 307, 322, 323, 324, 326], "forc": [3, 4, 209, 326], "choos": [3, 252], "pars": 3, "feed": 3, "loop": [3, 4, 321, 322, 324], "unsqueez": 3, "sequenc": [3, 213, 214, 259, 318, 328], "length": [3, 176, 213, 214], "len": [3, 88, 91, 94, 97], "overwrit": 3, "discard": [3, 200], "old": 3, "moment": [3, 300, 302, 303, 304], "anymor": 3, "everyth": 3, "small": [3, 213, 221, 223, 250, 274, 279, 284, 321, 328], "10": [3, 4, 119, 158, 163, 201, 209, 235, 293, 314, 321, 323], "12": 3, "8192": 3, "1024": 3, "actual": [3, 14, 235, 296, 324], "materi": [3, 5], "could": [3, 209], "20_000": 3, "machin": [3, 5, 6, 310], "8gb": 3, "ram": 3, "32": [3, 4, 143, 144, 205, 212, 226, 321], "44": 3, "doubl": 3, 
"bracket": 3, "becaus": [3, 209, 324], "batch": [3, 127, 213, 214, 215, 217, 218, 247, 324], "zip": [3, 4], "haven": 3, "anyth": [3, 192, 324], "result": [3, 14, 56, 72, 106, 114, 117, 127, 139, 144, 156, 158, 177, 187, 196, 201, 256, 321, 322, 325], "similar": [3, 201, 245, 246, 247, 272, 325, 327], "runtim": [3, 321], "section": [3, 6, 173, 284, 321, 322], "access": [3, 36, 209, 296, 307, 324, 328], "origin": [3, 74, 213, 240, 261, 262, 263, 264, 299, 300, 301, 302, 304, 305, 325], "sentencepiec": 3, "pytorch": [3, 5, 221, 322], "compat": [3, 146, 327], "npz": [3, 117, 163, 164, 235, 239, 327], "file": [3, 6, 117, 160, 161, 162, 163, 164, 235, 239, 322, 327], "directli": 3, "argpars": 3, "itertool": [3, 201], "starmap": [3, 201], "np": [3, 4, 325, 326], "torch": [3, 325], "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": [3, 245, 246, 259, 283], "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 46, 57, 221, 280], "submodul": [3, 4, 209, 233, 234, 244, 246], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 209, 298, 307, 318, 321], "savez": [3, 239, 327], "k": [3, 73, 85, 189, 190, 191, 211, 224, 225, 233], "v": [3, 67, 209, 233, 325], "left": [3, 114, 143, 211, 212, 220, 225, 226, 252, 269, 270, 274, 276, 284], "disk": 3, "text": [3, 211, 212, 225, 226, 227, 253, 258, 261, 262, 263, 264, 274, 275, 276, 279, 280, 283, 285, 286, 288, 290, 291, 300, 305], "format": [3, 117, 160, 161, 162, 163, 164, 325], "oper": [3, 5, 7, 32, 171, 178, 184, 203, 209, 259, 305, 321, 322, 323, 324, 325, 326, 328, 329], "dictionari": [3, 63, 117, 161, 162, 200, 209, 232, 240, 245, 246, 308, 320, 327], "represent": [3, 143, 200, 202], "tree_unflatten": 3, "helper": [3, 321], "weight_fil": 3, "incur": 3, "sever": [3, 65, 66, 163, 164, 321, 327], "futur": [3, 249, 323, 324], "pth": 3, "current": [3, 5, 6, 65, 66, 143, 209, 300, 324], "around": 3, "m1": [3, 321, 322, 328], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, "ago": 3, "never": [3, 324], "long": 3, "info": [3, 6], "247": 3, "press": [3, 114], "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], "purs": 3, "greater": [3, 23, 104, 141, 258, 291], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": [3, 322], "off": [3, 6, 324], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 201], "hi": 3, "bundl": 3, "hard": 3, "wet": 3, "he": [3, 263, 264], "were": [3, 328], "cry": 3, "watch": [3, 321], "him": 3, "observ": 3, "numer": [3, 114, 122, 126, 171, 213, 221, 222, 223, 250, 272, 274, 284, 299, 300, 301, 302, 303, 304, 310, 321, 324], "crowd": 3, "wa": [3, 324], "hurri": 3, "437": 3, "330": 3, "second": [3, 74, 123, 125, 127, 182, 192, 212, 226, 272, 280, 300, 302, 303, 304, 322, 328], "spent": 3, "amount": [3, 211, 225], "39": 3, "ms": [3, 321], "By": [3, 322, 325], "bigger": [3, 300], "remain": [3, 192, 216, 217, 218], "almost": 3, "nobodi": 3, "took": 3, "least": [3, 62, 115, 143], "notic": [3, 322, 327], "distanc": [3, 284], "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 114, 173, 235], "ey": 3, "speak": [3, 114], "resum": 3, "postur": 3, "stood": 3, "feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": 3, "quarter": 3, "hour": 3, "made": 3, "immedi": [3, 228], "repli": 3, "again": [3, 6, 209, 321], "hand": [3, 322, 324], "did": 3, "accustom": 3, "thu": [3, 209], 
"question": [3, 324], "reason": [3, 323], "tell": [3, 321, 325], "understand": [3, 261, 262], "579": 3, "690": 3, "num": [3, 116, 152], "500": [3, 328], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 209], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": [3, 324], "kind": 3, "longer": [3, 67, 322], "soon": 3, "unless": [3, 12, 114, 296], "unlik": [3, 12, 217, 218, 240], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": [3, 62, 323, 325], "happi": 3, "ask": 3, "Is": [3, 256, 259], "shop": 3, "bui": 3, "food": 3, "633": 3, "21": [3, 314], "475": 3, "su": 3, "j": [3, 6, 114, 217, 301, 302, 304], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": [3, 252], "enhanc": [3, 252, 324], "rotari": [3, 252], "arxiv": [3, 221, 222, 223, 227, 250, 285, 299, 305], "preprint": [3, 299, 305], "2104": 3, "09864": 3, "zhang": 3, "sennrich": 3, "2019": [3, 303], "root": [3, 159, 174, 250], "advanc": [3, 321], "inform": [3, 4, 6, 161, 162, 209, 213, 220, 247, 322, 328], "system": [3, 6], "shazeer": 3, "2020": 3, "glu": 3, "variant": [3, 283, 304], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 183, 209, 321], "mlp": [4, 209, 259, 298], "inherit": [4, 320], "standard": [4, 36, 56, 127, 147, 149, 259, 261, 263, 266, 326], "idiom": [4, 321], "input_dim": [4, 209, 224, 249], "hidden_dim": [4, 296, 298], "output_dim": [4, 209, 224, 249], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 21, 62, 209, 251, 256, 269, 270, 287, 296, 324], "cross": [4, 271, 273], "entropi": [4, 271, 273], "sub": [4, 74, 152], "commonli": [4, 245, 293, 321], "cross_entropi": [4, 209], "accuraci": 4, "valid": [4, 67, 98, 195, 200, 233, 244, 320], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 298], "batch_siz": [4, 298], "num_epoch": [4, 298], "learning_r": [4, 298, 299, 300, 301, 302, 303, 304, 305, 307, 310, 311, 312, 313, 314, 321], "train_imag": [4, 298], "train_label": [4, 298], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 298], "perm": 4, "permut": 4, "id": [4, 6], "put": [4, 321], "trainabl": [4, 199, 209, 296], "loss_and_grad_fn": [4, 298, 321, 322], "value_and_grad": [4, 209, 245, 296, 298, 309, 321, 322, 325, 326], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 85, 92, 93, 95, 96, 97, 221, 235, 323, 325], "featur": [5, 65, 66, 213, 221, 222, 223, 224, 249, 250, 252, 259, 321, 324], "main": [5, 74, 85, 201, 209], "differ": [5, 180, 283, 322], "lazi": [5, 296, 326], "multi": [5, 214, 215, 323, 325], "cpu": [5, 115, 321, 328], "gpu": [5, 321, 323, 328], "inspir": 5, "jax": [5, 318], "arrayfir": 5, "unifi": 5, "live": [5, 328], "guid": 5, "convers": 5, "regress": [5, 279], "layer": [5, 209, 211, 212, 217, 218, 221, 223, 224, 225, 226, 241, 246, 249, 254, 259, 292, 296], "perceptron": 5, "llm": 5, "infer": [5, 101, 117], "fft": 5, "algebra": 5, "tree": [5, 63, 82, 102, 192, 195, 200, 201, 202, 306, 307, 309, 316, 322], "develop": [5, 6], "document": [5, 46, 57, 161, 162, 321, 322, 323], "pypi": 6, "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": [6, 305], "14": 6, "sonoma": 6, "conda": 6, "forg": 6, "distribut": [6, 145, 146, 147, 149, 153, 154, 224, 261, 262, 263, 264, 266, 267, 274, 277, 282, 284, 293], "probabl": [6, 150, 216, 217, 218, 249, 271, 273, 277, 328], "platform": 6, "processor": 6, "arm": [6, 205], "i386": 6, "switch": 6, "17": 6, "g": [6, 114, 
143, 292, 310, 311, 324, 329], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": [6, 114, 321], "environ": [6, 75, 78], "via": [6, 306, 309, 324, 325], "rosetta": 6, "unam": 6, "p": [6, 145, 209, 216, 217, 218, 284, 302, 304], "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 75, 78, 151, 318, 321], "env": 6, "cmake_build_parallel_level": 6, "edit": [6, 246], "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "either": [6, 10, 46, 56, 57, 62, 76, 77, 79, 100, 103, 104, 112, 113, 114, 122, 127, 129, 132, 134, 180, 192, 212, 226, 254, 263, 264], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 127, 134, 143, 144, 247, 256, 313, 314, 321, 324, 327], "wish": 6, "variabl": [6, 63, 75, 78, 102, 111, 192, 194, 195], "export": 6, "developer_dir": 6, "app": 6, "content": [6, 232, 321], "sdk": 6, "xcrun": 6, "macosx": 6, "show": [6, 205, 321], "unabl": 6, "tool": 6, "select": [6, 196, 228, 232], "sudo": 6, "ouptut": 6, "finder": 6, "iterm": 6, "termin": 6, "click": 6, "uncheck": 6, "window": [6, 211, 212, 225, 226], "restart": 6, "grep": 6, "cmake_host_system_processor": 6, "arm64": 6, "x86_64": 6, "wipe": 6, "cahc": 6, "rf": 6, "devicetyp": 7, "attribut": [7, 8, 25, 203, 240, 296, 316], "kwarg": [8, 163, 164, 329], "union": [9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 69, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 161, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 196, 197, 198, 211, 212, 215, 225, 226, 233, 235, 244, 311], "absolut": [9, 12, 269, 270, 283], "semant": [10, 60, 76, 77, 79, 103, 104, 112, 113, 122, 127, 129, 132, 134, 180, 328], "keepdim": [11, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 114, 126, 128, 130, 131, 142, 171, 181, 193], "reduct": [11, 13, 126, 128, 131, 142, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284], "reduc": [11, 13, 21, 22, 126, 128, 130, 131, 142, 181, 193, 213, 259, 279], "unspecifi": [11, 13, 14, 21, 22, 23, 24, 64, 101, 126, 128, 130, 131, 137, 141, 142, 156, 171, 172, 181, 183, 193, 197, 329], "entir": [11, 13, 21, 22, 126, 128, 130, 131, 142, 181, 193, 217, 218], "singleton": [11, 13, 21, 22, 126, 127, 128, 130, 131, 142, 181, 193], "rtol": 12, "05": [12, 213, 221, 222, 223, 250], "atol": 12, "08": [12, 272, 301, 302, 303, 304, 310], "equal_nan": [12, 59], "approxim": [12, 220, 268, 269, 270], "comparison": [12, 79, 103, 104, 112, 113], "infinit": 12, "equal": [12, 23, 59, 85, 104, 113, 141, 150, 173, 222, 224], "sign": [12, 205, 305], "nan": [12, 59, 108], "ab": [12, 114, 192, 221, 222, 223, 227, 250, 285, 321], "array_equ": 12, "rel": [12, 300, 321], "toler": 12, "boolean": [12, 59, 107, 108, 109, 110, 123, 124, 125, 205, 243, 323], "interv": [14, 116, 150, 154], "increment": 14, "otherwis": [14, 200, 201, 233, 235, 244, 258, 259, 271, 276, 283, 290, 291, 
324, 325], "int32": [14, 98, 114, 150, 205, 323, 326], "convent": [14, 67, 303], "lead": [14, 321], "fraction": 14, "integr": [14, 183, 324], "invers": [15, 16, 17, 18, 19, 20, 81, 89, 90, 91, 92, 93, 94], "cosin": [15, 16, 68, 69, 272, 312, 322], "hyperbol": [16, 18, 20, 69, 170, 186], "sine": [17, 18, 169, 170, 322], "uint32": [21, 22, 23, 24, 146, 205], "minimum": [22, 62, 256, 272], "kth": [23, 141], "partit": 23, "order": [23, 114, 141, 143, 209, 221, 245, 254, 307, 321, 322], "undefin": [23, 141, 323], "sort": [23, 24, 141], "flatten": [23, 24, 114, 139, 141, 156, 172, 183, 184, 200], "dimension": [25, 86, 87, 88, 89, 90, 91, 95, 96, 97, 211, 212, 213, 214, 215, 219, 224, 225, 226, 249, 256, 323, 325], "val": [25, 101], "tupl": [25, 46, 49, 57, 64, 66, 77, 82, 84, 111, 114, 115, 140, 143, 157, 176, 192, 194, 200, 201, 202, 211, 212, 215, 225, 226, 235, 237, 254, 300, 302, 303, 304, 305, 320, 322], "ndarrai": [25, 323, 324, 326], "properti": [26, 34, 43, 49, 51, 240, 243, 308, 322], "argument": [26, 46, 57, 63, 82, 102, 192, 201, 209, 318, 322, 327, 328, 329], "decim": [47, 158], "indices_or_sect": [52, 173], "nest": [56, 63, 209, 296, 320, 322], "ddof": [58, 193], "a_min": 62, "a_max": 62, "edg": [62, 140, 321], "At": 62, "anoth": [62, 127, 180, 196, 209, 228, 321, 322, 323, 328], "fun": [63, 102, 111, 192, 194, 195, 321, 323, 324, 328], "dict": [63, 82, 117, 161, 162, 163, 238, 242, 245, 246, 296, 306, 307, 309, 320, 322, 327], "dure": [63, 216, 217, 218, 325], "arbitrarili": [63, 209, 320, 322, 326], "leaf": [63, 200, 201, 232], "node": [63, 82, 195], "pad": [65, 66, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 211, 212, 214, 215, 225, 226], "dilat": [65, 66], "group": [65, 66, 72, 143, 144, 221, 249], "1d": [65, 67, 161, 184], "convolut": [65, 66, 67, 214, 215, 217, 218], "channel": [65, 66, 213, 214, 215, 217, 218], "c_in": [65, 66], "c_out": [65, 66], "convolv": [65, 66], "2d": [66, 74, 143, 213, 217], "spatial": [66, 211, 221, 225], "symmetr": 66, "discret": [67, 86, 87, 88, 89, 90, 91, 95, 96, 97, 219], "swap": [67, 182, 246, 249], "conv": 67, "filter": [67, 214, 215, 228, 232], "flip": 67, "signal": 67, "bias": [72, 143, 144, 233, 244, 247], "group_siz": [72, 143, 144, 249], "64": [72, 143, 144, 205, 249], "configur": 72, "formal": [72, 143], "notat": [72, 200, 237], "quantiz": [72, 117, 144, 249], "w_i": [72, 143], "hat": [72, 143], "occupi": [72, 143, 144], "diagon": [73, 85, 189, 190, 191], "th": [73, 85], "axis1": [74, 182], "axis2": [74, 182], "subarrai": [74, 173], "remov": [74, 127, 146, 176, 273], "insert": [74, 84, 328], "neg": [74, 98, 109, 225, 226, 247, 274, 282, 284, 323], "taken": [74, 183], "disabl": [75, 321], "mlx_disable_compil": [75, 78, 321], "divis": [76, 100, 143], "quotient": [76, 77, 100], "remaind": 77, "fuction": 77, "faster": [77, 268, 321, 322], "mathrm": [80, 167, 222], "frac": [80, 143, 167, 211, 212, 213, 216, 217, 218, 221, 222, 223, 224, 225, 226, 250, 261, 262, 263, 264, 272, 274, 276, 279, 299, 301, 302, 303, 304, 310], "pi": [80, 256, 322], "int_0": 80, "dt": 80, "erf": [81, 321], "exponenti": [83, 253, 288, 313], "ident": [85, 178, 209, 241], "zero": [85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 189, 190, 191, 198, 209, 211, 212, 216, 217, 218, 235, 260, 261, 262, 263, 264, 265, 266, 267, 293, 300, 323], "whose": [85, 199], "One": [86, 89, 95, 159, 321, 322], "fourier": [86, 87, 88, 89, 90, 91, 95, 96, 97], "truncat": [86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 153], "dft": [86, 87, 88, 89, 90, 91, 95, 96, 97], "rfft": 92, 
"real": [92, 93, 94, 95, 96, 97], "rfft2": 93, "rfftn": 94, "silent": [95, 96, 97], "start_axi": 98, "end_axi": 98, "inclus": 98, "outsid": 98, "clamp": 98, "integ": [100, 114, 140, 143, 144, 145, 150, 173, 187, 195, 205, 219, 323], "floor": 100, "argnam": [102, 192], "neither": [102, 192], "keyword": [102, 163, 164, 192, 201, 209, 318, 327, 329], "strict": [103, 112, 233, 235, 244], "ordinari": 106, "inifn": 107, "infin": [107, 109, 110, 225, 226, 304], "ord": 114, "tabl": [114, 205, 219], "frobeniu": 114, "matric": [114, 115], "strictli": 114, "mathemat": 114, "variou": 114, "purpos": 114, "calcul": [114, 274, 280, 300], "fro": 114, "inf": [114, 247], "largest": 114, "sing": 114, "smallest": 114, "singular": 114, "nuclear": 114, "_f": 114, "sum_": [114, 211, 212, 279], "a_": 114, "valueerror": [114, 235, 322], "refer": [114, 222, 227, 240, 261, 262, 263, 264, 285, 323], "golub": 114, "van": 114, "loan": 114, "baltimor": 114, "md": 114, "john": 114, "hopkin": 114, "univers": 114, "1985": 114, "pg": 114, "la": 114, "arang": [114, 323, 325], "9": [114, 273, 299, 302, 303, 304, 305, 307, 313, 314, 325], "74597": 114, "20": 114, "84804": 114, "41421": 114, "23607": [114, 115], "74166": 114, "24264": 114, "11": 114, "225": 114, "factorizatoin": 115, "q": 115, "894427": 115, "447214": 115, "57771": 115, "50": 116, "evenli": 116, "return_metadata": 117, "binari": [117, 160, 161, 162, 163, 164, 258, 271, 291, 321], "npy": [117, 160, 327], "safetensor": [117, 162, 235, 239, 324, 327], "gguf": [117, 161, 327], "matadata": 117, "unsupport": 117, "tensor": [117, 187, 211, 212, 225, 226, 284, 325], "natur": [118, 120, 324], "logarithm": [118, 119, 120, 121], "log": [120, 122, 126, 274, 277, 279, 282], "plu": 120, "exp": [122, 126, 147, 171, 253, 277, 288, 321, 328], "stabl": [122, 126, 171, 279], "prepend": 127, "negat": 135, "beforehand": 139, "pad_with": 140, "constant_valu": 140, "pad_width": 140, "before_1": 140, "after_1": 140, "before_2": 140, "after_2": 140, "before_n": 140, "after_n": 140, "before_i": 140, "after_i": 140, "extend": 140, "side": [140, 211, 212, 225, 226, 321], "smaller": [141, 305, 321], "everi": [143, 201, 314, 322], "particular": [143, 221], "consecut": [143, 252], "w_1": 143, "w_g": 143, "begin": [143, 212, 226, 253, 258, 276, 283, 288, 290, 291], "align": [143, 212, 226], "max_i": 143, "min_i": 143, "textrm": [143, 220, 268], "round": 143, "pack": [143, 144], "unsign": [143, 144, 205], "lower": [143, 150, 153, 154, 189, 267], "upper": [143, 150, 153, 154, 267], "1st": 143, "signific": 143, "2nd": 143, "dequant": 143, "w_q": 143, "whether": [144, 232, 247, 271, 274, 280], "prng": [145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 318], "num_sampl": 146, "unnorm": [146, 271, 273], "draw": 146, "cdf": [147, 220, 268], "accord": [147, 196, 247, 261, 262, 263, 264], "seed": 148, "loc": 149, "deviat": [149, 261, 263, 266], "low": [150, 154, 267, 293], "high": [150, 154, 209, 219, 267, 293], "bound": [150, 153, 154, 220, 267, 321, 323, 328], "roadcast": 150, "domain": 153, "uniformli": 154, "repetit": 156, "preserv": [157, 322], "reciproc": 159, "arr": [160, 323], "obj": 161, "uncompress": 163, "my_path": 163, "tree_flatten": [163, 201, 202, 209], "transformerencod": 163, "128": [163, 209], "flat_param": 163, "compress": 164, "being": [178, 209], "prevent": [178, 284, 325], "flow": [178, 324], "unchang": [178, 252], "prior": [183, 184], "exclud": 184, "dot": [187, 200, 237, 247], "elsewher": [189, 323], "col": 189, "triangl": 189, "mse": 192, "param": [192, 209, 293, 322], 
"lvalu": 192, "dlvalu": 192, "dparam": 192, "lasso": 192, "l1": [192, 276, 278, 279, 283], "varianc": [193, 213, 221, 274], "divisor": 193, "cotang": 194, "in_ax": [195, 322], "out_ax": [195, 322], "prefix": [195, 200], "fn": [199, 201, 326], "callabl": [199, 200, 201, 228, 229, 232, 254, 259, 260, 261, 262, 263, 264, 265, 266, 267, 311], "wrt": 199, "rho": 299, "06": [274, 284, 299], "paper": [213, 256, 299, 300, 301, 302, 304, 305], "zeiler": 299, "2012": [299, 310], "adapt": [299, 300, 301], "1212": 299, "5701": 299, "v_": [299, 301, 302, 303, 304, 310, 311], "v_t": [299, 301, 302, 303, 304, 310, 311], "g_t": [299, 301, 302, 303, 304, 305, 310, 311], "delta": [276, 299], "w_": [212, 226, 299, 300, 301, 302, 303, 304, 305, 310, 311], "u_t": 299, "epsilon": [213, 221, 222, 223, 250, 272, 274, 299, 301, 302, 303, 304, 310], "u_": 299, "w_t": [299, 301, 302, 303, 304, 305, 310, 311], "lambda": [201, 209, 228, 233, 253, 257, 288, 290, 299, 300, 301, 302, 303, 304, 305, 310, 311, 321, 322], "averag": [211, 212, 299, 300, 302, 303, 304], "denomin": [222, 272, 299, 301, 302, 303, 304, 310], "stabil": [213, 221, 222, 223, 250, 272, 274, 299, 300, 301, 302, 303, 304, 310], "30": 300, "001": 300, "clip_threshold": 300, "decay_r": [300, 313, 314], "beta_1": [300, 302, 303, 304, 305], "weight_decai": [300, 303, 305, 311], "scale_paramet": 300, "relative_step": 300, "warmup_init": 300, "sublinear": 300, "cost": [300, 324], "epsilon_1": 300, "epsilon_2": 300, "parameter_scal": 300, "clip": 300, "unscal": 300, "decai": [300, 303, 305, 311, 312, 313, 314], "duchi": 301, "hazan": 301, "singer": 301, "2011": 301, "subgradi": 301, "onlin": 301, "stochast": [301, 302, 304, 311, 324], "jmlr": 301, "999": [302, 303, 304], "omit": [302, 304], "estim": [302, 304], "kingma": [302, 304], "ba": [302, 304], "2015": [217, 302, 304], "iclr": [302, 303, 304], "m_": [302, 303, 304, 305], "m_t": [302, 303, 304, 305], "beta_2": [302, 303, 304, 305], "contrast": 303, "loshchilov": 303, "hutter": 303, "decoupl": 303, "regular": [217, 227, 285, 303, 321, 323], "adam": [298, 304, 305], "99": [305, 310], "tend": 305, "larger": [252, 305], "10x": 305, "adamw": [298, 305], "maintain": [217, 218, 305], "strength": [305, 311], "wd": 305, "chen": 305, "symbol": 305, "discoveri": 305, "2302": 305, "06675": 305, "c_": 305, "eta": 305, "c_t": 305, "momentum": [213, 305, 307, 311, 321], "appli": [201, 209, 211, 212, 213, 214, 215, 217, 218, 220, 221, 222, 223, 224, 225, 226, 227, 229, 241, 248, 249, 250, 251, 253, 255, 257, 258, 268, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 293, 306, 309, 316, 321], "opt": 306, "superset": [201, 306], "trainable_paramet": [209, 232, 307], "tieleman": 310, "hinton": 310, "lectur": 310, "coursera": 310, "smooth": [273, 283, 310], "dampen": 311, "nesterov": 311, "descent": [311, 321, 324], "mu": 311, "tau": 311, "l2": [276, 279, 311], "penalti": 311, "is_leaf": [200, 201], "arbitrari": [200, 296], "depth": [200, 218, 322], "hello": [200, 202], "charact": 200, "flat": [200, 202], "extra": 201, "closer": 201, "constitut": 201, "dict_kei": [201, 307], "recreat": 202, "world": 202, "42": 202, "byte": 205, "bool_": 205, "uint8": 205, "uint16": 205, "16": [205, 211, 222, 225, 228, 296], "uint64": 205, "int8": 205, "int16": 205, "int64": 205, "done": [209, 216, 321, 324, 325], "manual": 209, "explicitli": [209, 318], "solv": 209, "intuit": 209, "freez": [209, 244, 296], "finetun": 209, "in_dim": [209, 296], "out_dim": [209, 296], "enumer": 209, 
"caus": [209, 321, 324], "local": [209, 217], "scope": 209, "l2_loss": 209, "y_hat": 209, "loss_and_grad": 209, "workhors": 209, "Its": 209, "recurs": [209, 232, 233, 238, 242, 244, 296], "frozen": [209, 233, 242, 244, 249, 296], "individu": [209, 217, 218], "subset": [209, 232], "action": 209, "displai": 209, "tree_map": 209, "count": 209, "num_param": 209, "preclud": 209, "pure": [209, 298], "pattern": [209, 324], "achiev": 209, "other_input": 209, "necessari": 209, "wrap": 209, "apply_to_modul": [209, 233], "children": 209, "filter_and_map": 209, "leaf_modul": 209, "load_weight": [209, 324], "named_modul": 209, "save_weight": 209, "unfreez": [209, 233], "update_modul": 209, "alibi": 209, "batchnorm": 209, "conv1d": 209, "conv2d": 209, "dropout": [209, 217, 218, 241, 259, 321], "dropout2d": 209, "dropout3d": 209, "gelu": [209, 269, 270, 321], "groupnorm": 209, "instancenorm": 209, "layernorm": 209, "mish": 209, "prelu": 209, "quantizedlinear": 209, "relu": [209, 248, 259, 286, 293], "selu": 209, "sequenti": [209, 293], "silu": 209, "sinusoidalpositionalencod": 209, "softshrink": 209, "gelu_approx": [209, 220, 268], "gelu_fast_approx": [209, 220, 268], "binary_cross_entropi": [209, 321], "cosine_similarity_loss": 209, "gaussian_nll_loss": 209, "hinge_loss": 209, "huber_loss": 209, "kl_div_loss": 209, "l1_loss": 209, "log_cosh_loss": 209, "margin_ranking_loss": 209, "mse_loss": 209, "nll_loss": 209, "smooth_l1_loss": 209, "triplet_loss": 209, "init": [209, 248, 293, 298, 312, 313, 314], "uniform": [209, 224, 235, 262, 264, 293, 318, 321, 322, 328], "glorot_norm": 209, "glorot_uniform": 209, "he_norm": 209, "he_uniform": 209, "affin": [213, 221, 222, 223, 224, 249], "track_running_stat": 213, "var": [213, 221, 222, 223, 274], "gamma": [213, 221, 222, 223, 250, 261, 262, 263, 264], "nc": 213, "nlc": [213, 214], "four": 213, "nhwc": [213, 215], "height": [212, 213, 215, 217, 218, 226], "width": [212, 213, 215, 217, 218, 226, 249], "deep": [213, 261, 262, 263, 264], "intern": 213, "covari": 213, "shift": 213, "bn": 213, "in_channel": [214, 215], "out_channel": [214, 215], "kernel_s": [211, 212, 214, 215, 225, 226], "learnabl": [214, 215, 254], "portion": 216, "independ": [217, 218], "nwhc": 217, "whc": 217, "entri": [217, 218], "benefici": [217, 218, 324], "earli": 217, "adjac": 217, "pixel": 217, "correl": 217, "thompson": 217, "goroshin": 217, "jain": 217, "lecun": 217, "bregler": 217, "cvpr": 217, "ndhwc": 218, "dhwc": 218, "medic": 218, "video": 218, "num_embed": 219, "lookup": 219, "typic": [219, 298, 321, 324], "usual": [219, 320, 324], "vocabulari": 219, "approx": 220, "unit": [220, 251, 253, 255, 261, 262, 263, 264, 268, 269, 270, 287, 288, 289], "phi": [220, 268], "geluapprox": 220, "sigma": [220, 255, 261, 262, 263, 264, 269, 270, 289], "60033": [220, 269], "0433603": [220, 269], "gelufast": 220, "773": [220, 270], "regard": 220, "num_group": 221, "pytorch_compat": 221, "split": 221, "preced": 221, "http": [221, 222, 223, 227, 250, 285], "org": [221, 222, 223, 227, 250, 285], "1803": 221, "08494": 221, "inorm": 222, "1607": [222, 223], "08022": 222, "06450": 223, "mathcal": 224, "u": 224, "d_i": 224, "monoton": [227, 285], "1908": [227, 285], "08681": [227, 285], "tanh": [227, 285], "softplu": [227, 285], "map_fn": [228, 232], "filter_fn": [228, 232], "valid_parameter_filt": 228, "apply_fn": 229, "descend": 230, "is_leaf_fn": 232, "found": 232, "drop": 232, "idempot": [233, 244], "attent": [233, 247, 256, 259], "endswith": 233, "file_or_weight": 235, "miss": [235, 327], "ok": [235, 
322], "save_safetensor": [239, 327], "reflect": [240, 321, 323, 325], "certain": [241, 321], "ie": 244, "noop": 244, "unfrozen": 244, "chang": [166, 245, 249, 276, 283, 321, 325], "tracer": 245, "partial": [245, 246, 321, 324], "child": 246, "programmat": 246, "query_input_dim": 247, "key_input_dim": 247, "value_input_dim": 247, "value_dim": 247, "value_output_dim": 247, "head": [247, 259], "aggreg": 247, "linearli": 247, "attend": 247, "num_paramet": 248, "25": 248, "parametr": [248, 286], "classmethod": 249, "from_linear": 249, "quantize_modul": 249, "1910": 250, "07467": 250, "rectifi": [251, 263, 264, 287], "10000": 252, "rotat": 252, "slightli": [252, 328], "angular": 252, "frequenc": [252, 256], "_cos_sin_theta_kei": [], "precomput": [], "_cos_sin_theta_valu": [], "leq": [253, 276, 288], "0507": [253, 288], "67326": [253, 288], "elu": [253, 288], "plain": 254, "known": [255, 289], "swish": [255, 289], "cdot": [255, 269, 270, 272, 275, 289], "min_freq": 256, "0001": 256, "max_freq": 256, "cos_first": 256, "full_turn": 256, "sinusoid": 256, "sin": [256, 322, 326], "lambd": [257, 290], "threshold": [258, 276, 283, 291], "geq": [258, 291], "num_encoder_lay": 259, "num_decoder_lay": 259, "custom_encod": 259, "custom_decod": 259, "norm_first": 259, "checkpoint": 259, "decod": 259, "interact": 259, "mechan": 259, "hidden": 259, "chekpoint": 259, "usag": [259, 321], "expens": 259, "init_fn": [260, 261, 262, 263, 264, 265, 266, 267, 293], "glorot": [261, 262], "fan_in": [261, 262, 263, 264], "fan_out": [261, 262, 263, 264], "difficulti": [261, 262], "feedforward": [261, 262], "191107": 261, "61278": 261, "150594": 261, "363207": 261, "gain": [261, 262, 263, 264], "89613": 261, "53947": 261, "48095": 261, "995016": 261, "223404": 262, "890597": 262, "379159": 262, "776856": 262, "90041": 262, "02264": 262, "912766": 262, "12451": 262, "fan": [263, 264], "delv": [263, 264], "surpass": [263, 264], "human": [263, 264], "level": [263, 264], "imagenet": [263, 264], "classif": [263, 264], "25211": 263, "458835": 263, "177208": 263, "0137595": 263, "6967": 263, "02765": 263, "15268": 263, "75787": 263, "kaim": 264, "0300242": 264, "0184009": 264, "793615": 264, "666329": 264, "64331": 264, "16506": 264, "08619": 264, "79854": 264, "982273": 266, "534422": 266, "380709": 266, "0645099": 266, "883935": 267, "863726": 267, "617261": 267, "417497": 267, "exact": [269, 270], "0003": 269, "015": 270, "with_logit": 271, "predict": [271, 274, 275, 276, 277, 278, 279, 281, 282, 283], "105361": 271, "223144": 271, "20397": 271, "916291": 271, "539245": 271, "prob": 271, "510826": 271, "x1": 272, "x2": 272, "x_1": [272, 280], "x_2": [272, 280], "label_smooth": 273, "hot": 273, "0485873": 273, "348587": 273, "likelihood": [274, 282], "nll": [274, 282], "hing": 275, "y_": [275, 279], "pred": [275, 279], "huber": 276, "l_": [211, 225, 276], "kullback": 277, "leibler": 277, "diverg": 277, "cosh": 279, "logcosh": 279, "sensit": 279, "outlier": 279, "dual": 279, "behavior": [279, 323, 324], "offer": 279, "balanc": 279, "robust": 279, "approach": [279, 322], "task": 279, "inputs1": 280, "inputs2": 280, "margin": [280, 284], "rank": 280, "573409": 280, "765166": 280, "0638": 280, "75596": 280, "225763": 280, "256995": 280, "773433": 280, "formula": 283, "anchor": 284, "triplet": 284, "_p": 284, "degre": 284, "pairwis": 284, "instabl": 284, "subclass": 296, "concept": 296, "mymlp": 296, "in_proj": 296, "basi": 316, "subsequ": 298, "apply_gradi": 298, "implicit": [318, 321, 322], "fine": [318, 324], "grain": 318, 
"control": [318, 324], "manag": [179, 318, 328], "pseudo": 318, "altern": 318, "splittabl": 318, "threefri": 318, "counter": 318, "cycl": 320, "merg": 321, "fuse": 321, "big": 321, "awar": [321, 324], "36788": 321, "compiled_fun": 321, "code": [321, 324], "slow": 321, "Not": 321, "recompil": 321, "stack": 321, "rerun": [321, 324], "too": [321, 324], "frequent": [321, 324], "destroi": 321, "anonym": 321, "don": [321, 328], "nonlinear": 321, "unari": 321, "overhead": [321, 324, 328], "bandwidth": 321, "fusibl": 321, "consider": 321, "versu": 321, "timeit": [321, 322], "tic": 321, "perf_count": 321, "toc": 321, "tpi": 321, "1e3": 321, "1000": [312, 321], "4096": [321, 322, 328], "On": [321, 322, 324], "millisecond": [321, 328], "five": 321, "latest": 321, "won": 321, "trace": 321, "placehold": 321, "insid": 321, "crash": 321, "inspect": [321, 326], "disable_compil": 321, "okai": [321, 324], "intend": 321, "deal": 321, "pretti": [321, 324], "inconveni": 321, "functool": 321, "particularli": 321, "backward": [321, 322], "squeez": 321, "checkout": 321, "compiled_grad_fn": 321, "71828": 321, "outer": [321, 324], "opportun": 321, "idea": [322, 324], "behind": 322, "dfdx": [322, 323], "d2fdx2": 322, "differentiaion": 322, "zero_grad": 322, "detach": 322, "requires_grad": 322, "dloss_dw": 322, "dloss_dx": 322, "lot": 322, "redund": 322, "suppos": [322, 328], "nice": [322, 324], "propag": [322, 323], "stop_gradi": 322, "autom": 322, "contriv": [322, 328], "sake": 322, "clariti": 322, "quit": [322, 325], "power": [322, 325], "difficult": 322, "primit": 322, "issu": [322, 325], "priorit": 322, "xs": 322, "ys": 322, "naive_add": 322, "vmap_add": 322, "total": 322, "390": 322, "wherea": 322, "025": 322, "ten": [322, 324], "Of": 322, "better": [322, 328], "handi": 322, "slice": 323, "ellipsi": 323, "syntax": 323, "idx": 323, "mix": 323, "take_along_axi": 323, "lack": 323, "extrem": [323, 324], "ineffici": [323, 324], "nonzero": 323, "record": 324, "dynam": 324, "easier": 324, "worri": 324, "fun1": 324, "expensive_fun": 324, "consum": 324, "eager": 324, "thank": 324, "weights_fp16": 324, "trade": 324, "bad": 324, "grow": 324, "computation": 324, "costli": 324, "wide": 324, "thousand": 324, "value_and_grad_fn": 324, "implicitli": 324, "anytim": 324, "memoryview": [324, 325], "perfectli": 324, "first_lay": 324, "second_layer_a": 324, "second_layer_b": 324, "protocol": 325, "receiv": 325, "pep": 325, "3118": 325, "view": 325, "a_view": 325, "owndata": 325, "extern": 325, "x_view": 325, "modifi": 325, "df": 325, "x\u00b2": 325, "2x": 325, "indirectli": 325, "modif": 325, "seen": 325, "occur": 325, "incorpor": 325, "incorrect": 325, "experiment": 325, "break": 325, "advis": 325, "intermedi": 325, "jnp": 325, "tf": 325, "page": 326, "composit": 326, "archiv": 327, "savez_compress": 327, "save_gguf": 327, "arr_0": 327, "pool": [211, 212, 225, 226, 328], "advantag": 328, "parallel": 328, "race": 328, "interest": 328, "albeit": 328, "d1": 328, "d2": 328, "matmul": 328, "dens": 328, "twice": 328, "measur": 328, "default_stream": 329, "default_devic": 329, "my_devic": 329, "streamcontext": 179, "context": 179, "avgpool1d": 209, "avgpool2d": 209, "maxpool1d": 209, "maxpool2d": [209, 212], "n_i": [211, 212, 225, 226], "c_j": [211, 212, 225, 226], "ldot": [211, 212, 225, 226], "lfloor": [211, 212, 225, 226], "rfloor": [211, 212, 225, 226], "k_h": [212, 226], "k_w": [212, 226], "h_": [212, 226], "max_": [225, 226], "rmsprop": 298, "adagrad": 298, "adafactor": 298, "adadelta": 298, "adamax": 298, "lion": 298, 
"step_decai": 298, "exponential_decai": 298, "cosine_decai": 298, "decay_step": 312, "beyond": 312, "lr_schedul": [312, 313, 314], "0999961": 312, "06561": 313, "step_siz": 314, "081": 314}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [203, 0, 1, "", "Stream"], [9, 2, 1, "", "abs"], [10, 2, 1, "", "add"], [11, 2, 1, "", "all"], [12, 2, 1, "", "allclose"], [13, 2, 1, "", "any"], [14, 2, 1, "", "arange"], [15, 2, 1, "", "arccos"], [16, 2, 1, "", "arccosh"], [17, 2, 1, "", "arcsin"], [18, 2, 1, "", "arcsinh"], [19, 2, 1, "", "arctan"], [20, 2, 1, "", "arctanh"], [21, 2, 1, "", "argmax"], [22, 2, 1, "", "argmin"], [23, 2, 1, "", "argpartition"], [24, 2, 1, "", "argsort"], [25, 0, 1, "", "array"], [59, 2, 1, "", "array_equal"], [60, 2, 1, "", "broadcast_to"], [61, 2, 1, "", "ceil"], [62, 2, 1, "", "clip"], [63, 2, 1, "", "compile"], [64, 2, 1, "", "concatenate"], [65, 2, 1, "", "conv1d"], [66, 2, 1, "", "conv2d"], [67, 2, 1, "", "convolve"], [68, 2, 1, "", "cos"], [69, 2, 1, "", "cosh"], [70, 2, 1, "", "default_device"], [71, 2, 1, "", "default_stream"], [72, 2, 1, "", "dequantize"], [73, 2, 1, "", "diag"], [74, 2, 1, "", "diagonal"], [75, 2, 1, "", "disable_compile"], [76, 2, 1, "", "divide"], [77, 2, 1, "", "divmod"], [78, 2, 1, "", "enable_compile"], [79, 2, 1, "", "equal"], [80, 2, 1, "", "erf"], [81, 2, 1, "", "erfinv"], [82, 2, 1, "", "eval"], [83, 2, 1, "", "exp"], [84, 2, 1, "", "expand_dims"], [85, 2, 1, "", "eye"], [98, 2, 1, "", "flatten"], [99, 2, 1, "", "floor"], [100, 2, 1, "", "floor_divide"], [101, 2, 1, "", "full"], [102, 2, 1, "", "grad"], [103, 2, 1, "", "greater"], [104, 2, 1, "", "greater_equal"], [105, 2, 1, "", "identity"], [106, 2, 1, "", "inner"], [107, 2, 1, "", "isinf"], [108, 2, 1, "", "isnan"], [109, 2, 1, "", "isneginf"], [110, 2, 1, "", "isposinf"], [111, 2, 1, "", "jvp"], [112, 2, 1, "", "less"], [113, 2, 1, "", "less_equal"], [116, 2, 1, "", "linspace"], [117, 2, 1, "", "load"], [118, 2, 1, "", "log"], [119, 2, 1, "", "log10"], [120, 2, 1, "", "log1p"], [121, 2, 1, "", "log2"], [122, 2, 1, "", "logaddexp"], [123, 2, 1, "", "logical_and"], [124, 2, 1, "", "logical_not"], [125, 2, 1, "", "logical_or"], [126, 2, 1, "", "logsumexp"], [127, 2, 1, "", "matmul"], [128, 2, 1, "", "max"], [129, 2, 1, "", "maximum"], [130, 2, 1, "", "mean"], [131, 2, 1, "", "min"], [132, 2, 1, "", "minimum"], [133, 2, 1, "", "moveaxis"], [134, 2, 1, "", "multiply"], [135, 2, 1, "", "negative"], [136, 2, 1, "", "new_stream"], [137, 2, 1, "", "ones"], [138, 2, 1, "", "ones_like"], [139, 2, 1, "", "outer"], [140, 2, 1, "", "pad"], [141, 2, 1, "", "partition"], [142, 2, 1, "", "prod"], [143, 2, 1, "", "quantize"], [144, 2, 1, "", "quantized_matmul"], [155, 2, 1, "", "reciprocal"], [156, 2, 1, "", "repeat"], [157, 2, 1, "", "reshape"], [158, 2, 1, "", "round"], [159, 2, 1, "", "rsqrt"], [160, 2, 1, "", "save"], [161, 2, 1, "", "save_gguf"], [162, 2, 1, "", "save_safetensors"], [163, 2, 1, "", "savez"], [164, 2, 1, "", "savez_compressed"], [165, 2, 1, "", "set_default_device"], [166, 2, 1, "", "set_default_stream"], [167, 2, 1, "", "sigmoid"], [168, 2, 1, "", "sign"], [169, 2, 1, "", "sin"], [170, 2, 1, "", "sinh"], [171, 2, 1, "", "softmax"], [172, 2, 1, "", "sort"], [173, 2, 1, "", "split"], [174, 2, 1, "", "sqrt"], [175, 2, 1, "", "square"], [176, 2, 1, "", "squeeze"], [177, 2, 1, "", "stack"], [178, 2, 1, "", "stop_gradient"], [179, 2, 1, "", "stream"], [180, 2, 1, "", "subtract"], [181, 2, 1, "", "sum"], [182, 2, 1, "", "swapaxes"], [183, 2, 1, "", "take"], 
[184, 2, 1, "", "take_along_axis"], [185, 2, 1, "", "tan"], [186, 2, 1, "", "tanh"], [187, 2, 1, "", "tensordot"], [188, 2, 1, "", "transpose"], [189, 2, 1, "", "tri"], [190, 2, 1, "", "tril"], [191, 2, 1, "", "triu"], [192, 2, 1, "", "value_and_grad"], [193, 2, 1, "", "var"], [194, 2, 1, "", "vjp"], [195, 2, 1, "", "vmap"], [196, 2, 1, "", "where"], [197, 2, 1, "", "zeros"], [198, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[203, 1, 1, "", "__init__"]], "mlx.core.array": [[26, 3, 1, "", "T"], [25, 1, 1, "", "__init__"], [27, 1, 1, "", "abs"], [28, 1, 1, "", "all"], [29, 1, 1, "", "any"], [30, 1, 1, "", "argmax"], [31, 1, 1, "", "argmin"], [32, 1, 1, "", "astype"], [33, 1, 1, "", "cos"], [34, 3, 1, "", "dtype"], [35, 1, 1, "", "exp"], [36, 1, 1, "", "item"], [37, 1, 1, "", "log"], [38, 1, 1, "", "log1p"], [39, 1, 1, "", "logsumexp"], [40, 1, 1, "", "max"], [41, 1, 1, "", "mean"], [42, 1, 1, "", "min"], [43, 3, 1, "", "ndim"], [44, 1, 1, "", "prod"], [45, 1, 1, "", "reciprocal"], [46, 1, 1, "", "reshape"], [47, 1, 1, "", "round"], [48, 1, 1, "", "rsqrt"], [49, 3, 1, "", "shape"], [50, 1, 1, "", "sin"], [51, 3, 1, "", "size"], [52, 1, 1, "", "split"], [53, 1, 1, "", "sqrt"], [54, 1, 1, "", "square"], [55, 1, 1, "", "sum"], [56, 1, 1, "", "tolist"], [57, 1, 1, "", "transpose"], [58, 1, 1, "", "var"]], "mlx.core.fft": [[86, 2, 1, "", "fft"], [87, 2, 1, "", "fft2"], [88, 2, 1, "", "fftn"], [89, 2, 1, "", "ifft"], [90, 2, 1, "", "ifft2"], [91, 2, 1, "", "ifftn"], [92, 2, 1, "", "irfft"], [93, 2, 1, "", "irfft2"], [94, 2, 1, "", "irfftn"], [95, 2, 1, "", "rfft"], [96, 2, 1, "", "rfft2"], [97, 2, 1, "", "rfftn"]], "mlx.core.linalg": [[114, 2, 1, "", "norm"], [115, 2, 1, "", "qr"]], "mlx.core.random": [[145, 2, 1, "", "bernoulli"], [146, 2, 1, "", "categorical"], [147, 2, 1, "", "gumbel"], [148, 2, 1, "", "key"], [149, 2, 1, "", "normal"], [150, 2, 1, "", "randint"], [151, 2, 1, "", "seed"], [152, 2, 1, "", "split"], [153, 2, 1, "", "truncated_normal"], [154, 2, 1, "", "uniform"]], "mlx.nn": [[210, 0, 1, "", "ALiBi"], [211, 0, 1, "", "AvgPool1d"], [212, 0, 1, "", "AvgPool2d"], [213, 0, 1, "", "BatchNorm"], [214, 0, 1, "", "Conv1d"], [215, 0, 1, "", "Conv2d"], [216, 0, 1, "", "Dropout"], [217, 0, 1, "", "Dropout2d"], [218, 0, 1, "", "Dropout3d"], [219, 0, 1, "", "Embedding"], [220, 0, 1, "", "GELU"], [221, 0, 1, "", "GroupNorm"], [222, 0, 1, "", "InstanceNorm"], [223, 0, 1, "", "LayerNorm"], [224, 0, 1, "", "Linear"], [225, 0, 1, "", "MaxPool1d"], [226, 0, 1, "", "MaxPool2d"], [227, 0, 1, "", "Mish"], [296, 0, 1, "", "Module"], [247, 0, 1, "", "MultiHeadAttention"], [248, 0, 1, "", "PReLU"], [249, 0, 1, "", "QuantizedLinear"], [250, 0, 1, "", "RMSNorm"], [251, 0, 1, "", "ReLU"], [252, 0, 1, "", "RoPE"], [253, 0, 1, "", "SELU"], [254, 0, 1, "", "Sequential"], [255, 0, 1, "", "SiLU"], [256, 0, 1, "", "SinusoidalPositionalEncoding"], [257, 0, 1, "", "Softshrink"], [258, 0, 1, "", "Step"], [259, 0, 1, "", "Transformer"], [268, 2, 1, "", "gelu"], [269, 2, 1, "", "gelu_approx"], [270, 2, 1, "", "gelu_fast_approx"], [285, 2, 1, "", "mish"], [286, 2, 1, "", "prelu"], [287, 2, 1, "", "relu"], [288, 2, 1, "", "selu"], [289, 2, 1, "", "silu"], [290, 2, 1, "", "softshrink"], [291, 2, 1, "", "step"], [199, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[228, 1, 1, "", "apply"], [229, 1, 1, "", "apply_to_modules"], [230, 1, 1, "", "children"], [231, 1, 1, "", "eval"], [232, 1, 1, "", "filter_and_map"], [233, 1, 1, 
"", "freeze"], [234, 1, 1, "", "leaf_modules"], [235, 1, 1, "", "load_weights"], [236, 1, 1, "", "modules"], [237, 1, 1, "", "named_modules"], [238, 1, 1, "", "parameters"], [239, 1, 1, "", "save_weights"], [240, 3, 1, "", "state"], [241, 1, 1, "", "train"], [242, 1, 1, "", "trainable_parameters"], [243, 3, 1, "", "training"], [244, 1, 1, "", "unfreeze"], [245, 1, 1, "", "update"], [246, 1, 1, "", "update_modules"]], "mlx.nn.init": [[260, 2, 1, "", "constant"], [261, 2, 1, "", "glorot_normal"], [262, 2, 1, "", "glorot_uniform"], [263, 2, 1, "", "he_normal"], [264, 2, 1, "", "he_uniform"], [265, 2, 1, "", "identity"], [266, 2, 1, "", "normal"], [267, 2, 1, "", "uniform"]], "mlx.nn.losses": [[271, 2, 1, "", "binary_cross_entropy"], [272, 2, 1, "", "cosine_similarity_loss"], [273, 2, 1, "", "cross_entropy"], [274, 2, 1, "", "gaussian_nll_loss"], [275, 2, 1, "", "hinge_loss"], [276, 2, 1, "", "huber_loss"], [277, 2, 1, "", "kl_div_loss"], [278, 2, 1, "", "l1_loss"], [279, 2, 1, "", "log_cosh_loss"], [280, 2, 1, "", "margin_ranking_loss"], [281, 2, 1, "", "mse_loss"], [282, 2, 1, "", "nll_loss"], [283, 2, 1, "", "smooth_l1_loss"], [284, 2, 1, "", "triplet_loss"]], "mlx.optimizers": [[299, 0, 1, "", "AdaDelta"], [300, 0, 1, "", "Adafactor"], [301, 0, 1, "", "Adagrad"], [302, 0, 1, "", "Adam"], [303, 0, 1, "", "AdamW"], [304, 0, 1, "", "Adamax"], [305, 0, 1, "", "Lion"], [316, 0, 1, "", "Optimizer"], [310, 0, 1, "", "RMSprop"], [311, 0, 1, "", "SGD"], [312, 2, 1, "", "cosine_decay"], [313, 2, 1, "", "exponential_decay"], [314, 2, 1, "", "step_decay"]], "mlx.optimizers.Optimizer": [[306, 1, 1, "", "apply_gradients"], [307, 1, 1, "", "init"], [308, 3, 1, "", "state"], [309, 1, 1, "", "update"]], "mlx.utils": [[200, 2, 1, "", "tree_flatten"], [201, 2, 1, "", "tree_map"], [202, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"]}, "titleterms": {"oper": [0, 1, 297], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 321, 328], "primit": 1, "us": [1, 324, 329], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 259, 319, 321, 322, 324, 326], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 208, 224], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 294], "encod": 3, "full": [3, 101], "gener": 3, "put": 3, "all": [3, 11, 28], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 117, 327], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 
176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "troubleshoot": 6, "from": [6, 323], "sourc": 6, "requir": 6, "option": 6, "metal": 6, "found": 6, "x86": 6, "shell": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 203], "devic": [7, 206], "dtype": [8, 34], "stream": [179, 203, 206, 329], "ab": [9, 27], "add": 10, "allclos": 12, "ani": [13, 29], "arang": 14, "arcco": 15, "arccosh": 16, "arcsin": 17, "arcsinh": 18, "arctan": 19, "arctanh": 20, "argmax": [21, 30], "argmin": [22, 31], "argpartit": 23, "argsort": 24, "arrai": [25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 204, 323, 327], "t": 26, "astyp": 32, "co": [33, 68], "exp": [35, 83], "item": 36, "log": [37, 118], "log1p": [38, 120], "logsumexp": [39, 126], "max": [40, 128], "mean": [41, 130], "min": [42, 131], "ndim": 43, "prod": [44, 142], "reciproc": [45, 155], "reshap": [46, 157], "round": [47, 158], "rsqrt": [48, 159], "shape": 49, "sin": [50, 169], "size": 51, "split": [52, 152, 173], "sqrt": [53, 174], "squar": [54, 175], "sum": [55, 181], "tolist": 56, "transpos": [57, 188], "var": [58, 193], "array_equ": 59, "broadcast_to": 60, "ceil": 61, "clip": 62, "compil": [63, 321], "concaten": 64, "conv1d": [65, 214], "conv2d": [66, 215], "convolv": 67, "cosh": 69, "default_devic": 70, "default_stream": 71, "dequant": 72, "diag": 73, "diagon": 74, "disable_compil": 75, "divid": 76, "divmod": 77, "enable_compil": 78, "equal": 79, "erf": 80, "erfinv": 81, "eval": [82, 231], "expand_dim": 84, "ey": 85, "fft": [86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 207], "fft2": 87, "fftn": 88, "ifft": 89, "ifft2": 90, "ifftn": 91, "irfft": 92, "irfft2": 93, "irfftn": 94, "rfft": 95, "rfft2": 96, "rfftn": 97, "flatten": 98, "floor": 99, "floor_divid": 100, "grad": [102, 209], "greater": 103, "greater_equ": 104, "ident": [105, 265], "inner": 106, "isinf": 107, "isnan": 108, "isneginf": 109, "isposinf": 110, "jvp": 111, "less": 112, "less_equ": 113, "linalg": [114, 115], "norm": 114, "qr": 115, 
"linspac": 116, "log10": 119, "log2": 121, "logaddexp": 122, "logical_and": 123, "logical_not": 124, "logical_or": 125, "matmul": 127, "maximum": 129, "minimum": 132, "moveaxi": 133, "multipli": 134, "neg": 135, "new_stream": 136, "ones": 137, "ones_lik": 138, "outer": 139, "pad": 140, "partit": 141, "quantiz": 143, "quantized_matmul": 144, "random": [145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 318], "bernoulli": 145, "categor": 146, "gumbel": 147, "kei": 148, "normal": [149, 266], "randint": 150, "seed": 151, "truncated_norm": 153, "uniform": [154, 267], "repeat": 156, "save": [160, 327], "save_gguf": 161, "save_safetensor": 162, "savez": 163, "savez_compress": 164, "set_default_devic": 165, "set_default_stream": 166, "sigmoid": 167, "sign": 168, "sinh": 170, "softmax": 171, "sort": 172, "squeez": 176, "stack": 177, "stop_gradi": 178, "subtract": 180, "swapax": 182, "take": 183, "take_along_axi": 184, "tan": 185, "tanh": 186, "tensordot": 187, "tri": 189, "tril": 190, "triu": 191, "value_and_grad": [192, 199], "vjp": 194, "vmap": 195, "where": 196, "zero": 197, "zeros_lik": 198, "nn": [199, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291], "optim": [298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316], "adadelta": 299, "adafactor": 300, "adagrad": 301, "adam": 302, "adamw": 303, "adamax": 304, "lion": 305, "apply_gradi": 306, "init": [260, 261, 262, 263, 264, 265, 266, 267, 307], "state": [240, 308], "updat": [209, 245, 309, 323], "rmsprop": 310, "sgd": 311, "util": [200, 201, 202, 320], "tree_flatten": 200, "tree_map": 201, "tree_unflatten": 202, "data": 205, "type": 205, "support": 205, "algebra": 208, "neural": 209, "network": 209, "quick": [209, 326], "start": [209, 326], "The": 209, "modul": [209, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 296], "class": 209, "paramet": [209, 238], "inspect": 209, "valu": 209, "alibi": 210, "batchnorm": 213, "dropout": 216, "dropout2d": 217, "dropout3d": 218, "embed": 219, "gelu": [220, 268], "groupnorm": 221, "instancenorm": 222, "layernorm": 223, "mish": [227, 285], "appli": 228, "apply_to_modul": 229, "children": 230, "filter_and_map": 232, "freez": 233, "leaf_modul": 234, "load_weight": 235, "named_modul": 237, "save_weight": 239, "train": [241, 243, 321], "trainable_paramet": 242, "unfreez": 244, "update_modul": 246, "multiheadattent": 247, "prelu": [248, 286], "quantizedlinear": 249, "rmsnorm": 250, "relu": [251, 287], "rope": 252, "selu": [253, 288], "sequenti": 254, "silu": [255, 289], "sinusoidalpositionalencod": 256, "softshrink": [257, 290], "step": [258, 291], "constant": 260, "glorot_norm": 261, "glorot_uniform": 262, "he_norm": 263, "he_uniform": 264, "gelu_approx": 269, "gelu_fast_approx": 270, "loss": [271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 295], "binary_cross_entropi": 271, "cosine_similarity_loss": 272, "cross_entropi": 273, "gaussian_nll_loss": 274, "hinge_loss": 275, "huber_loss": 276, "kl_div_loss": 277, "l1_loss": 278, "log_cosh_loss": 279, "margin_ranking_loss": 280, "mse_loss": 281, "nll_loss": 282, "smooth_l1_loss": 283, "triplet_loss": 284, 
"function": [292, 295, 321, 322, 326], "initi": 293, "tree": 320, "basic": [321, 326], "speedup": 321, "debug": 321, "pure": 321, "graph": [321, 324, 326], "automat": 322, "differenti": 322, "vector": 322, "index": 323, "differ": 323, "numpi": [323, 325], "In": 323, "place": 323, "lazi": 324, "evalu": 324, "why": 324, "comput": 324, "onli": 324, "what": 324, "you": 324, "when": 324, "convers": 325, "other": 325, "framework": 325, "pytorch": 325, "jax": 325, "tensorflow": 325, "guid": 326, "serial": 327, "format": 327, "unifi": 328, "memori": 328, "A": 328, "simpl": 328, "specifi": 329, "avgpool1d": 211, "avgpool2d": 212, "maxpool1d": 225, "maxpool2d": 226, "cosine_decai": 312, "exponential_decai": 313, "step_decai": 314, "common": 315, "schedul": 317}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) \ No newline at end of file +Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.atleast_1d", 
"python/_autosummary/mlx.core.atleast_2d", "python/_autosummary/mlx.core.atleast_3d", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.conv_general", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", "python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.quantize", 
"python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.stream", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", "python/_autosummary/stream_class", "python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/linalg", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", "python/nn/_autosummary/mlx.nn.AvgPool1d", "python/nn/_autosummary/mlx.nn.AvgPool2d", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.MaxPool1d", "python/nn/_autosummary/mlx.nn.MaxPool2d", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", 
"python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", "python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.Upsample", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", "python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.elu", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.glu", "python/nn/_autosummary_functions/mlx.nn.hardswish", "python/nn/_autosummary_functions/mlx.nn.leaky_relu", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid", "python/nn/_autosummary_functions/mlx.nn.log_softmax", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.relu6", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.sigmoid", "python/nn/_autosummary_functions/mlx.nn.silu", 
"python/nn/_autosummary_functions/mlx.nn.softmax", "python/nn/_autosummary_functions/mlx.nn.softplus", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/_autosummary_functions/mlx.nn.tanh", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizers", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta", "python/optimizers/_autosummary/mlx.optimizers.Adafactor", "python/optimizers/_autosummary/mlx.optimizers.Adagrad", "python/optimizers/_autosummary/mlx.optimizers.Adam", "python/optimizers/_autosummary/mlx.optimizers.AdamW", "python/optimizers/_autosummary/mlx.optimizers.Adamax", "python/optimizers/_autosummary/mlx.optimizers.Lion", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update", "python/optimizers/_autosummary/mlx.optimizers.RMSprop", "python/optimizers/_autosummary/mlx.optimizers.SGD", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay", "python/optimizers/_autosummary/mlx.optimizers.join_schedules", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule", "python/optimizers/_autosummary/mlx.optimizers.step_decay", "python/optimizers/common_optimizers", "python/optimizers/optimizer", "python/optimizers/schedulers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/function_transforms", "usage/indexing", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "filenames": ["cpp/ops.rst", "dev/extensions.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", 
"python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.atleast_1d.rst", "python/_autosummary/mlx.core.atleast_2d.rst", "python/_autosummary/mlx.core.atleast_3d.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.conv_general.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", "python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.norm.rst", 
"python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.stream.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", 
"python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/_autosummary/stream_class.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/linalg.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.AvgPool1d.rst", "python/nn/_autosummary/mlx.nn.AvgPool2d.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.MaxPool1d.rst", "python/nn/_autosummary/mlx.nn.MaxPool2d.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", "python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", "python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.Upsample.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", "python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", 
"python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.elu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.glu.rst", "python/nn/_autosummary_functions/mlx.nn.hardswish.rst", "python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.log_softmax.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.relu6.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softmax.rst", "python/nn/_autosummary_functions/mlx.nn.softplus.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/_autosummary_functions/mlx.nn.tanh.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizers.rst", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta.rst", "python/optimizers/_autosummary/mlx.optimizers.Adafactor.rst", "python/optimizers/_autosummary/mlx.optimizers.Adagrad.rst", "python/optimizers/_autosummary/mlx.optimizers.Adam.rst", "python/optimizers/_autosummary/mlx.optimizers.AdamW.rst", "python/optimizers/_autosummary/mlx.optimizers.Adamax.rst", "python/optimizers/_autosummary/mlx.optimizers.Lion.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.rst", "python/optimizers/_autosummary/mlx.optimizers.RMSprop.rst", "python/optimizers/_autosummary/mlx.optimizers.SGD.rst", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst", "python/optimizers/_autosummary/mlx.optimizers.step_decay.rst", "python/optimizers/common_optimizers.rst", 
"python/optimizers/optimizer.rst", "python/optimizers/schedulers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.atleast_1d", "mlx.core.atleast_2d", "mlx.core.atleast_3d", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.conv_general", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.divide", "mlx.core.divmod", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.inner", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.norm", "mlx.core.linalg.qr", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.random.bernoulli", 
"mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.repeat", "mlx.core.reshape", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.stream", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.value_and_grad", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_unflatten", "mlx.core.Stream", "Array", "Data Types", "Devices and Streams", "FFT", "Linear Algebra", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.AvgPool1d", "mlx.nn.AvgPool2d", "mlx.nn.BatchNorm", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GroupNorm", "mlx.nn.InstanceNorm", "mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.MaxPool1d", "mlx.nn.MaxPool2d", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softshrink", "mlx.nn.Step", "mlx.nn.Transformer", "mlx.nn.Upsample", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", "mlx.nn.elu", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.glu", "mlx.nn.hardswish", "mlx.nn.leaky_relu", "mlx.nn.log_sigmoid", "mlx.nn.log_softmax", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.relu6", "mlx.nn.selu", "mlx.nn.sigmoid", "mlx.nn.silu", "mlx.nn.softmax", "mlx.nn.softplus", "mlx.nn.softshrink", "mlx.nn.step", "mlx.nn.tanh", "Functions", "Initializers", "Layers", "Loss 
Functions", "Module", "Operations", "Optimizers", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.optimizers.cosine_decay", "mlx.optimizers.exponential_decay", "mlx.optimizers.join_schedules", "mlx.optimizers.linear_schedule", "mlx.optimizers.step_decay", "Common Optimizers", "Optimizer", "Schedulers", "Random", "Transforms", "Tree Utils", "Compilation", "Function Transforms", "Indexing Arrays", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 213, 309, 312, 314, 334, 336, 338, 339, 340, 341, 342, 343, 344, 345, 346], "provid": [1, 3, 76, 106, 191, 196, 205, 213, 232, 237, 239, 248, 249, 250, 253, 263, 264, 308, 312, 345, 347], "open": [1, 6, 14, 154, 158], "flexibl": [1, 5, 250], "which": [1, 3, 4, 5, 6, 14, 32, 66, 70, 78, 86, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 106, 111, 112, 113, 114, 115, 118, 119, 121, 147, 150, 151, 160, 161, 164, 165, 166, 167, 168, 180, 181, 187, 196, 198, 199, 216, 221, 222, 224, 230, 232, 236, 256, 284, 287, 291, 294, 309, 322, 323, 336, 339, 340, 341, 342, 346, 347], "user": [1, 3, 213], "mai": [1, 118, 221, 340, 341], "add": [1, 3, 88, 126, 144, 147, 218, 219, 340, 346], "special": 1, "without": [1, 3, 5, 182, 251, 308, 338, 339, 342, 343, 346], "much": [1, 3, 215, 216, 229, 230, 339, 342], "hassl": 1, "while": [1, 3, 6, 161, 256, 342, 343], "librari": [1, 6, 213], "suppli": 1, "effici": [1, 3, 5, 221, 256, 342, 344], "can": [1, 3, 5, 6, 10, 14, 46, 57, 66, 78, 79, 80, 81, 83, 86, 107, 108, 116, 117, 118, 126, 133, 136, 138, 149, 150, 154, 157, 158, 165, 184, 196, 213, 216, 223, 230, 236, 248, 258, 264, 284, 309, 312, 314, 322, 323, 336, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347], "compos": [1, 5, 213, 339, 340, 344], "ani": [1, 3, 5, 14, 66, 204, 205, 206, 213, 224, 232, 233, 236, 244, 253, 263, 264, 309, 331, 338, 339, 340, 342, 344, 345, 346], "number": [1, 14, 51, 66, 69, 70, 76, 89, 106, 109, 115, 120, 144, 147, 148, 150, 153, 156, 158, 160, 162, 191, 193, 196, 198, 199, 213, 217, 218, 219, 221, 222, 225, 226, 251, 252, 263, 264, 266, 267, 268, 269, 328, 330, 331, 336, 339, 340, 347], "applic": [1, 6], "aris": [1, 343], "case": [1, 3, 92, 95, 96, 98, 99, 100, 101, 102, 119, 131, 161, 180, 216, 221, 230, 262, 294, 300, 305, 306, 322, 323, 339, 340, 344, 345, 346, 347], "where": [1, 4, 89, 147, 196, 199, 215, 216, 217, 218, 219, 220, 221, 222, 224, 225, 226, 227, 228, 229, 230, 236, 252, 254, 262, 268, 269, 273, 274, 275, 276, 285, 291, 297, 300, 302, 306, 323, 340, 341], "new": [1, 4, 63, 78, 137, 140, 161, 181, 192, 205, 251, 312, 314, 325, 330, 339, 341, 342, 343], "function": [1, 2, 3, 4, 5, 12, 66, 81, 84, 85, 106, 115, 118, 119, 131, 171, 196, 198, 199, 203, 205, 213, 224, 231, 233, 237, 248, 252, 255, 257, 258, 259, 261, 262, 263, 274, 275, 276, 277, 278, 280, 281, 296, 301, 303, 304, 305, 306, 307, 309, 314, 323, 336, 338, 341, 342, 343, 345], "highli": [1, 6], "optim": [1, 2, 4, 5, 249, 339, 340, 342], "ar": [1, 2, 3, 4, 5, 6, 12, 14, 59, 63, 65, 66, 70, 71, 78, 86, 89, 91, 92, 94, 95, 97, 98, 100, 101, 102, 106, 111, 112, 113, 114, 115, 118, 119, 121, 131, 143, 144, 145, 
147, 148, 149, 150, 151, 154, 157, 158, 167, 168, 180, 181, 187, 196, 198, 199, 204, 205, 217, 218, 219, 220, 221, 222, 225, 226, 227, 228, 239, 251, 253, 264, 282, 284, 285, 308, 312, 321, 323, 338, 339, 340, 341, 342, 343, 344, 345, 346], "need": [1, 3, 4, 5, 59, 147, 213, 249, 250, 260, 263, 336, 340, 342, 343, 344, 346], "For": [1, 3, 6, 118, 147, 206, 213, 217, 221, 232, 237, 245, 248, 253, 256, 260, 264, 266, 267, 268, 269, 309, 336, 339, 340, 341, 342, 343, 344, 345, 346], "you": [1, 3, 4, 5, 6, 213, 260, 263, 309, 336, 339, 340, 341, 343, 345, 346], "design": [1, 2, 5, 336, 346], "your": [1, 3, 6, 312, 340, 342], "own": [1, 6, 343], "link": [1, 6], "top": [1, 228, 264], "core": [1, 2, 3, 4, 213, 215, 216, 217, 226, 229, 230, 239, 242, 246, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 309, 312, 314, 339, 343, 344], "we": [1, 2, 3, 4, 76, 147, 148, 213, 223, 258, 319, 321, 336, 338, 339, 340, 342, 346], "inner": [1, 339], "work": [1, 3, 6, 339, 340, 341, 342], "go": [1, 3, 340], "over": [1, 3, 4, 11, 13, 21, 22, 23, 24, 68, 69, 70, 92, 95, 98, 101, 110, 118, 120, 130, 132, 134, 135, 145, 146, 163, 175, 176, 185, 191, 197, 217, 218, 219, 225, 227, 254, 284, 328, 331, 340], "simpl": [1, 3, 4, 213, 223, 308, 339, 340, 342], "learn": [1, 2, 4, 5, 217, 225, 226, 227, 252, 254, 315, 316, 317, 318, 319, 320, 321, 326, 327], "step": [1, 3, 4, 14, 213, 316, 323, 328, 330, 331, 332, 339], "involv": [1, 314, 339], "ad": [1, 2, 6, 226, 312, 315, 316, 317, 318, 319, 320, 326, 342, 345], "let": [1, 2, 3, 339, 340, 342, 343], "s": [1, 2, 3, 4, 34, 43, 66, 75, 76, 91, 92, 94, 95, 97, 98, 100, 101, 106, 118, 121, 134, 143, 147, 150, 162, 165, 166, 183, 196, 197, 199, 203, 213, 216, 230, 236, 237, 239, 243, 244, 248, 314, 323, 324, 336, 339, 340, 342, 343, 344, 345, 346], "sai": [1, 3, 309, 342], "would": [1, 3, 264, 341, 342, 343, 346], "like": [1, 3, 5, 142, 202, 222, 290, 323, 325, 339, 340, 342, 343, 344, 346], "an": [1, 3, 4, 6, 8, 11, 13, 25, 60, 61, 62, 63, 68, 69, 70, 86, 89, 102, 105, 109, 118, 121, 132, 135, 137, 141, 142, 144, 146, 147, 148, 160, 161, 162, 177, 180, 186, 187, 188, 191, 193, 199, 201, 202, 204, 205, 213, 215, 216, 220, 225, 227, 228, 229, 230, 232, 251, 252, 253, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 275, 297, 309, 315, 325, 329, 334, 336, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347], "take": [1, 3, 4, 66, 106, 115, 133, 136, 142, 148, 188, 196, 198, 199, 202, 251, 336, 340, 341, 345, 346, 347], "two": [1, 10, 12, 59, 61, 78, 80, 83, 91, 94, 100, 107, 108, 116, 117, 119, 126, 131, 133, 136, 138, 143, 186, 216, 230, 253, 264, 277, 283, 339, 340, 341, 346], "arrai": [1, 3, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 213, 217, 232, 239, 242, 246, 252, 264, 265, 266, 267, 268, 269, 270, 271, 272, 274, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 306, 309, 
312, 327, 328, 329, 330, 331, 332, 339, 340, 342, 343, 344, 346], "x": [1, 2, 3, 4, 84, 109, 118, 148, 151, 162, 167, 171, 194, 195, 200, 205, 213, 215, 216, 217, 224, 225, 226, 227, 228, 229, 230, 231, 232, 252, 254, 255, 260, 262, 264, 273, 274, 275, 276, 277, 278, 279, 280, 281, 294, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 312, 314, 321, 339, 340, 341, 342, 343, 344, 346], "y": [1, 2, 3, 4, 200, 213, 217, 221, 225, 226, 227, 228, 254, 286, 291, 294, 314, 317, 339, 340, 342, 343], "scale": [1, 3, 76, 147, 148, 153, 221, 222, 251, 256, 257, 260, 264, 300, 316], "them": [1, 3, 213, 237, 248, 346], "both": [1, 10, 80, 81, 83, 107, 108, 116, 117, 118, 126, 133, 136, 138, 150, 184, 215, 216, 226, 229, 230, 264, 314, 339, 340, 344, 346], "some": [1, 2, 3, 4, 237, 248, 323, 339, 340, 342], "coeffici": [1, 315, 316, 318, 319, 320, 321], "alpha": [1, 147, 273, 295, 297, 300, 319, 326], "beta": [1, 76, 147, 217, 225, 226, 227, 294, 318, 319, 320, 321], "respect": [1, 2, 4, 106, 147, 196, 205, 213, 217, 224, 225, 226, 227, 312, 340, 344], "togeth": [1, 4, 147, 205], "get": [1, 2, 4, 6, 69, 70, 74, 75, 152, 213, 339, 340, 342, 346], "z": [1, 339, 342], "well": [1, 3, 213, 237, 248, 251, 342], "veri": [1, 3, 251, 342, 346], "easili": 1, "do": [1, 3, 6, 213, 238, 248, 309, 312, 319, 339, 340, 342], "just": [1, 4, 339, 341], "write": [1, 3, 213, 343], "out": [1, 6, 215, 216, 221, 222, 229, 230, 245, 339, 340, 341], "follow": [1, 3, 4, 5, 6, 14, 71, 76, 118, 147, 213, 275, 276, 288, 315, 316, 317, 318, 319, 320, 321, 327, 336, 339, 340, 346], "import": [1, 2, 3, 4, 6, 118, 167, 196, 204, 205, 206, 213, 215, 216, 217, 226, 229, 230, 239, 264, 282, 284, 291, 309, 312, 339, 340, 341, 342, 343, 344], "mx": [1, 2, 3, 4, 102, 118, 119, 121, 167, 196, 213, 215, 216, 217, 226, 229, 230, 232, 239, 243, 255, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 279, 282, 283, 284, 288, 291, 298, 307, 309, 312, 314, 336, 339, 340, 341, 342, 343, 344, 345, 346, 347], "def": [1, 2, 3, 4, 196, 213, 312, 339, 340, 341, 342, 343, 346], "simple_axpbi": 1, "float": [1, 12, 14, 56, 104, 105, 118, 148, 149, 153, 154, 157, 158, 209, 217, 220, 221, 222, 225, 226, 227, 232, 254, 256, 260, 262, 263, 264, 265, 266, 267, 268, 269, 271, 272, 283, 284, 285, 287, 291, 294, 295, 305, 306, 315, 316, 317, 318, 319, 320, 321, 326, 327, 328, 329, 331, 332], "return": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 36, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 213, 234, 236, 238, 240, 241, 242, 246, 253, 265, 266, 267, 268, 269, 270, 271, 272, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 309, 312, 322, 338, 339, 340, 341, 342, 343, 345, 346], "thi": [1, 3, 4, 6, 11, 12, 13, 14, 21, 22, 23, 24, 82, 115, 118, 119, 126, 130, 131, 132, 134, 135, 145, 146, 150, 170, 175, 176, 177, 185, 187, 197, 213, 220, 221, 222, 233, 234, 236, 237, 240, 241, 242, 246, 248, 249, 250, 251, 253, 
262, 266, 267, 268, 269, 275, 276, 277, 290, 306, 312, 323, 338, 339, 340, 342, 343, 345], "perform": [1, 3, 5, 70, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 131, 148, 162, 175, 187, 213, 225, 263, 268, 269, 339, 341, 342, 346], "leav": [1, 86, 205], "differenti": [1, 5], "howev": [1, 213, 224, 225, 323, 336, 339, 342, 343], "vector": [1, 2, 5, 110, 115, 118, 187, 198, 199, 223, 284, 344], "math": [1, 3, 295, 339], "often": [1, 222], "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 118, 148, 204, 343], "same": [1, 3, 6, 12, 59, 63, 66, 69, 70, 71, 96, 99, 100, 101, 106, 115, 144, 150, 162, 198, 200, 213, 216, 217, 220, 225, 226, 230, 253, 265, 266, 267, 268, 269, 270, 271, 272, 284, 295, 312, 322, 336, 339, 341, 346], "realli": 1, "part": [1, 340, 341], "doe": [1, 3, 6, 213, 339, 341, 342, 343], "fast": [1, 224, 276, 346], "so": [1, 3, 6, 106, 196, 220, 264, 314, 339, 342, 346], "decid": [1, 205, 236], "want": [1, 3, 340, 346], "reli": 1, "acceler": [1, 217], "framework": [1, 5], "continu": [1, 340], "impos": 1, "our": [1, 3, 4, 258, 315, 316, 317, 318, 320, 321], "assumpt": 1, "also": [1, 3, 4, 5, 6, 10, 79, 80, 81, 83, 92, 95, 98, 101, 107, 108, 116, 117, 126, 133, 136, 138, 147, 184, 203, 213, 236, 249, 251, 253, 259, 274, 300, 302, 308, 314, 339, 340, 341, 342, 343, 344, 347], "assum": [1, 3, 119, 205, 213, 215, 216, 225, 229, 230], "how": [1, 3, 4, 213, 215, 216, 218, 219, 223, 229, 230, 264, 322, 339, 341, 346], "gradient": [1, 2, 4, 106, 182, 196, 203, 213, 237, 249, 253, 263, 290, 312, 314, 315, 316, 318, 319, 320, 321, 322, 325, 327, 339, 340, 341, 342, 343, 344], "ins": 1, "what": [1, 3, 205], "coincid": 1, "right": [1, 6, 147, 215, 216, 224, 229, 230, 264, 275, 276, 285, 287, 295], "place": [1, 3, 162, 342, 343], "cours": [1, 340], "The": [1, 3, 4, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 34, 43, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 165, 166, 171, 172, 173, 174, 175, 176, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 209, 215, 216, 217, 218, 219, 220, 221, 222, 223, 225, 226, 227, 228, 229, 230, 233, 239, 243, 244, 249, 250, 251, 253, 254, 256, 258, 260, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 309, 312, 314, 315, 316, 317, 318, 319, 320, 321, 324, 326, 327, 328, 331, 334, 339, 340, 341, 342, 343, 344, 345, 346, 347], "structur": [1, 322, 340], "from": [1, 3, 4, 5, 76, 78, 97, 98, 100, 101, 105, 118, 121, 131, 142, 147, 149, 150, 151, 152, 154, 157, 167, 180, 182, 184, 187, 188, 200, 202, 204, 205, 206, 213, 228, 237, 239, 251, 266, 267, 268, 269, 271, 272, 285, 294, 309, 338, 339, 340, 342, 343, 344, 345, 346], "frontend": 1, "api": [1, 340], "redirect": 1, "when": [1, 3, 5, 6, 66, 70, 118, 121, 218, 219, 264, 268, 269, 288, 294, 312, 330, 336, 339, 346], "appropri": [1, 339], "fallback": 1, "metal": 1, "vjp": [1, 344], "jvp": [1, 344], "In": [1, 3, 4, 131, 147, 205, 213, 221, 225, 312, 315, 317, 318, 320, 321, 322, 338, 339, 340, 
342, 345, 346], "one": [1, 3, 6, 56, 60, 65, 69, 70, 88, 89, 118, 124, 131, 148, 150, 180, 184, 248, 264, 284, 346], "sentenc": 1, "comput": [1, 2, 3, 4, 5, 6, 76, 106, 115, 118, 126, 134, 143, 147, 175, 182, 191, 196, 197, 198, 203, 213, 217, 225, 226, 227, 237, 249, 253, 254, 256, 263, 266, 267, 268, 269, 275, 276, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 314, 315, 316, 318, 319, 320, 321, 325, 339, 340, 344, 346], "graph": [1, 3, 4, 5, 340], "rule": 1, "evalu": [1, 3, 4, 5, 86, 115, 198, 213, 235, 245, 312, 314, 339, 344], "said": [1, 3], "start": [1, 2, 3, 5, 6, 14, 120, 177, 339, 341, 346], "discuss": 1, "more": [1, 4, 8, 56, 78, 131, 165, 166, 213, 217, 221, 256, 260, 263, 264, 266, 267, 268, 269, 336, 339, 340, 341, 344, 346], "detail": [1, 8, 213, 221, 256, 260, 264, 266, 267, 268, 269, 315, 317, 318, 320, 321, 341, 344], "thei": [1, 2, 3, 12, 71, 258, 286, 312, 321, 338, 339, 342, 344, 345, 346], "c": [1, 3, 118, 209, 215, 216, 217, 218, 219, 221, 222, 226, 229, 230, 343, 344, 346], "scalar": [1, 10, 12, 25, 36, 56, 59, 63, 65, 80, 81, 83, 104, 105, 106, 107, 108, 116, 117, 118, 120, 126, 127, 128, 129, 131, 133, 136, 138, 144, 154, 157, 158, 165, 184, 196, 200, 203, 295, 340, 342, 344], "sum": [1, 2, 10, 110, 118, 130, 175, 191, 213, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 341, 343], "element": [1, 9, 10, 15, 16, 17, 18, 19, 20, 23, 51, 64, 72, 73, 76, 80, 81, 83, 84, 85, 87, 89, 103, 104, 107, 108, 111, 112, 113, 114, 116, 117, 122, 123, 124, 125, 126, 127, 128, 129, 133, 136, 138, 139, 145, 147, 148, 159, 160, 163, 171, 172, 173, 174, 178, 179, 184, 187, 189, 190, 196, 200, 220, 221, 222, 231, 252, 256, 278, 280, 281, 296, 297, 299, 302, 303, 304, 339, 340], "wise": [1, 9, 10, 15, 16, 17, 18, 19, 20, 64, 72, 73, 80, 81, 83, 84, 85, 87, 103, 104, 107, 108, 116, 117, 122, 123, 124, 125, 126, 127, 128, 129, 133, 136, 138, 139, 159, 163, 171, 172, 173, 174, 178, 179, 184, 189, 190, 221, 222, 231, 252, 278, 280, 281, 296, 297, 299, 302, 303, 304, 339], "numpi": [1, 3, 4, 5, 10, 12, 14, 63, 80, 81, 83, 107, 108, 116, 117, 126, 131, 133, 136, 138, 184, 342, 344, 345], "style": [1, 10, 12, 80, 81, 83, 107, 108, 116, 117, 126, 131, 133, 136, 138, 184], "broadcast": [1, 10, 12, 63, 65, 80, 81, 83, 105, 107, 108, 116, 117, 126, 131, 133, 136, 138, 149, 150, 157, 158, 184, 188, 200, 251], "between": [1, 5, 65, 102, 263, 283, 286, 287, 290, 330, 342, 346], "input": [1, 2, 3, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 77, 78, 80, 81, 83, 84, 85, 87, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 142, 143, 144, 145, 146, 147, 148, 156, 159, 160, 161, 162, 163, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 194, 195, 196, 197, 199, 200, 202, 215, 216, 217, 218, 219, 221, 222, 223, 225, 226, 227, 228, 229, 230, 251, 253, 254, 256, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 285, 286, 287, 288, 290, 291, 293, 295, 306, 309, 339, 340, 341, 344, 345], "upcast": 1, "const": [1, 285], "factor": [1, 119, 264, 284, 329, 332], "streamordevic": 1, "stream": [1, 5, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 
54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 197, 200, 201, 202, 346], "schedul": [1, 314, 328, 329, 330, 331, 332, 334, 346], "itself": [1, 323], "call": [1, 3, 4, 26, 104, 213, 223, 237, 248, 258, 312, 314, 323, 339, 340, 342], "other": [1, 3, 5, 118, 213, 238, 312, 321, 339, 341, 342, 344], "within": [1, 23], "simplest": [1, 213], "wai": [1, 3, 6, 213, 264, 339, 340, 341], "about": [1, 3, 4, 342, 346], "term": [1, 285, 315, 316, 317, 318, 319, 320, 326], "exist": [1, 3, 237, 248], "auto": [1, 6], "ax": [1, 11, 13, 21, 22, 57, 88, 91, 92, 94, 95, 97, 98, 100, 101, 102, 110, 118, 130, 132, 134, 135, 144, 146, 175, 180, 185, 186, 191, 192, 197, 340], "multipli": [1, 147, 148, 220, 260, 264], "earlier": 1, "goal": 1, "themselv": [1, 339], "contain": [1, 3, 23, 24, 49, 66, 78, 96, 97, 98, 118, 127, 128, 129, 147, 177, 200, 213, 236, 238, 239, 244, 263, 291, 309, 312, 339, 340], "act": [1, 290], "data": [1, 4, 5, 8, 14, 89, 99, 100, 105, 109, 120, 141, 157, 193, 201, 222, 265, 266, 267, 268, 269, 270, 271, 272, 339, 341, 343], "nor": [1, 106, 196], "rather": [1, 340, 346], "easi": [1, 213], "interfac": 1, "block": [1, 3, 263], "A": [1, 3, 5, 6, 7, 49, 59, 66, 106, 115, 118, 119, 121, 130, 131, 147, 149, 150, 151, 153, 154, 157, 158, 177, 181, 183, 196, 198, 199, 203, 204, 205, 206, 207, 213, 217, 221, 225, 226, 227, 236, 240, 241, 249, 250, 254, 258, 260, 263, 266, 267, 269, 276, 295, 296, 312, 314, 318, 320, 322, 323, 325, 330, 339, 340, 342, 343], "It": [1, 3, 6, 106, 170, 196, 213, 250, 253, 322, 334, 343, 345], "creat": [1, 3, 6, 89, 109, 183, 213, 312, 314, 330, 339, 341, 343], "output": [1, 3, 6, 11, 12, 13, 14, 23, 63, 66, 89, 96, 99, 100, 101, 105, 106, 109, 118, 120, 130, 132, 134, 135, 141, 142, 145, 146, 149, 150, 151, 153, 154, 157, 158, 167, 168, 175, 180, 185, 188, 193, 196, 197, 198, 199, 200, 201, 202, 215, 216, 217, 218, 219, 226, 228, 229, 230, 251, 253, 262, 263, 264, 266, 267, 268, 269, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 309, 339, 340, 341, 342, 343, 344, 345, 346], "given": [1, 11, 13, 23, 63, 65, 67, 76, 78, 86, 88, 90, 91, 92, 93, 94, 95, 99, 100, 101, 105, 118, 130, 132, 134, 135, 140, 146, 154, 162, 170, 175, 177, 185, 193, 194, 195, 197, 207, 215, 216, 220, 229, 230, 236, 251, 283, 285, 291], "set": [1, 3, 4, 6, 66, 79, 82, 169, 170, 183, 224, 228, 235, 237, 244, 245, 248, 249, 253, 256, 262, 283, 295, 306, 312, 316, 323, 336, 340, 342], "further": [1, 6, 340], "class": [1, 3, 4, 7, 8, 25, 207, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 284, 312, 315, 316, 317, 318, 319, 320, 321, 326, 327, 334], "under": [1, 118], "These": [1, 66, 188, 284, 346], "word": 1, "bit": [1, 76, 147, 148, 209, 232, 253], "abstract": 1, "back": [1, 3, 343], "give": [1, 3, 4, 23, 339], "ourselv": 1, "concret": [1, 228, 342, 346], "imag": [1, 219, 221, 222, 264], "public": [1, 213], "explicit": [1, 323, 336, 
343], "alpha_": 1, "beta_": 1, "must": [1, 6, 65, 105, 118, 149, 150, 154, 157, 158, 200, 264, 343], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 213, 309, 339, 340, 344], "avoid": [1, 339], "unnecessari": [1, 3], "alloc": [1, 312], "respons": 1, "space": [1, 120, 293], "void": 1, "eval_cpu": 1, "std": [1, 271], "overrid": [1, 82], "eval_gpu": 1, "jacobian": [1, 115, 198, 344], "product": [1, 110, 115, 131, 143, 146, 191, 198, 251, 344], "primal": [1, 115, 198], "tangent": [1, 19, 20, 115, 189, 190, 307], "int": [1, 3, 4, 7, 11, 13, 14, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 47, 49, 52, 55, 56, 58, 63, 67, 68, 69, 70, 76, 77, 78, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 106, 109, 118, 120, 130, 132, 134, 135, 137, 141, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 175, 176, 177, 180, 181, 185, 186, 187, 188, 191, 192, 193, 194, 195, 196, 197, 199, 201, 207, 213, 215, 216, 217, 218, 219, 223, 225, 226, 227, 228, 229, 230, 251, 253, 254, 256, 260, 263, 277, 283, 284, 288, 293, 295, 312, 328, 330, 331, 332], "argnum": [1, 106, 196, 340], "cotan": 1, "across": [1, 225], "pair": [1, 144, 239, 256], "repres": [1, 3, 291, 295, 343], "axi": [1, 3, 4, 11, 13, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 52, 55, 58, 67, 78, 88, 90, 93, 96, 97, 98, 99, 100, 101, 102, 118, 130, 132, 134, 135, 137, 144, 145, 146, 150, 160, 175, 176, 177, 180, 181, 185, 186, 187, 188, 192, 197, 199, 215, 216, 229, 230, 277, 281, 283, 284, 288, 293, 295, 303, 341], "correspond": [1, 11, 13, 56, 65, 76, 78, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 130, 132, 135, 146, 185, 191, 199, 205, 340], "dimens": [1, 3, 11, 13, 21, 22, 43, 49, 56, 60, 61, 62, 66, 69, 70, 78, 88, 97, 98, 100, 101, 102, 110, 118, 119, 130, 131, 132, 134, 135, 146, 147, 150, 156, 185, 188, 191, 192, 197, 217, 218, 219, 221, 222, 225, 226, 227, 251, 254, 256, 263, 264, 277, 284, 339, 340], "vmap": [1, 340, 342, 344], "print": [1, 2, 3, 4, 6, 204, 205, 206, 213, 336, 339, 340, 341, 342, 343, 344], "ostream": 1, "os": [1, 6], "equival": [1, 26, 46, 57, 81, 104, 187, 224, 250, 252, 253, 255, 257, 259, 261], "check": [1, 6, 59, 239, 340, 341], "bool": [1, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 56, 58, 59, 66, 70, 118, 121, 130, 132, 134, 135, 146, 148, 149, 154, 157, 158, 185, 197, 217, 218, 219, 225, 226, 227, 228, 232, 236, 237, 239, 245, 248, 251, 253, 256, 260, 263, 264, 282, 285, 316, 327], "is_equival": 1, "privat": 1, "fall": 1, "eval": [1, 2, 3, 4, 213, 312, 314, 339, 340, 342, 344], "deriv": [1, 340, 342], "base": [1, 118, 123, 125, 256, 263, 312, 314, 320, 334, 336, 339, 341], "abov": [1, 3, 6, 147, 194, 213, 264, 319, 340, 341, 342, 346], "demonstr": [1, 343], "treat": [1, 97, 98, 100, 101, 187, 264, 339], "paramet": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 
200, 201, 202, 203, 204, 205, 206, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 232, 233, 236, 237, 239, 244, 245, 248, 249, 250, 251, 252, 253, 254, 256, 258, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 308, 309, 312, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 325, 326, 327, 328, 329, 330, 331, 332, 334, 339, 340, 342], "produc": [1, 66, 251, 309], "through": [1, 182, 263, 321, 339, 340, 343], "construct": [1, 4, 77, 105, 141, 201], "its": [1, 6, 131, 145, 156, 193, 203, 206, 213, 253, 318, 319, 320, 343, 346], "type": [1, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 204, 213, 263, 265, 266, 267, 268, 269, 270, 271, 272, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 339, 341], "shape": [1, 3, 4, 46, 59, 63, 66, 68, 69, 70, 78, 90, 93, 96, 99, 100, 101, 105, 115, 131, 141, 142, 149, 150, 151, 153, 154, 157, 158, 161, 188, 198, 200, 201, 202, 213, 215, 216, 217, 218, 219, 221, 222, 226, 228, 229, 230, 239, 265, 266, 267, 268, 269, 270, 271, 272, 284, 295, 314, 339, 340, 341, 344, 346], "pass": [1, 3, 4, 46, 57, 143, 144, 196, 203, 204, 205, 213, 237, 248, 249, 250, 253, 258, 339, 342], "re": [1, 4, 6, 309], "now": [1, 3, 6, 253, 339, 343], "promot": 1, "dtype": [1, 3, 14, 25, 32, 56, 89, 102, 105, 109, 118, 119, 120, 141, 151, 153, 154, 157, 158, 193, 201, 209, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 328, 329, 330, 331, 332, 339, 340, 341, 343, 344, 345], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 14, 89, 109, 118, 119, 120, 141, 151, 153, 157, 158, 193, 201, 209, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 328, 329, 330, 331, 332, 339, 340, 341, 342, 343, 344, 345], "non": [1, 6, 246, 296, 312], "point": [1, 2, 3, 6, 104, 148, 209], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 32, 99, 100, 101, 121, 232, 343], "up": [1, 3, 253, 339], "determin": [1, 78, 243, 345], "x_cast": 1, "astyp": [1, 3, 232, 343], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 14, 47, 52, 58, 67, 68, 69, 70, 77, 78, 89, 102, 106, 118, 119, 144, 149, 153, 158, 160, 162, 177, 181, 193, 194, 195, 196, 197, 199, 204, 213, 215, 216, 217, 218, 219, 220, 221, 222, 224, 225, 226, 227, 229, 230, 252, 255, 256, 260, 261, 262, 263, 265, 266, 267, 268, 269, 270, 271, 272, 273, 275, 276, 278, 279, 282, 284, 286, 287, 291, 294, 295, 297, 298, 299, 300, 305, 306, 309, 312, 315, 316, 318, 319, 320, 321, 323, 326, 327, 328, 329, 330, 331, 332, 336, 339, 340, 341, 342, 343, 344, 345], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 213, 339], "resolv": 1, "No": [1, 3], "happen": [1, 3, 263, 314, 339, 342], "alon": [1, 343], "effect": [1, 221, 339, 342], "onli": [1, 3, 5, 6, 59, 68, 69, 70, 118, 147, 209, 213, 236, 
237, 239, 245, 248, 249, 250, 312, 339, 340, 345, 346], "execut": [1, 6, 60, 61, 62, 343, 346], "depend": [1, 2, 56, 118, 341, 345, 346], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 197, 200, 201, 202, 207, 346, 347], "specifi": [1, 14, 32, 69, 70, 78, 97, 98, 105, 106, 118, 120, 137, 141, 150, 160, 186, 187, 188, 191, 192, 196, 199, 201, 217, 262, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 340, 346], "memori": [1, 5, 263, 312, 316, 339, 342, 343], "ha": [1, 3, 4, 5, 56, 66, 78, 96, 97, 99, 100, 101, 106, 150, 217, 228, 312, 314, 339, 341, 342, 344, 346], "been": [1, 3, 342], "try": [1, 6], "naiv": [1, 340], "gener": [1, 2, 14, 70, 89, 97, 98, 120, 149, 153, 154, 157, 158, 263, 336, 339, 341, 342, 347], "version": [1, 6, 76, 126, 130, 147, 175, 199, 336, 340, 341], "declar": 1, "member": [1, 213, 242, 246], "method": [1, 3, 7, 8, 25, 207, 213, 243, 263, 312, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 334], "each": [1, 49, 76, 86, 131, 144, 147, 148, 150, 160, 167, 168, 177, 192, 199, 200, 221, 222, 223, 225, 256, 263, 282, 284, 336, 339, 342], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 66, 213, 339], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 84, 148, 196, 213, 215, 229, 315, 316, 317, 318, 319, 320, 321, 326, 327, 339, 340, 346], "readi": 1, "fill": [1, 105, 142, 193, 202, 265, 266, 267, 268, 269, 271, 272], "malloc_or_wait": 1, "synchron": [1, 339], "avail": [1, 2, 3, 4, 6, 8, 209, 346], "There": [1, 213, 264, 339], "wait": [1, 3], "here": [1, 3, 339, 340, 342, 345, 346], "request": 1, "pressur": 1, "condit": [1, 200, 346], "set_data": 1, "nbyte": 1, "collect": [1, 205, 338], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 49, 69, 76, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 105, 109, 118, 147, 148, 150, 161, 177, 180, 213, 215, 216, 218, 219, 223, 226, 229, 230, 253, 264, 316, 342, 343], "map": [1, 4, 121, 205, 223, 232], "linear": [1, 3, 4, 5, 205, 213, 224, 239, 253, 255, 257, 259, 264, 273, 274, 275, 276, 277, 279, 298, 299, 300, 302, 309, 312, 323, 331, 339], "indic": [1, 12, 21, 22, 23, 24, 106, 111, 112, 113, 114, 177, 187, 188, 196, 245, 247, 284, 291, 330, 341], "offset": [1, 3, 78], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 68, 69, 70, 215, 216, 218, 219, 229, 230, 256, 341], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 11, 12, 13, 14, 21, 22, 23, 24, 59, 66, 67, 68, 69, 70, 74, 75, 76, 77, 78, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 106, 109, 118, 119, 120, 121, 130, 132, 134, 135, 141, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 160, 161, 162, 169, 170, 176, 177, 180, 181, 183, 185, 191, 192, 193, 194, 195, 196, 197, 199, 201, 209, 215, 216, 217, 218, 219, 226, 
228, 229, 230, 232, 237, 239, 245, 248, 251, 252, 253, 256, 260, 261, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 312, 315, 316, 317, 318, 319, 320, 321, 326, 327, 336, 338, 339, 340, 343, 345, 347], "row": [1, 89, 109, 147, 193], "major": 1, "henc": [1, 147, 339], "doesn": [1, 213], "addit": [1, 3, 10, 121, 217, 225, 227, 251, 254, 312, 340], "abl": [1, 147], "all": [1, 4, 6, 12, 23, 60, 61, 62, 66, 69, 70, 89, 92, 95, 98, 101, 131, 144, 145, 180, 213, 232, 233, 237, 240, 241, 242, 246, 248, 251, 253, 260, 263, 264, 309, 312, 334, 336, 339, 341, 342, 344, 347], "incom": 1, "accordingli": 1, "dispatch": 1, "float16": [1, 121, 209, 232, 342, 343], "bfloat16": [1, 343], "complex64": 1, "throw": [1, 66], "error": [1, 6, 84, 85, 177, 224, 253, 274, 275, 276, 290, 292, 340, 343], "encount": [1, 340], "unexpect": [1, 14], "regist": [1, 4], "op": [1, 143, 237, 342], "assert": 1, "2": [1, 2, 3, 4, 69, 77, 78, 84, 91, 94, 96, 97, 98, 99, 100, 101, 102, 118, 119, 125, 131, 147, 156, 191, 193, 194, 195, 209, 213, 215, 216, 219, 224, 229, 230, 254, 260, 264, 265, 266, 267, 268, 269, 270, 271, 272, 275, 284, 285, 287, 294, 295, 309, 312, 315, 317, 318, 319, 323, 326, 339, 340, 341, 342, 343, 344, 345, 346], "1": [1, 3, 4, 14, 23, 24, 68, 69, 70, 77, 78, 90, 91, 93, 94, 96, 97, 98, 99, 100, 101, 102, 110, 118, 119, 131, 143, 145, 147, 150, 153, 158, 171, 176, 187, 196, 209, 213, 215, 216, 217, 218, 219, 220, 221, 222, 224, 225, 226, 227, 228, 229, 230, 252, 254, 256, 260, 262, 264, 266, 267, 268, 269, 270, 271, 272, 273, 275, 276, 277, 280, 281, 282, 283, 284, 285, 286, 287, 288, 290, 291, 293, 294, 295, 300, 301, 303, 304, 306, 309, 312, 314, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 328, 329, 330, 331, 332, 339, 340, 341, 343, 344, 345, 346], "correct": [1, 6, 318, 319, 320, 341, 342], "els": [1, 3, 213, 237, 342], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 12, 68, 69, 70, 102, 119, 121, 131, 147, 340, 341, 343, 345], "have": [1, 3, 6, 12, 59, 60, 61, 62, 97, 98, 100, 101, 131, 150, 204, 251, 258, 321, 323, 338, 339, 341, 342, 346], "rememb": 1, "3": [1, 3, 6, 102, 118, 119, 264, 267, 269, 278, 316, 321, 336, 339, 341, 343, 344], "complic": 1, "keep": [1, 11, 13, 21, 22, 130, 132, 134, 135, 146, 185, 197, 213, 236, 340, 342], "mind": [1, 3], "half": [1, 14, 154, 158, 256, 342], "precis": [1, 3, 213, 224, 322, 339], "direct": [1, 3, 234, 321, 346], "fix": [1, 3, 6, 342], "possibl": [1, 3, 131, 177, 223, 339, 341, 346], "due": 1, "transpos": [1, 3, 26, 148], "aren": 1, "guarante": 1, "fit": [1, 147, 346], "requir": [1, 3, 213, 342, 343], "column": [1, 89, 109, 147], "inplac": 1, "expect": [1, 3, 218, 219, 220, 221, 222, 260, 263, 285, 339, 341], "answer": 1, "copi": [1, 3, 5, 145, 176, 343], "simpli": [1, 3, 6, 255, 273, 279, 298, 307, 312, 339, 340], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 78, 102, 106, 127, 129, 131, 145, 156, 186, 191, 196, 204, 213, 216, 225, 230, 264, 283, 291, 316, 318, 319, 320, 323, 339, 340, 343, 346], "mode": [1, 71, 235, 245, 247, 264, 268, 269], "i": [1, 3, 115, 118, 213, 218, 219, 221, 222, 237, 290, 319, 330, 339, 340], "e": [1, 4, 6, 84, 115, 171, 217, 218, 219, 221, 222, 225, 226, 227, 237, 254, 280, 281, 303, 308, 314, 317, 339, 342, 347], "match": [1, 6, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 239, 264, 284, 341, 343], "transposit": 1, "data_s": 1, "items": 1, "flag": [1, 339, 343], 
"copy_inplac": 1, "copytyp": 1, "n": [1, 3, 25, 68, 69, 70, 89, 90, 92, 93, 95, 96, 99, 101, 109, 193, 197, 215, 216, 217, 218, 219, 221, 222, 229, 230, 264, 290, 295], "incx": 1, "inci": 1, "great": 1, "But": [1, 346], "criteria": 1, "luckili": [1, 342], "alwai": [1, 204, 340], "With": 1, "final": [1, 2, 3, 4, 331], "singl": [1, 4, 86, 115, 121, 144, 198, 216, 230, 339, 341, 345], "row_contigu": 1, "col_contigu": 1, "common": [1, 314, 339, 342], "hit": 1, "mileston": 1, "enough": [1, 342], "run": [1, 3, 4, 5, 6, 7, 143, 207, 217, 232, 315, 316, 318, 319, 320, 339, 342, 346, 347], "If": [1, 3, 6, 11, 12, 13, 14, 21, 22, 23, 24, 56, 59, 65, 67, 71, 77, 78, 86, 99, 100, 101, 104, 105, 106, 118, 121, 130, 131, 132, 134, 135, 141, 144, 145, 146, 150, 160, 175, 176, 177, 185, 187, 188, 191, 196, 197, 199, 201, 205, 217, 218, 219, 225, 227, 228, 237, 239, 248, 253, 256, 258, 260, 264, 282, 284, 295, 316, 339, 340, 342, 345, 346, 347], "plan": [1, 339], "stop": [1, 3, 14, 120, 182, 340, 341], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 346], "silicon": [1, 3, 5, 6, 346], "address": 1, "shade": 1, "languag": [1, 209], "kernel": [1, 68, 69, 70, 215, 229, 339, 341], "written": 1, "help": [1, 3, 339, 346], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6, 340], "cpp": 1, "algorithm": [1, 264, 321], "launch": [1, 341], "exactli": [1, 3, 239, 340], "mani": [1, 177, 218, 219, 223, 339, 342], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 66, 205, 217, 232, 239, 244, 250, 314, 316, 319, 321, 322, 323, 327, 328, 329, 330, 331, 332, 339, 342], "assign": [1, 312], "axpby_gener": 1, "buffer": [1, 343], "constant": [1, 3, 6, 144, 213, 217, 225, 227, 254, 285, 295, 326, 328, 339, 343], "4": [1, 3, 76, 102, 118, 147, 148, 167, 209, 215, 216, 217, 226, 229, 230, 253, 263, 264, 266, 267, 268, 282, 339, 341, 344, 346], "5": [1, 2, 3, 6, 118, 149, 215, 217, 220, 221, 222, 226, 229, 261, 264, 265, 268, 269, 294, 305, 309, 326, 328, 329, 339, 340, 341], "x_stride": 1, "6": [1, 3, 118, 167, 263, 267, 275, 276, 278, 285, 295, 299, 326, 339, 341, 344], "y_stride": 1, "7": [1, 3, 118, 147, 341], "ndim": [1, 102, 118, 264], "8": [1, 3, 6, 118, 147, 209, 216, 226, 230, 263, 283, 315, 316, 317, 318, 319, 320, 326, 339, 341, 344, 346], "uint": 1, "index": [1, 5, 7, 23, 88, 89, 106, 145, 187, 188, 196, 207], "thread_position_in_grid": 1, "convert": [1, 56, 60, 61, 62, 102, 253, 342, 343, 344], "instanti": [1, 4, 342], "uniqu": [1, 336], "host": 1, "name": [1, 121, 147, 148, 165, 166, 167, 168, 213, 225, 236, 239, 241, 341, 345], "identifi": [1, 204, 338], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "compil": [1, 5, 6, 79, 82, 340, 342], "mlx_ext": 1, "metallib": [1, 6], "see": [1, 3, 4, 6, 8, 27, 28, 29, 30, 31, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 118, 165, 166, 213, 217, 221, 224, 235, 252, 253, 255, 256, 257, 259, 260, 261, 264, 266, 267, 268, 269, 274, 275, 276, 300, 339, 340, 341, 344, 346], "later": [1, 6], "co": [1, 260, 340], "locat": [1, 249, 250, 346], "share": [1, 5, 76, 147, 148], "register_librari": 1, "potenti": 1, "path": [1, 6, 167, 168, 239], "tri": 1, "load": [1, 4, 5, 239], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 25, 36, 56, 149, 154, 157, 158, 199, 204, 205, 221, 263, 338], "why": [1, 3], "packag": [1, 2, 4, 309], "process": [1, 3, 70, 71, 205, 222, 223, 263, 338], "logic": [1, 127, 128, 129], "grid": 1, "shown": 1, "below": [1, 6, 118, 193, 195, 209, 264, 342], "prepar": [1, 3], "carri": 1, 
"should": [1, 2, 3, 4, 6, 78, 115, 147, 188, 196, 198, 204, 213, 218, 219, 221, 222, 245, 251, 258, 284, 286, 291, 312, 338, 339, 340, 342, 343, 347], "d": [1, 3, 77, 78, 110, 118, 131, 143, 187, 193, 194, 195, 206, 222, 315, 318, 320, 346], "ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 131, 140, 170, 213, 328, 329, 331, 332, 339, 342, 344, 346], "sure": [1, 3, 6, 213, 339], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 71, 106, 118, 121, 164, 165, 166, 167, 168, 196, 204, 206, 232, 233, 236, 237, 239, 241, 243, 248, 264, 268, 269, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295], "encod": [1, 256, 260, 263, 284], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 213], "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": [1, 110, 291, 340], "than": [1, 3, 56, 71, 78, 81, 107, 108, 116, 117, 131, 205, 256, 262, 264, 291, 294, 306, 316, 321, 339, 340, 346], "max": [1, 118, 133, 229, 230, 252, 278, 283, 285, 286, 291, 295, 297, 299, 316, 320, 339, 340, 346], "allow": [1, 213, 250, 312, 334, 341, 344], "tgp_size": 1, "min": [1, 118, 136, 252, 278, 297, 299], "maxtotalthreadsperthreadgroup": 1, "3d": [1, 217, 222, 264], "mtl": 1, "group_dim": 1, "grid_dim": 1, "divid": [1, 104, 147], "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 342, 344], "thing": [1, 3], "note": [1, 3, 6, 12, 66, 68, 69, 97, 98, 118, 147, 150, 213, 264, 343, 345], "befor": [1, 3, 6, 23, 145, 236, 263, 323, 341, 342], "move": [1, 137, 346], "track": [1, 213, 217], "activ": [1, 6, 221, 262, 263, 296, 305, 306, 308, 339], "command": [1, 6], "instead": [1, 6, 213, 250, 260, 340, 342], "end_encod": 1, "end": [1, 78, 147, 216, 230, 262, 287, 294, 300, 305, 306, 331], "until": [1, 342, 344], "limit": [1, 65, 341], "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 167, 168, 342], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3, 340], "far": [1, 314], "built": [1, 6, 263, 342], "includ": [1, 233, 244, 253, 285, 339, 340, 341, 344, 345, 347], "forward": [1, 196, 339, 342], "diff": 1, "push": 1, "along": [1, 21, 22, 66, 67, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 118, 160, 175, 177, 181, 187, 188, 191, 213, 277], "similarli": [1, 6, 131, 340, 342], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 192, 260], "arg": [1, 3, 8, 46, 57, 86, 167, 168], "push_back": 1, "fulli": [1, 5, 339, 343, 346], "overal": 1, "directori": [1, 3, 6], "extens": [1, 121, 209, 243, 345], "h": [1, 68, 69, 118, 216, 217, 219, 221, 222, 230, 340, 342], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 25, 207, 213, 312], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6, 339], "hold": [1, 3, 8, 118, 339], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 312, 321, 330, 343, 346], "compon": [1, 3], "etc": [1, 147, 213, 264], "pybind11_modul": 1, "m": [1, 6, 89, 118, 193, 215, 216, 229, 230, 315], "doc": [1, 4], "sampl": [1, 2, 3, 120, 149, 150, 151, 154, 157, 158, 266, 267, 268, 269, 271, 272, 285, 291, 295, 336, 339], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 
101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 166, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 199, 200, 201, 202, 204, 205, 207, 215, 216, 224, 229, 230, 232, 236, 237, 248, 251, 260, 263, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 316, 334, 341], "r": [1, 3, 119, 196, 221], "pbdoc": 1, "most": [1, 150, 213, 325, 339, 340, 341, 342], "complex": [1, 97, 98, 99, 100, 101, 149, 154, 157, 158, 204, 213, 250, 339, 340], "bell": 1, "whistl": 1, "liter": [1, 264, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295], "string": [1, 343, 345], "modul": [1, 3, 4, 203, 253, 258, 263, 309, 325, 338, 339, 342], "ensur": [1, 6, 290], "caster": 1, "find_packag": 1, "config": 1, "add_librari": 1, "sourc": [1, 137, 192], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, "target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 196, 282, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 339], "destin": [1, 137], "automat": [1, 5, 121, 344, 345, 346], "practic": [1, 339], "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 339], "describ": [1, 342], "util": [1, 3, 5, 6, 167, 213], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 209], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, "package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 59, 66, 70, 118, 121, 130, 132, 134, 135, 146, 185, 197, 200, 204, 205, 209, 225, 226, 228, 237, 239, 248, 251, 253, 256, 260, 263, 264, 282, 285, 316, 327, 343], "python_requir": 1, "even": [1, 3, 66, 339, 342, 343], "though": [1, 3, 339, 342, 343], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 23, 102, 104, 145, 147, 217, 225, 227, 251, 263, 294, 339, 346], "plai": [1, 3], "ones": [1, 3, 142, 167, 193, 249, 250, 253, 341], "b": [1, 3, 10, 12, 59, 80, 81, 83, 104, 107, 108, 110, 116, 117, 118, 126, 127, 129, 131, 133, 136, 138, 143, 147, 184, 191, 196, 228, 264, 277, 340, 341, 342, 343, 344, 345, 346], "f": [1, 2, 4, 118, 213, 319, 339, 343], "item": [1, 2, 3, 4, 205, 342, 343, 344], "true": [1, 2, 3, 12, 59, 66, 118, 121, 148, 175, 200, 204, 205, 209, 213, 217, 218, 219, 225, 226, 227, 228, 236, 237, 239, 245, 248, 253, 256, 260, 263, 264, 282, 290, 316], "quick": [1, 5], "benchmark": [1, 339], "compar": [1, 59, 339], "time": [1, 3, 6, 213, 215, 216, 229, 230, 339, 340, 342, 346], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 263, 346], "random": [1, 2, 3, 4, 5, 215, 216, 217, 226, 229, 230, 239, 245, 339, 340, 346, 347], "normal": [1, 2, 3, 157, 213, 215, 216, 217, 225, 226, 227, 229, 230, 254, 263, 266, 268, 343, 346], "bench": 1, "warm": [1, 339], "rang": [1, 2, 3, 4, 6, 14, 102, 120, 267, 269, 275, 276, 314, 328, 329, 330, 331, 332, 
336, 339, 340, 342, 346], "100": [1, 2, 3, 331, 339, 340, 342, 346], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4, 339], "custom": [1, 263], "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 315, 316, 317, 318, 319, 320, 326, 339], "awai": [1, 3], "good": [1, 6, 339, 346], "nn": [1, 3, 4, 167, 205, 213, 309, 312, 314, 323, 325, 339, 342], "grad": [1, 2, 4, 196, 314, 322, 339, 340, 341, 342, 344], "full": [1, 4, 46, 57, 71, 175, 249, 250, 285, 339, 342], "implement": [2, 4, 118, 223, 236, 251, 256, 258, 260, 262, 263, 264, 306, 315, 316, 317, 318, 320, 321, 322, 334, 339, 340, 343], "basic": [2, 162, 340], "model": [2, 4, 5, 167, 203, 205, 213, 232, 235, 237, 239, 243, 245, 247, 248, 249, 251, 263, 309, 312, 314, 322, 323, 325, 339, 342], "problem": [2, 4, 213], "metadata": [2, 121, 165, 166], "num_featur": [2, 217], "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 205, 336, 339, 342], "sgd": [2, 4, 314, 321, 323, 328, 329, 332, 339], "lr": [2, 321], "01": [2, 279, 319], "rate": [2, 315, 316, 317, 318, 319, 320, 321, 326, 327], "ll": [2, 4, 287, 339, 340], "synthet": 2, "dataset": [2, 342], "matrix": [2, 76, 77, 89, 109, 118, 119, 131, 147, 148, 253, 270, 309], "ground": [2, 3, 284, 294], "truth": [2, 284, 294], "w_star": 2, "valu": [2, 3, 9, 12, 14, 21, 22, 36, 56, 59, 65, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 118, 120, 144, 149, 150, 151, 153, 154, 157, 158, 165, 187, 188, 196, 199, 203, 204, 205, 209, 216, 220, 221, 222, 226, 228, 230, 236, 251, 252, 261, 262, 263, 265, 282, 283, 284, 285, 286, 287, 289, 290, 291, 292, 293, 294, 306, 312, 316, 319, 328, 329, 331, 332, 340], "gaussian": [2, 224, 274, 275, 276, 285], "nois": 2, "exampl": [2, 3, 4, 14, 102, 118, 119, 183, 187, 213, 215, 216, 217, 226, 229, 230, 237, 239, 245, 248, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 309, 314, 323, 328, 329, 330, 331, 332, 336, 340, 341, 342, 343, 344, 345], "noisi": 2, "label": [2, 284, 291], "ep": [2, 217, 225, 226, 227, 254, 283, 285, 295, 315, 316, 317, 318, 319, 320, 326], "1e": [2, 4, 12, 217, 225, 226, 227, 254, 283, 285, 295, 315, 316, 317, 318, 319, 320, 323, 326, 328, 329, 330, 331, 332], "us": [2, 3, 4, 5, 6, 14, 76, 79, 81, 102, 118, 119, 131, 147, 148, 160, 161, 204, 213, 216, 221, 223, 224, 228, 230, 232, 236, 243, 249, 250, 251, 253, 256, 260, 263, 264, 268, 269, 275, 276, 283, 309, 312, 314, 315, 316, 318, 319, 320, 321, 322, 323, 334, 336, 338, 339, 340, 341, 344, 346], "weight": [2, 68, 69, 70, 205, 213, 239, 243, 253, 282, 284, 312, 316, 319, 321, 323, 327, 340, 342], "squar": [2, 3, 109, 163, 178, 196, 205, 213, 254, 292, 294, 315, 316, 318, 319, 320, 340, 343], "loss": [2, 4, 196, 213, 314, 339, 340, 342], "loss_fn": [2, 4, 314, 339, 340], "w": [2, 69, 76, 147, 148, 196, 216, 217, 219, 221, 222, 228, 230, 327, 340], "mean": [2, 3, 4, 153, 196, 213, 217, 225, 237, 254, 271, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 339, 340, 343], "grad_fn": [2, 339, 340], "initi": [2, 3, 213, 217, 225, 226, 227, 228, 252, 254, 265, 266, 267, 268, 269, 270, 271, 272, 312, 323, 328, 329, 331, 332, 339, 342], "randomli": [2, 3, 220, 221, 222], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 213, 328, 329, 330, 331, 332, 336, 339, 342, 346], "verifi": [2, 6], "close": [2, 5, 6, 12], "error_norm": 2, "5f": 2, "someth": [2, 3, 341], "00005": 2, "00364": 2, "complet": [2, 3, 6, 249, 250, 340, 346], "logist": [2, 171, 275, 276, 302], "github": [2, 4, 6, 339], "repo": [2, 4, 6, 339], "enabl": [3, 6, 66, 82, 
327], "larg": [3, 213, 251, 290, 339, 342], "ish": 3, "transform": [3, 5, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 203, 213, 217, 225, 227, 228, 236, 237, 248, 253, 256, 341], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 23, 117, 145, 256, 294], "200": [3, 330], "line": [3, 342, 343], "python": [3, 36, 49, 56, 86, 204, 205, 206, 312, 322, 323, 325, 338, 340, 343], "neural": [3, 5, 223, 266, 267, 296, 309, 312, 326], "network": [3, 5, 217, 221, 223, 266, 267, 309, 312, 326], "build": [3, 5, 268, 312, 339], "concis": 3, "architectur": [3, 6, 213, 250, 346], "notabl": [3, 5], "rope": [3, 213], "posit": [3, 23, 78, 102, 106, 114, 137, 145, 196, 205, 213, 218, 219, 251, 256, 260, 285, 295], "option": [3, 11, 13, 14, 21, 22, 23, 24, 25, 30, 31, 60, 61, 62, 66, 67, 68, 69, 70, 71, 76, 77, 78, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 106, 109, 113, 114, 118, 119, 120, 121, 130, 132, 134, 135, 141, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 160, 161, 166, 175, 176, 177, 180, 181, 185, 187, 188, 191, 192, 193, 194, 195, 196, 197, 199, 201, 204, 205, 215, 216, 217, 218, 219, 228, 229, 230, 232, 236, 237, 239, 248, 251, 253, 256, 260, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 336, 339, 345, 347], "kei": [3, 149, 150, 151, 153, 154, 156, 157, 158, 204, 205, 236, 237, 248, 251, 323, 336, 338, 340], "cach": [3, 339], "concaten": 3, "project": [3, 251], "llamaattent": 3, "self": [3, 4, 7, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 56, 57, 58, 207, 213, 296, 312], "dim": [3, 223, 225, 226, 227, 251, 254, 256, 260, 263], "num_head": [3, 251, 263], "super": [3, 4, 213, 312], "tradit": [3, 221, 222, 256], "query_proj": 3, "bia": [3, 76, 147, 148, 205, 213, 218, 219, 228, 237, 239, 248, 251, 253, 318, 319, 320, 323, 340], "key_proj": 3, "value_proj": 3, "out_proj": [3, 312], "__call__": [3, 4, 213, 312], "queri": [3, 251], "mask": [3, 245, 251, 341], "extract": [3, 77, 78, 213, 236, 312], "l": [3, 4, 213, 215, 217, 218, 229, 294], "reshap": [3, 118, 264, 341], "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 84, 217, 225, 226, 227, 228, 254, 260, 266, 267, 268, 269, 315, 317, 318, 319, 326, 339], "score": [3, 291], "softmax": [3, 213, 281, 284], "values_hat": 3, "rm": [3, 6, 316], "swiglu": 3, "rmsnorm": [3, 213], "llamaencoderlay": 3, "mlp_dim": [3, 263], "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 213, 259, 275, 276, 280, 302], "instanc": [3, 147, 206, 213, 226, 232, 233, 234, 237, 240, 241, 248, 250, 258, 312, 343], "embed": [3, 213, 256, 260, 283], "emb": [3, 223, 260], "token": [3, 223], "num_lay": [3, 4, 314], "vocab_s": 3, "norm": [3, 225, 295, 320, 321], "multiheadattent": [3, 213], "create_additive_causal_mask": 3, "list": [3, 8, 11, 13, 25, 28, 29, 39, 40, 41, 42, 44, 52, 55, 56, 58, 60, 61, 62, 63, 66, 67, 70, 86, 88, 91, 92, 94, 95, 97, 98, 100, 101, 105, 106, 115, 118, 130, 132, 134, 135, 141, 144, 146, 149, 150, 151, 153, 154, 157, 158, 161, 165, 175, 177, 180, 181, 185, 191, 192, 196, 197, 198, 201, 204, 206, 213, 237, 239, 240, 241, 242, 246, 248, 249, 250, 312, 318, 319, 320, 321, 330, 338, 339, 340, 342], "still": [3, 6, 118, 339, 342], "consid": [3, 12, 59, 204, 205, 225, 338], "train": [3, 4, 213, 217, 220, 221, 222, 235, 237, 248, 266, 267], "ignor": [3, 65, 66, 86, 316], "whatsoev": 
3, "rest": [3, 205, 256], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 336], "temp": 3, "causal": 3, "save": [3, 5, 121, 147, 165, 166, 167, 168, 243, 342], "append": [3, 131, 339, 342], "store": 3, "per": [3, 4, 76, 147, 148, 217, 225, 226, 227, 254, 334, 339, 342], "care": [3, 342], "last": [3, 24, 56, 92, 95, 97, 98, 100, 101, 102, 110, 119, 131, 150, 176, 191, 218, 219, 221, 222, 225, 264, 343], "logit": [3, 150, 282, 284, 339], "next": [3, 4], "categor": 3, "lazili": [3, 213], "noth": [3, 213, 342], "yet": [3, 118, 213, 312, 323, 340, 341, 342, 344], "forc": [3, 4, 213, 344], "choos": [3, 256], "pars": 3, "feed": 3, "loop": [3, 4, 339, 340, 342], "unsqueez": 3, "sequenc": [3, 217, 218, 263, 336, 346], "length": [3, 180, 217, 218, 330], "len": [3, 92, 95, 98, 101, 330], "overwrit": 3, "discard": [3, 204], "old": 3, "moment": [3, 70, 316, 318, 319, 320], "anymor": 3, "everyth": 3, "small": [3, 217, 225, 227, 254, 285, 290, 295, 339, 346], "10": [3, 4, 123, 162, 167, 205, 213, 239, 309, 330, 332, 339, 341], "12": [3, 330], "8192": 3, "1024": 3, "actual": [3, 14, 239, 312, 342], "materi": [3, 5], "could": [3, 213], "20_000": 3, "machin": [3, 5, 6, 326], "8gb": 3, "ram": 3, "32": [3, 4, 147, 148, 209, 216, 230, 339], "44": 3, "doubl": 3, "bracket": 3, "becaus": [3, 213, 342], "batch": [3, 131, 217, 218, 219, 221, 222, 251, 264, 342], "zip": [3, 4], "haven": 3, "anyth": [3, 196, 342], "result": [3, 14, 56, 66, 76, 110, 118, 121, 131, 143, 148, 160, 162, 181, 191, 200, 205, 260, 339, 340, 343], "similar": [3, 205, 249, 250, 251, 283, 343, 345], "runtim": [3, 339], "section": [3, 6, 177, 295, 339, 340], "access": [3, 36, 213, 312, 323, 342, 346], "origin": [3, 78, 217, 244, 266, 267, 268, 269, 315, 316, 317, 318, 320, 321, 343], "sentencepiec": 3, "pytorch": [3, 5, 225, 340], "compat": [3, 150, 345], "npz": [3, 121, 167, 168, 239, 243, 345], "file": [3, 6, 121, 164, 165, 166, 167, 168, 239, 243, 340, 345], "directli": 3, "argpars": 3, "itertool": [3, 205], "starmap": [3, 205], "np": [3, 4, 343, 344], "torch": [3, 343], "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": [3, 249, 250, 263, 294], "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 46, 57, 225, 291], "submodul": [3, 4, 213, 237, 238, 248, 250], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 213, 314, 323, 336, 339], "savez": [3, 243, 345], "k": [3, 77, 89, 193, 194, 195, 215, 228, 229, 237], "v": [3, 71, 213, 237, 343], "left": [3, 118, 147, 215, 216, 224, 229, 230, 256, 264, 275, 276, 285, 287, 295], "disk": 3, "text": [3, 215, 216, 229, 230, 231, 262, 266, 267, 268, 269, 278, 285, 286, 287, 290, 291, 294, 296, 297, 300, 301, 305, 306, 316, 321], "format": [3, 121, 164, 165, 166, 167, 168, 343], "oper": [3, 5, 7, 32, 60, 61, 62, 70, 175, 182, 188, 207, 213, 263, 321, 339, 340, 341, 342, 343, 344, 346, 347], "dictionari": [3, 66, 121, 165, 166, 204, 213, 236, 244, 249, 250, 324, 338, 345], "represent": [3, 147, 204, 206], "tree_unflatten": 3, "helper": [3, 339], "weight_fil": 3, "incur": 3, "sever": [3, 68, 69, 70, 167, 168, 339, 345], "futur": [3, 253, 341, 342], "pth": 3, "current": [3, 5, 6, 68, 69, 70, 147, 213, 316, 342], "around": 3, "m1": [3, 339, 340, 346], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, "ago": 3, "never": [3, 342], "long": 3, "info": [3, 6], "247": 3, "press": [3, 118], "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], 
"purs": 3, "greater": [3, 23, 108, 145, 262, 306], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": [3, 340], "off": [3, 6, 342], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 205], "hi": 3, "bundl": 3, "hard": 3, "wet": 3, "he": [3, 268, 269], "were": [3, 346], "cry": 3, "watch": [3, 339], "him": 3, "observ": 3, "numer": [3, 118, 126, 130, 175, 217, 225, 226, 227, 254, 283, 285, 295, 315, 316, 317, 318, 319, 320, 326, 339, 342], "crowd": 3, "wa": [3, 342], "hurri": 3, "437": 3, "330": 3, "second": [3, 78, 127, 129, 131, 186, 196, 216, 230, 283, 291, 316, 318, 319, 320, 340, 346], "spent": 3, "amount": [3, 215, 229], "39": 3, "ms": [3, 339], "By": [3, 340, 343], "bigger": [3, 316], "remain": [3, 196, 220, 221, 222], "almost": 3, "nobodi": 3, "took": 3, "least": [3, 60, 61, 62, 65, 119, 147], "notic": [3, 340, 345], "distanc": [3, 295], "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 118, 177, 239], "ey": 3, "speak": [3, 118], "resum": 3, "postur": 3, "stood": 3, "feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": [3, 62], "quarter": 3, "hour": 3, "made": 3, "immedi": [3, 232], "repli": 3, "again": [3, 6, 213, 339], "hand": [3, 340, 342], "did": 3, "accustom": 3, "thu": [3, 213], "question": [3, 342], "reason": [3, 341], "tell": [3, 339, 343], "understand": [3, 266, 267], "579": 3, "690": 3, "num": [3, 120, 156], "500": [3, 346], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 213], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": [3, 342], "kind": 3, "longer": [3, 71, 340], "soon": 3, "unless": [3, 12, 118, 312], "unlik": [3, 12, 221, 222, 244], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": [3, 65, 341, 343], "happi": 3, "ask": 3, "Is": [3, 260, 263], "shop": 3, "bui": 3, "food": 3, "633": 3, "21": [3, 332], "475": 3, "su": 3, "j": [3, 6, 118, 221, 317, 318, 320], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": [3, 256], "enhanc": [3, 256, 342], "rotari": [3, 256], "arxiv": [3, 225, 226, 227, 231, 254, 276, 296, 315, 321], "preprint": [3, 315, 321], "2104": 3, "09864": 3, "zhang": 3, "sennrich": 3, "2019": [3, 319], "root": [3, 163, 178, 254], "advanc": [3, 339], "inform": [3, 4, 6, 165, 166, 213, 217, 224, 251, 340, 346], "system": [3, 6], "shazeer": 3, "2020": 3, "glu": [3, 213], "variant": [3, 294, 320], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 187, 213, 339], "mlp": [4, 213, 263, 314], "inherit": [4, 338], "standard": [4, 36, 56, 131, 151, 153, 263, 266, 268, 271, 344], "idiom": [4, 339], "input_dim": [4, 213, 228, 253], "hidden_dim": [4, 312, 314], "output_dim": [4, 213, 228, 253], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 21, 65, 213, 255, 260, 275, 276, 279, 298, 312, 342], "cross": [4, 70, 282, 284], "entropi": [4, 282, 284], "sub": [4, 78, 156], "commonli": [4, 249, 309, 339], "cross_entropi": [4, 213], "accuraci": 4, "valid": [4, 71, 102, 199, 204, 237, 248, 338], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 314], "batch_siz": [4, 314], "num_epoch": [4, 314], "learning_r": [4, 314, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 328, 329, 330, 331, 332, 339], "train_imag": [4, 314], "train_label": [4, 314], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 314], "perm": 4, "permut": 4, "id": [4, 6], "put": [4, 339], "trainabl": [4, 203, 213, 312], 
"loss_and_grad_fn": [4, 314, 339, 340], "value_and_grad": [4, 213, 249, 312, 314, 325, 339, 340, 343, 344], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 89, 96, 97, 99, 100, 101, 225, 239, 341, 343], "featur": [5, 68, 69, 70, 217, 225, 226, 227, 228, 253, 254, 256, 263, 264, 339, 342], "main": [5, 78, 89, 205, 213], "differ": [5, 184, 294, 340], "lazi": [5, 312, 344], "multi": [5, 218, 219, 341, 343], "cpu": [5, 119, 339, 346], "gpu": [5, 339, 341, 346], "inspir": 5, "jax": [5, 336], "arrayfir": 5, "unifi": 5, "live": [5, 346], "guid": 5, "convers": 5, "regress": [5, 290], "layer": [5, 213, 215, 216, 221, 222, 225, 227, 228, 229, 230, 245, 250, 253, 258, 263, 308, 312], "perceptron": 5, "llm": 5, "infer": [5, 105, 121], "fft": 5, "algebra": 5, "tree": [5, 66, 86, 106, 196, 199, 204, 205, 206, 322, 323, 325, 334, 340], "develop": [5, 6], "document": [5, 46, 57, 165, 166, 339, 340, 341], "pypi": 6, "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": [6, 321], "14": 6, "sonoma": 6, "conda": 6, "forg": 6, "distribut": [6, 149, 150, 151, 153, 157, 158, 228, 266, 267, 268, 269, 271, 272, 285, 288, 293, 295, 309], "probabl": [6, 154, 220, 221, 222, 253, 282, 284, 288, 346], "platform": 6, "processor": 6, "arm": [6, 209], "i386": 6, "switch": 6, "17": 6, "g": [6, 118, 147, 308, 326, 327, 342, 347], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": [6, 118, 339], "environ": [6, 79, 82], "via": [6, 322, 325, 342, 343], "rosetta": 6, "unam": 6, "p": [6, 149, 213, 220, 221, 222, 295, 318, 320], "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 79, 82, 155, 336, 339], "env": 6, "cmake_build_parallel_level": 6, "edit": [6, 250], "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "either": [6, 10, 46, 56, 57, 65, 80, 81, 83, 104, 107, 108, 116, 117, 118, 126, 131, 133, 136, 138, 184, 196, 216, 230, 258, 264, 268, 269], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 131, 138, 147, 148, 251, 260, 329, 330, 332, 339, 342, 345], "wish": 6, "variabl": [6, 66, 79, 82, 106, 115, 196, 198, 199], "export": 6, "developer_dir": 6, "app": 6, "content": [6, 236, 339], "sdk": 6, "xcrun": 6, "macosx": 6, "show": [6, 209, 339], "unabl": 6, "tool": 6, "select": [6, 200, 232, 236], "sudo": 6, "ouptut": 6, "finder": 6, "iterm": 6, "termin": 6, "click": 6, "uncheck": 6, "window": [6, 215, 216, 229, 230], "restart": 6, "grep": 6, "cmake_host_system_processor": 6, "arm64": 6, "x86_64": 6, "wipe": 6, "cahc": 6, "rf": 6, "devicetyp": 7, "attribut": [7, 8, 25, 207, 244, 312, 334], "kwarg": [8, 167, 168, 347], "union": [9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 165, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 
197, 200, 201, 202, 215, 216, 219, 229, 230, 237, 239, 248, 264, 327], "absolut": [9, 12, 275, 276, 294], "semant": [10, 63, 80, 81, 83, 107, 108, 116, 117, 126, 131, 133, 136, 138, 184, 346], "keepdim": [11, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 118, 130, 132, 134, 135, 146, 175, 185, 197], "reduct": [11, 13, 130, 132, 135, 146, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295], "reduc": [11, 13, 21, 22, 130, 132, 134, 135, 146, 185, 197, 217, 263, 290], "unspecifi": [11, 13, 14, 21, 22, 23, 24, 67, 105, 130, 132, 134, 135, 141, 145, 146, 160, 175, 176, 185, 187, 197, 201, 347], "entir": [11, 13, 21, 22, 130, 132, 134, 135, 146, 185, 197, 221, 222], "singleton": [11, 13, 21, 22, 130, 131, 132, 134, 135, 146, 185, 197], "rtol": 12, "05": [12, 217, 225, 226, 227, 254], "atol": 12, "08": [12, 283, 317, 318, 319, 320, 326], "equal_nan": [12, 59], "approxim": [12, 224, 274, 275, 276], "comparison": [12, 83, 107, 108, 116, 117], "infinit": 12, "equal": [12, 23, 59, 89, 108, 117, 145, 154, 177, 226, 228], "sign": [12, 209, 321], "nan": [12, 59, 112], "ab": [12, 118, 196, 225, 226, 227, 231, 254, 276, 296, 339], "array_equ": 12, "rel": [12, 316, 339], "toler": 12, "boolean": [12, 59, 111, 112, 113, 114, 127, 128, 129, 209, 247, 341], "interv": [14, 120, 154, 158], "increment": 14, "otherwis": [14, 70, 204, 205, 237, 239, 248, 262, 263, 264, 282, 287, 294, 305, 306, 342, 343], "int32": [14, 102, 118, 154, 209, 264, 341, 344], "convent": [14, 71, 264, 319], "lead": [14, 339], "fraction": 14, "integr": [14, 187, 342], "invers": [15, 16, 17, 18, 19, 20, 85, 93, 94, 95, 96, 97, 98], "cosin": [15, 16, 72, 73, 283, 328, 330, 340], "hyperbol": [16, 18, 20, 73, 174, 190, 307], "sine": [17, 18, 173, 174, 340], "uint32": [21, 22, 23, 24, 150, 209], "minimum": [22, 65, 260, 283], "kth": [23, 145], "partit": 23, "order": [23, 70, 118, 145, 147, 213, 225, 249, 258, 323, 339, 340], "undefin": [23, 145, 341], "sort": [23, 24, 145], "flatten": [23, 24, 118, 143, 145, 160, 176, 187, 188, 204], "dimension": [25, 90, 91, 92, 93, 94, 95, 99, 100, 101, 215, 216, 217, 218, 219, 223, 228, 229, 230, 253, 260, 341, 343], "val": [25, 105], "tupl": [25, 46, 49, 57, 67, 69, 70, 81, 86, 88, 115, 118, 119, 144, 147, 161, 180, 196, 198, 204, 205, 206, 215, 216, 219, 229, 230, 239, 241, 258, 264, 316, 318, 319, 320, 321, 338, 340], "ndarrai": [25, 341, 342, 344], "properti": [26, 34, 43, 49, 51, 244, 247, 324, 340], "argument": [26, 46, 57, 66, 86, 106, 196, 205, 213, 264, 336, 340, 345, 346, 347], "decim": [47, 162], "indices_or_sect": [52, 177], "nest": [56, 66, 213, 312, 338, 340], "ddof": [58, 197], "a_min": 65, "a_max": 65, "edg": [65, 144, 264, 339], "At": 65, "anoth": [65, 131, 184, 200, 213, 232, 339, 340, 341, 346], "fun": [66, 106, 115, 196, 198, 199, 339, 341, 342, 346], "dict": [66, 86, 121, 165, 166, 167, 242, 246, 249, 250, 312, 322, 323, 325, 338, 340, 345], "dure": [66, 220, 221, 222, 264, 343], "arbitrarili": [66, 213, 338, 340, 344], "leaf": [66, 204, 205, 236], "node": [66, 86, 199], "pad": [68, 69, 70, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 215, 216, 218, 219, 229, 230], "dilat": [68, 69, 70], "group": [68, 69, 70, 76, 147, 148, 225, 253], "1d": [68, 70, 71, 165, 188], "convolut": [68, 69, 70, 71, 218, 219, 221, 222], "channel": [68, 69, 70, 217, 218, 219, 221, 222], "c_in": [68, 69, 70], "c_out": [68, 69, 70], "convolv": [68, 69, 70], "2d": [69, 70, 78, 147, 217, 221], "spatial": [69, 70, 215, 225, 229, 264], "symmetr": 69, "discret": [71, 90, 91, 92, 93, 94, 
95, 99, 100, 101, 223], "swap": [71, 186, 250, 253], "conv": 71, "filter": [71, 218, 219, 232, 236], "flip": [70, 71], "signal": [71, 264], "bias": [76, 147, 148, 237, 248, 251], "group_siz": [76, 147, 148, 253], "64": [76, 147, 148, 209, 253], "configur": 76, "formal": [76, 147], "notat": [76, 204, 241], "quantiz": [76, 121, 148, 253], "w_i": [76, 147], "hat": [76, 147], "occupi": [76, 147, 148], "diagon": [77, 89, 193, 194, 195], "th": [77, 89, 330], "axis1": [78, 186], "axis2": [78, 186], "subarrai": [78, 177], "remov": [78, 131, 150, 180, 284], "insert": [78, 88, 346], "neg": [78, 102, 113, 229, 230, 251, 285, 293, 295, 341], "taken": [78, 187], "disabl": [79, 339], "mlx_disable_compil": [79, 82, 339], "divis": [80, 104, 147], "quotient": [80, 81, 104], "remaind": 81, "fuction": 81, "faster": [81, 274, 339, 340], "mathrm": [84, 171, 226], "frac": [84, 147, 171, 215, 216, 217, 220, 221, 222, 225, 226, 227, 228, 229, 230, 254, 266, 267, 268, 269, 283, 285, 287, 290, 301, 303, 315, 317, 318, 319, 320, 326], "pi": [84, 260, 340], "int_0": 84, "dt": 84, "erf": [85, 339], "exponenti": [87, 257, 273, 300, 329], "ident": [89, 182, 213, 245], "zero": [89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 193, 194, 195, 202, 213, 215, 216, 220, 221, 222, 239, 265, 266, 267, 268, 269, 270, 271, 272, 309, 316, 341], "whose": [89, 203], "One": [90, 93, 99, 163, 339, 340], "fourier": [90, 91, 92, 93, 94, 95, 99, 100, 101], "truncat": [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 157], "dft": [90, 91, 92, 93, 94, 95, 99, 100, 101], "rfft": 96, "real": [96, 97, 98, 99, 100, 101], "rfft2": 97, "rfftn": 98, "silent": [99, 100, 101], "start_axi": 102, "end_axi": 102, "inclus": 102, "outsid": 102, "clamp": 102, "integ": [104, 118, 144, 147, 148, 149, 154, 177, 191, 199, 209, 223, 330, 341], "floor": 104, "argnam": [106, 196], "neither": [106, 196], "keyword": [106, 167, 168, 196, 205, 213, 336, 345, 347], "strict": [107, 116, 237, 239, 248], "ordinari": 110, "inifn": 111, "infin": [111, 113, 114, 229, 230, 320], "ord": 118, "tabl": [118, 209, 223], "frobeniu": 118, "matric": [118, 119], "strictli": 118, "mathemat": 118, "variou": 118, "purpos": 118, "calcul": [118, 285, 291, 316], "fro": 118, "inf": [118, 251], "largest": 118, "sing": 118, "smallest": 118, "singular": 118, "nuclear": 118, "_f": 118, "sum_": [118, 215, 216, 290], "a_": 118, "valueerror": [118, 239, 340], "refer": [118, 226, 231, 244, 266, 267, 268, 269, 276, 296, 341], "golub": 118, "van": 118, "loan": 118, "baltimor": 118, "md": 118, "john": 118, "hopkin": 118, "univers": 118, "1985": 118, "pg": 118, "la": 118, "arang": [118, 264, 341, 343], "9": [118, 284, 315, 318, 319, 320, 321, 323, 329, 332, 343], "74597": 118, "20": 118, "84804": 118, "41421": 118, "23607": [118, 119], "74166": 118, "24264": 118, "11": 118, "225": 118, "factorizatoin": 119, "q": 119, "894427": 119, "447214": 119, "57771": 119, "50": 120, "evenli": 120, "return_metadata": 121, "binari": [121, 164, 165, 166, 167, 168, 262, 282, 306, 339], "npy": [121, 164, 345], "safetensor": [121, 166, 239, 243, 342, 345], "gguf": [121, 165, 345], "matadata": 121, "unsupport": 121, "tensor": [121, 191, 215, 216, 229, 230, 295, 343], "natur": [122, 124, 342], "logarithm": [122, 123, 124, 125], "log": [124, 126, 130, 280, 281, 285, 288, 290, 293, 304], "plu": 124, "exp": [126, 130, 151, 175, 273, 288, 300, 301, 304, 339, 346], "stabl": [126, 130, 175, 290], "prepend": 131, "negat": 139, "beforehand": 143, "pad_with": 144, "constant_valu": 144, "pad_width": 144, "before_1": 144, 
"after_1": 144, "before_2": 144, "after_2": 144, "before_n": 144, "after_n": 144, "before_i": 144, "after_i": 144, "extend": 144, "side": [144, 215, 216, 229, 230, 339], "smaller": [145, 321, 339], "everi": [147, 205, 332, 340], "particular": [147, 225], "consecut": [147, 256], "w_1": 147, "w_g": 147, "begin": [147, 216, 230, 262, 287, 294, 300, 305, 306], "align": [147, 216, 230], "max_i": 147, "min_i": 147, "textrm": [147, 224, 274, 277], "round": 147, "pack": [147, 148], "unsign": [147, 148, 209], "lower": [147, 154, 157, 158, 193, 272], "upper": [147, 154, 157, 158, 272], "1st": 147, "signific": 147, "2nd": 147, "dequant": 147, "w_q": 147, "whether": [148, 236, 251, 282, 285, 291], "prng": [149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 336], "num_sampl": 150, "unnorm": [150, 282, 284], "draw": 150, "cdf": [151, 224, 274], "accord": [151, 200, 251, 266, 267, 268, 269], "seed": 152, "loc": 153, "deviat": [153, 266, 268, 271], "low": [154, 158, 272, 309], "high": [154, 158, 213, 223, 272, 309], "bound": [154, 157, 158, 224, 272, 339, 341, 346], "roadcast": 154, "domain": 157, "uniformli": 158, "repetit": 160, "preserv": [161, 340], "reciproc": 163, "arr": [164, 341], "obj": 165, "uncompress": 167, "my_path": 167, "tree_flatten": [167, 205, 206, 213], "transformerencod": 167, "128": [167, 213], "flat_param": 167, "compress": 168, "being": [182, 213], "prevent": [182, 295, 343], "flow": [182, 342], "unchang": [182, 256], "prior": [187, 188], "exclud": 188, "dot": [191, 204, 241, 251], "elsewher": [193, 341], "col": 193, "triangl": 193, "mse": 196, "param": [196, 213, 309, 340], "lvalu": 196, "dlvalu": 196, "dparam": 196, "lasso": 196, "l1": [196, 287, 289, 290, 294], "varianc": [197, 217, 225, 285], "divisor": 197, "cotang": 198, "in_ax": [199, 340], "out_ax": [199, 340], "prefix": [199, 204], "fn": [203, 205, 344], "callabl": [203, 204, 205, 232, 233, 236, 258, 263, 265, 266, 267, 268, 269, 270, 271, 272, 327, 328, 329, 330, 331, 332], "wrt": 203, "rho": 315, "06": [285, 295, 315], "paper": [217, 260, 315, 316, 317, 318, 320, 321], "zeiler": 315, "2012": [315, 326], "adapt": [315, 316, 317], "1212": 315, "5701": 315, "v_": [315, 317, 318, 319, 320, 326, 327], "v_t": [315, 317, 318, 319, 320, 326, 327], "g_t": [315, 317, 318, 319, 320, 321, 326, 327], "delta": [287, 315], "w_": [216, 230, 315, 316, 317, 318, 319, 320, 321, 326, 327], "u_t": 315, "epsilon": [217, 225, 226, 227, 254, 283, 285, 315, 317, 318, 319, 320, 326], "u_": 315, "w_t": [315, 317, 318, 319, 320, 321, 326, 327], "lambda": [205, 213, 232, 237, 261, 300, 305, 315, 316, 317, 318, 319, 320, 321, 326, 327, 339, 340], "averag": [215, 216, 315, 316, 318, 319, 320], "denomin": [226, 283, 315, 317, 318, 319, 320, 326], "stabil": [217, 225, 226, 227, 254, 283, 285, 315, 316, 317, 318, 319, 320, 326], "30": 316, "001": 316, "clip_threshold": 316, "decay_r": [316, 329, 332], "beta_1": [316, 318, 319, 320, 321], "weight_decai": [316, 319, 321, 327], "scale_paramet": 316, "relative_step": 316, "warmup_init": 316, "sublinear": 316, "cost": [316, 342], "epsilon_1": 316, "epsilon_2": 316, "parameter_scal": 316, "clip": 316, "unscal": 316, "decai": [316, 319, 321, 327, 328, 329, 332], "duchi": 317, "hazan": 317, "singer": 317, "2011": 317, "subgradi": 317, "onlin": 317, "stochast": [317, 318, 320, 327, 342], "jmlr": 317, "999": [318, 319, 320], "omit": [318, 320], "estim": [318, 320], "kingma": [318, 320], "ba": [318, 320], "2015": [221, 318, 320], "iclr": [318, 319, 320], "m_": [318, 319, 320, 321], "m_t": [318, 319, 320, 321], 
"beta_2": [318, 319, 320, 321], "contrast": 319, "loshchilov": 319, "hutter": 319, "decoupl": 319, "regular": [221, 296, 319, 339, 341], "adam": [314, 320, 321, 330, 331], "99": [321, 326], "tend": 321, "larger": [256, 321], "10x": 321, "adamw": [314, 321], "maintain": [221, 222, 321], "strength": [321, 327], "wd": 321, "chen": 321, "symbol": 321, "discoveri": 321, "2302": 321, "06675": 321, "c_": 321, "eta": 321, "c_t": 321, "momentum": [217, 321, 323, 327, 339], "appli": [205, 213, 215, 216, 217, 218, 219, 221, 222, 224, 225, 226, 227, 228, 229, 230, 231, 233, 245, 252, 253, 254, 255, 257, 259, 261, 262, 264, 273, 274, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 309, 322, 325, 331, 334, 339], "opt": 322, "superset": [205, 322], "trainable_paramet": [213, 236, 323], "tieleman": 326, "hinton": 326, "lectur": 326, "coursera": 326, "smooth": [284, 294, 326], "dampen": 327, "nesterov": 327, "descent": [327, 339, 342], "mu": 327, "tau": 327, "l2": [287, 290, 327], "penalti": 327, "is_leaf": [204, 205], "arbitrari": [204, 312], "depth": [204, 222, 340], "hello": [204, 206], "charact": 204, "flat": [204, 206], "extra": 205, "closer": 205, "constitut": 205, "dict_kei": [205, 323], "recreat": 206, "world": 206, "42": 206, "byte": 209, "bool_": 209, "uint8": 209, "uint16": 209, "16": [209, 215, 226, 229, 232, 312], "uint64": 209, "int8": 209, "int16": 209, "int64": 209, "done": [213, 220, 339, 342, 343], "manual": 213, "explicitli": [213, 336], "solv": 213, "intuit": 213, "freez": [213, 248, 312], "finetun": 213, "in_dim": [213, 312], "out_dim": [213, 312], "enumer": 213, "caus": [213, 339, 342], "local": [213, 221], "scope": 213, "l2_loss": 213, "y_hat": 213, "loss_and_grad": 213, "workhors": 213, "Its": 213, "recurs": [213, 236, 237, 242, 246, 248, 312], "frozen": [213, 237, 246, 248, 253, 312], "individu": [213, 221, 222], "subset": [213, 236], "action": 213, "displai": 213, "tree_map": 213, "count": [213, 330], "num_param": 213, "preclud": 213, "pure": [213, 314], "pattern": [213, 342], "achiev": 213, "other_input": 213, "necessari": 213, "wrap": 213, "apply_to_modul": [213, 237], "children": 213, "filter_and_map": 213, "leaf_modul": 213, "load_weight": [213, 342], "named_modul": 213, "save_weight": 213, "unfreez": [213, 237], "update_modul": 213, "alibi": 213, "batchnorm": 213, "conv1d": 213, "conv2d": 213, "dropout": [213, 221, 222, 245, 263, 339], "dropout2d": 213, "dropout3d": 213, "gelu": [213, 275, 276, 339], "groupnorm": 213, "instancenorm": 213, "layernorm": 213, "mish": 213, "prelu": 213, "quantizedlinear": 213, "relu": [213, 252, 263, 297, 309], "selu": 213, "sequenti": [213, 309], "silu": 213, "sinusoidalpositionalencod": 213, "softshrink": 213, "gelu_approx": [213, 224, 274], "gelu_fast_approx": [213, 224, 274], "binary_cross_entropi": [213, 339], "cosine_similarity_loss": 213, "gaussian_nll_loss": 213, "hinge_loss": 213, "huber_loss": 213, "kl_div_loss": 213, "l1_loss": 213, "log_cosh_loss": 213, "margin_ranking_loss": 213, "mse_loss": 213, "nll_loss": 213, "smooth_l1_loss": 213, "triplet_loss": 213, "init": [213, 252, 309, 314, 328, 329, 331, 332], "uniform": [213, 228, 239, 267, 269, 309, 336, 339, 340, 346], "glorot_norm": 213, "glorot_uniform": 213, "he_norm": 213, "he_uniform": 213, "affin": [217, 225, 226, 227, 228, 253], "track_running_stat": 217, "var": [217, 225, 226, 227, 285], "gamma": [217, 225, 226, 227, 254, 266, 267, 268, 269], "nc": 217, "nlc": [217, 218], 
"four": 217, "nhwc": [217, 219], "height": [216, 217, 219, 221, 222, 230], "width": [216, 217, 219, 221, 222, 230, 253], "deep": [217, 266, 267, 268, 269], "intern": 217, "covari": 217, "shift": 217, "bn": 217, "in_channel": [218, 219], "out_channel": [218, 219], "kernel_s": [215, 216, 218, 219, 229, 230], "learnabl": [218, 219, 258], "portion": 220, "independ": [221, 222], "nwhc": 221, "whc": 221, "entri": [221, 222], "benefici": [221, 222, 342], "earli": 221, "adjac": 221, "pixel": 221, "correl": [70, 221], "thompson": 221, "goroshin": 221, "jain": 221, "lecun": 221, "bregler": 221, "cvpr": 221, "ndhwc": 222, "dhwc": 222, "medic": 222, "video": 222, "num_embed": 223, "lookup": 223, "typic": [223, 314, 339, 342], "usual": [223, 338, 342], "vocabulari": 223, "approx": 224, "unit": [224, 255, 257, 259, 266, 267, 268, 269, 273, 274, 275, 276, 277, 279, 298, 299, 300, 302], "phi": [224, 274], "geluapprox": 224, "sigma": [224, 266, 267, 268, 269, 275, 276, 277, 280, 301, 302], "60033": [224, 275], "0433603": [224, 275], "gelufast": 224, "773": 224, "regard": 224, "num_group": 225, "pytorch_compat": 225, "split": [225, 277], "preced": 225, "http": [225, 226, 227, 231, 254, 276, 296], "org": [225, 226, 227, 231, 254, 276, 296], "1803": 225, "08494": 225, "inorm": 226, "1607": [226, 227], "08022": 226, "06450": 227, "mathcal": 228, "u": 228, "d_i": 228, "monoton": 296, "1908": [231, 296], "08681": [231, 296], "tanh": [213, 231, 296], "softplu": [213, 231, 296], "map_fn": [232, 236], "filter_fn": [232, 236], "valid_parameter_filt": 232, "apply_fn": 233, "descend": 234, "is_leaf_fn": 236, "found": 236, "drop": 236, "idempot": [237, 248], "attent": [237, 251, 260, 263], "endswith": 237, "file_or_weight": 239, "miss": [239, 345], "ok": [239, 340], "save_safetensor": [243, 345], "reflect": [244, 339, 341, 343], "certain": [245, 339], "ie": 248, "noop": 248, "unfrozen": 248, "chang": [66, 170, 249, 253, 264, 287, 294, 339, 343], "tracer": 249, "partial": [249, 250, 339, 342], "child": 250, "programmat": 250, "query_input_dim": 251, "key_input_dim": 251, "value_input_dim": 251, "value_dim": 251, "value_output_dim": 251, "head": [251, 263], "aggreg": 251, "linearli": 251, "attend": 251, "num_paramet": 252, "25": [252, 264], "parametr": [252, 297], "classmethod": 253, "from_linear": 253, "quantize_modul": 253, "1910": 254, "07467": 254, "rectifi": [255, 268, 269, 279, 298, 299], "10000": 256, "rotat": 256, "slightli": [256, 346], "angular": 256, "frequenc": [256, 260], "_cos_sin_theta_kei": [], "precomput": [], "_cos_sin_theta_valu": [], "leq": [287, 300], "0507": 300, "67326": 300, "elu": [213, 300], "plain": 258, "known": [259, 302], "swish": [259, 302], "cdot": [275, 276, 283, 286, 302], "min_freq": 260, "0001": 260, "max_freq": 260, "cos_first": 260, "full_turn": 260, "sinusoid": 260, "sin": [260, 340, 344], "lambd": [261, 305], "threshold": [262, 287, 294, 306], "geq": [262, 306], "num_encoder_lay": 263, "num_decoder_lay": 263, "custom_encod": 263, "custom_decod": 263, "norm_first": 263, "checkpoint": 263, "decod": 263, "interact": 263, "mechan": 263, "hidden": 263, "chekpoint": 263, "usag": [263, 339], "expens": 263, "init_fn": [265, 266, 267, 268, 269, 270, 271, 272, 309], "glorot": [266, 267], "fan_in": [266, 267, 268, 269], "fan_out": [266, 267, 268, 269], "difficulti": [266, 267], "feedforward": [266, 267], "191107": 266, "61278": 266, "150594": 266, "363207": 266, "gain": [266, 267, 268, 269], "89613": 266, "53947": 266, "48095": 266, "995016": 266, "223404": 267, "890597": 267, "379159": 267, 
"776856": 267, "90041": 267, "02264": 267, "912766": 267, "12451": 267, "fan": [268, 269], "delv": [268, 269], "surpass": [268, 269], "human": [268, 269], "level": [268, 269], "imagenet": [268, 269], "classif": [268, 269], "25211": 268, "458835": 268, "177208": 268, "0137595": 268, "6967": 268, "02765": 268, "15268": 268, "75787": 268, "kaim": 269, "0300242": 269, "0184009": 269, "793615": 269, "666329": 269, "64331": 269, "16506": 269, "08619": 269, "79854": 269, "982273": 271, "534422": 271, "380709": 271, "0645099": 271, "883935": 272, "863726": 272, "617261": 272, "417497": 272, "exact": [275, 276], "0003": 275, "015": 276, "with_logit": 282, "predict": [282, 285, 286, 287, 288, 289, 290, 292, 293, 294], "105361": 282, "223144": 282, "20397": 282, "916291": 282, "539245": 282, "prob": 282, "510826": 282, "x1": 283, "x2": 283, "x_1": [283, 291], "x_2": [283, 291], "label_smooth": 284, "hot": 284, "0485873": 284, "348587": 284, "likelihood": [285, 293], "nll": [285, 293], "hing": 286, "y_": [286, 290], "pred": [286, 290], "huber": 287, "l_": [215, 229, 287], "kullback": 288, "leibler": 288, "diverg": 288, "cosh": 290, "logcosh": 290, "sensit": 290, "outlier": 290, "dual": 290, "behavior": [290, 341, 342], "offer": 290, "balanc": 290, "robust": 290, "approach": [290, 340], "task": 290, "inputs1": 291, "inputs2": 291, "margin": [291, 295], "rank": 291, "573409": 291, "765166": 291, "0638": 291, "75596": 291, "225763": 291, "256995": 291, "773433": 291, "formula": 294, "anchor": 295, "triplet": 295, "_p": 295, "degre": 295, "pairwis": 295, "instabl": 295, "subclass": 312, "concept": 312, "mymlp": 312, "in_proj": 312, "basi": 334, "subsequ": 314, "apply_gradi": 314, "implicit": [336, 339, 340], "fine": [336, 342], "grain": 336, "control": [336, 342], "manag": [183, 336, 346], "pseudo": 336, "altern": 336, "splittabl": 336, "threefri": 336, "counter": 336, "cycl": 338, "merg": 339, "fuse": 339, "big": 339, "awar": [339, 342], "36788": 339, "compiled_fun": 339, "code": [339, 342], "slow": 339, "Not": [66, 339], "recompil": [66, 339], "stack": 339, "rerun": [339, 342], "too": [339, 342], "frequent": [339, 342], "destroi": 339, "anonym": 339, "don": [339, 346], "nonlinear": 339, "unari": 339, "overhead": [339, 342, 346], "bandwidth": 339, "fusibl": 339, "consider": 339, "versu": 339, "timeit": [339, 340], "tic": 339, "perf_count": 339, "toc": 339, "tpi": 339, "1e3": 339, "1000": [328, 339], "4096": [339, 340, 346], "On": [339, 340, 342], "millisecond": [339, 346], "five": 339, "latest": 339, "won": 339, "trace": 339, "placehold": 339, "insid": 339, "crash": 339, "inspect": [339, 344], "disable_compil": 339, "okai": [339, 342], "intend": 339, "deal": 339, "pretti": [339, 342], "inconveni": 339, "functool": 339, "particularli": 339, "backward": [339, 340], "squeez": [264, 339], "checkout": 339, "compiled_grad_fn": 339, "71828": 339, "outer": [339, 342], "opportun": 339, "idea": [340, 342], "behind": 340, "dfdx": [340, 341], "d2fdx2": 340, "differentiaion": 340, "zero_grad": 340, "detach": 340, "requires_grad": 340, "dloss_dw": 340, "dloss_dx": 340, "lot": 340, "redund": 340, "suppos": [340, 346], "nice": [340, 342], "propag": [340, 341], "stop_gradi": 340, "autom": 340, "contriv": [340, 346], "sake": 340, "clariti": 340, "quit": [340, 343], "power": [340, 343], "difficult": 340, "primit": 340, "issu": [340, 343], "priorit": 340, "xs": 340, "ys": 340, "naive_add": 340, "vmap_add": 340, "total": 340, "390": 340, "wherea": 340, "025": 340, "ten": [340, 342], "Of": 340, "better": [340, 346], "handi": 
340, "slice": 341, "ellipsi": 341, "syntax": 341, "idx": 341, "mix": 341, "take_along_axi": 341, "lack": 341, "extrem": [341, 342], "ineffici": [341, 342], "nonzero": 341, "record": 342, "dynam": 342, "easier": 342, "worri": 342, "fun1": 342, "expensive_fun": 342, "consum": 342, "eager": 342, "thank": 342, "weights_fp16": 342, "trade": 342, "bad": 342, "grow": 342, "computation": 342, "costli": 342, "wide": 342, "thousand": 342, "value_and_grad_fn": 342, "implicitli": 342, "anytim": 342, "memoryview": [342, 343], "perfectli": 342, "first_lay": 342, "second_layer_a": 342, "second_layer_b": 342, "protocol": 343, "receiv": [330, 343], "pep": 343, "3118": 343, "view": 343, "a_view": 343, "owndata": 343, "extern": 343, "x_view": 343, "modifi": 343, "df": 343, "x\u00b2": 343, "2x": 343, "indirectli": 343, "modif": 343, "seen": 343, "occur": 343, "incorpor": 343, "incorrect": 343, "experiment": 343, "break": 343, "advis": 343, "intermedi": 343, "jnp": 343, "tf": 343, "page": 344, "composit": 344, "archiv": 345, "savez_compress": 345, "save_gguf": 345, "arr_0": 345, "pool": [215, 216, 229, 230, 346], "advantag": 346, "parallel": 346, "race": 346, "interest": 346, "albeit": 346, "d1": 346, "d2": 346, "matmul": 346, "dens": 346, "twice": 346, "measur": 346, "default_stream": 347, "default_devic": 347, "my_devic": 347, "streamcontext": 183, "context": 183, "avgpool1d": 213, "avgpool2d": 213, "maxpool1d": 213, "maxpool2d": [213, 216], "n_i": [215, 216, 229, 230], "c_j": [215, 216, 229, 230], "ldot": [215, 216, 229, 230], "lfloor": [215, 216, 229, 230], "rfloor": [215, 216, 229, 230], "k_h": [216, 230], "k_w": [216, 230], "h_": [216, 230], "max_": [229, 230], "rmsprop": 314, "adagrad": 314, "adafactor": 314, "adadelta": 314, "adamax": 314, "lion": 314, "step_decai": 314, "exponential_decai": 314, "cosine_decai": [314, 330], "decay_step": 328, "beyond": [328, 331], "lr_schedul": [328, 329, 330, 332], "0999961": 328, "06561": 329, "step_siz": 332, "081": 332, "ari": [60, 61, 62], "shapeless": 66, "attempt": 66, "kernel_dil": 70, "input_dil": 70, "upsampl": 213, "hardswish": 213, "leaky_relu": 213, "log_sigmoid": 213, "log_softmax": 213, "relu6": 213, "pycapsul": 263, "scale_factor": 264, "nearest": 264, "align_corn": 264, "audio": 264, "4d": 264, "forth": 264, "neighbor": 264, "interpol": 264, "bilinear": 264, "trilinear": 264, "corner": 264, "bottom": 264, "75": 264, "33333": 264, "66667": 264, "702": 276, "hendryck": 276, "1606": 276, "08415": 276, "gate": 277, "halv": 277, "negative_slop": 279, "leaki": 279, "sum_i": 281, "x_i": [281, 303], "sum_j": 303, "x_j": 303, "join_schedul": 314, "linear_schedul": [314, 330], "boundari": 330, "join": 330, "transit": 330, "warmup": [330, 331], "0999938": 330, "101": 331}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [207, 0, 1, "", "Stream"], [9, 2, 1, "", "abs"], [10, 2, 1, "", "add"], [11, 2, 1, "", "all"], [12, 2, 1, "", "allclose"], [13, 2, 1, "", "any"], [14, 2, 1, "", "arange"], [15, 2, 1, "", "arccos"], [16, 2, 1, "", "arccosh"], [17, 2, 1, "", "arcsin"], [18, 2, 1, "", "arcsinh"], [19, 2, 1, "", "arctan"], [20, 2, 1, "", "arctanh"], [21, 2, 1, "", "argmax"], [22, 2, 1, "", "argmin"], [23, 2, 1, "", "argpartition"], [24, 2, 1, "", "argsort"], [25, 0, 1, "", "array"], [59, 2, 1, "", "array_equal"], [60, 2, 1, "", "atleast_1d"], [61, 2, 1, "", "atleast_2d"], [62, 2, 1, "", "atleast_3d"], [63, 2, 1, "", "broadcast_to"], [64, 2, 1, "", "ceil"], [65, 2, 1, "", "clip"], [66, 2, 1, "", "compile"], [67, 2, 1, "", "concatenate"], [68, 
2, 1, "", "conv1d"], [69, 2, 1, "", "conv2d"], [70, 2, 1, "", "conv_general"], [71, 2, 1, "", "convolve"], [72, 2, 1, "", "cos"], [73, 2, 1, "", "cosh"], [74, 2, 1, "", "default_device"], [75, 2, 1, "", "default_stream"], [76, 2, 1, "", "dequantize"], [77, 2, 1, "", "diag"], [78, 2, 1, "", "diagonal"], [79, 2, 1, "", "disable_compile"], [80, 2, 1, "", "divide"], [81, 2, 1, "", "divmod"], [82, 2, 1, "", "enable_compile"], [83, 2, 1, "", "equal"], [84, 2, 1, "", "erf"], [85, 2, 1, "", "erfinv"], [86, 2, 1, "", "eval"], [87, 2, 1, "", "exp"], [88, 2, 1, "", "expand_dims"], [89, 2, 1, "", "eye"], [102, 2, 1, "", "flatten"], [103, 2, 1, "", "floor"], [104, 2, 1, "", "floor_divide"], [105, 2, 1, "", "full"], [106, 2, 1, "", "grad"], [107, 2, 1, "", "greater"], [108, 2, 1, "", "greater_equal"], [109, 2, 1, "", "identity"], [110, 2, 1, "", "inner"], [111, 2, 1, "", "isinf"], [112, 2, 1, "", "isnan"], [113, 2, 1, "", "isneginf"], [114, 2, 1, "", "isposinf"], [115, 2, 1, "", "jvp"], [116, 2, 1, "", "less"], [117, 2, 1, "", "less_equal"], [120, 2, 1, "", "linspace"], [121, 2, 1, "", "load"], [122, 2, 1, "", "log"], [123, 2, 1, "", "log10"], [124, 2, 1, "", "log1p"], [125, 2, 1, "", "log2"], [126, 2, 1, "", "logaddexp"], [127, 2, 1, "", "logical_and"], [128, 2, 1, "", "logical_not"], [129, 2, 1, "", "logical_or"], [130, 2, 1, "", "logsumexp"], [131, 2, 1, "", "matmul"], [132, 2, 1, "", "max"], [133, 2, 1, "", "maximum"], [134, 2, 1, "", "mean"], [135, 2, 1, "", "min"], [136, 2, 1, "", "minimum"], [137, 2, 1, "", "moveaxis"], [138, 2, 1, "", "multiply"], [139, 2, 1, "", "negative"], [140, 2, 1, "", "new_stream"], [141, 2, 1, "", "ones"], [142, 2, 1, "", "ones_like"], [143, 2, 1, "", "outer"], [144, 2, 1, "", "pad"], [145, 2, 1, "", "partition"], [146, 2, 1, "", "prod"], [147, 2, 1, "", "quantize"], [148, 2, 1, "", "quantized_matmul"], [159, 2, 1, "", "reciprocal"], [160, 2, 1, "", "repeat"], [161, 2, 1, "", "reshape"], [162, 2, 1, "", "round"], [163, 2, 1, "", "rsqrt"], [164, 2, 1, "", "save"], [165, 2, 1, "", "save_gguf"], [166, 2, 1, "", "save_safetensors"], [167, 2, 1, "", "savez"], [168, 2, 1, "", "savez_compressed"], [169, 2, 1, "", "set_default_device"], [170, 2, 1, "", "set_default_stream"], [171, 2, 1, "", "sigmoid"], [172, 2, 1, "", "sign"], [173, 2, 1, "", "sin"], [174, 2, 1, "", "sinh"], [175, 2, 1, "", "softmax"], [176, 2, 1, "", "sort"], [177, 2, 1, "", "split"], [178, 2, 1, "", "sqrt"], [179, 2, 1, "", "square"], [180, 2, 1, "", "squeeze"], [181, 2, 1, "", "stack"], [182, 2, 1, "", "stop_gradient"], [183, 2, 1, "", "stream"], [184, 2, 1, "", "subtract"], [185, 2, 1, "", "sum"], [186, 2, 1, "", "swapaxes"], [187, 2, 1, "", "take"], [188, 2, 1, "", "take_along_axis"], [189, 2, 1, "", "tan"], [190, 2, 1, "", "tanh"], [191, 2, 1, "", "tensordot"], [192, 2, 1, "", "transpose"], [193, 2, 1, "", "tri"], [194, 2, 1, "", "tril"], [195, 2, 1, "", "triu"], [196, 2, 1, "", "value_and_grad"], [197, 2, 1, "", "var"], [198, 2, 1, "", "vjp"], [199, 2, 1, "", "vmap"], [200, 2, 1, "", "where"], [201, 2, 1, "", "zeros"], [202, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[207, 1, 1, "", "__init__"]], "mlx.core.array": [[26, 3, 1, "", "T"], [25, 1, 1, "", "__init__"], [27, 1, 1, "", "abs"], [28, 1, 1, "", "all"], [29, 1, 1, "", "any"], [30, 1, 1, "", "argmax"], [31, 1, 1, "", "argmin"], [32, 1, 1, "", "astype"], [33, 1, 1, "", "cos"], [34, 3, 1, "", "dtype"], [35, 1, 1, "", "exp"], [36, 1, 1, "", "item"], [37, 1, 
1, "", "log"], [38, 1, 1, "", "log1p"], [39, 1, 1, "", "logsumexp"], [40, 1, 1, "", "max"], [41, 1, 1, "", "mean"], [42, 1, 1, "", "min"], [43, 3, 1, "", "ndim"], [44, 1, 1, "", "prod"], [45, 1, 1, "", "reciprocal"], [46, 1, 1, "", "reshape"], [47, 1, 1, "", "round"], [48, 1, 1, "", "rsqrt"], [49, 3, 1, "", "shape"], [50, 1, 1, "", "sin"], [51, 3, 1, "", "size"], [52, 1, 1, "", "split"], [53, 1, 1, "", "sqrt"], [54, 1, 1, "", "square"], [55, 1, 1, "", "sum"], [56, 1, 1, "", "tolist"], [57, 1, 1, "", "transpose"], [58, 1, 1, "", "var"]], "mlx.core.fft": [[90, 2, 1, "", "fft"], [91, 2, 1, "", "fft2"], [92, 2, 1, "", "fftn"], [93, 2, 1, "", "ifft"], [94, 2, 1, "", "ifft2"], [95, 2, 1, "", "ifftn"], [96, 2, 1, "", "irfft"], [97, 2, 1, "", "irfft2"], [98, 2, 1, "", "irfftn"], [99, 2, 1, "", "rfft"], [100, 2, 1, "", "rfft2"], [101, 2, 1, "", "rfftn"]], "mlx.core.linalg": [[118, 2, 1, "", "norm"], [119, 2, 1, "", "qr"]], "mlx.core.random": [[149, 2, 1, "", "bernoulli"], [150, 2, 1, "", "categorical"], [151, 2, 1, "", "gumbel"], [152, 2, 1, "", "key"], [153, 2, 1, "", "normal"], [154, 2, 1, "", "randint"], [155, 2, 1, "", "seed"], [156, 2, 1, "", "split"], [157, 2, 1, "", "truncated_normal"], [158, 2, 1, "", "uniform"]], "mlx.nn": [[214, 0, 1, "", "ALiBi"], [215, 0, 1, "", "AvgPool1d"], [216, 0, 1, "", "AvgPool2d"], [217, 0, 1, "", "BatchNorm"], [218, 0, 1, "", "Conv1d"], [219, 0, 1, "", "Conv2d"], [220, 0, 1, "", "Dropout"], [221, 0, 1, "", "Dropout2d"], [222, 0, 1, "", "Dropout3d"], [223, 0, 1, "", "Embedding"], [224, 0, 1, "", "GELU"], [225, 0, 1, "", "GroupNorm"], [226, 0, 1, "", "InstanceNorm"], [227, 0, 1, "", "LayerNorm"], [228, 0, 1, "", "Linear"], [229, 0, 1, "", "MaxPool1d"], [230, 0, 1, "", "MaxPool2d"], [231, 0, 1, "", "Mish"], [312, 0, 1, "", "Module"], [251, 0, 1, "", "MultiHeadAttention"], [252, 0, 1, "", "PReLU"], [253, 0, 1, "", "QuantizedLinear"], [254, 0, 1, "", "RMSNorm"], [255, 0, 1, "", "ReLU"], [256, 0, 1, "", "RoPE"], [257, 0, 1, "", "SELU"], [258, 0, 1, "", "Sequential"], [259, 0, 1, "", "SiLU"], [260, 0, 1, "", "SinusoidalPositionalEncoding"], [261, 0, 1, "", "Softshrink"], [262, 0, 1, "", "Step"], [263, 0, 1, "", "Transformer"], [264, 0, 1, "", "Upsample"], [273, 2, 1, "", "elu"], [274, 2, 1, "", "gelu"], [275, 2, 1, "", "gelu_approx"], [276, 2, 1, "", "gelu_fast_approx"], [277, 2, 1, "", "glu"], [278, 2, 1, "", "hardswish"], [279, 2, 1, "", "leaky_relu"], [280, 2, 1, "", "log_sigmoid"], [281, 2, 1, "", "log_softmax"], [296, 2, 1, "", "mish"], [297, 2, 1, "", "prelu"], [298, 2, 1, "", "relu"], [299, 2, 1, "", "relu6"], [300, 2, 1, "", "selu"], [301, 2, 1, "", "sigmoid"], [302, 2, 1, "", "silu"], [303, 2, 1, "", "softmax"], [304, 2, 1, "", "softplus"], [305, 2, 1, "", "softshrink"], [306, 2, 1, "", "step"], [307, 2, 1, "", "tanh"], [203, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[232, 1, 1, "", "apply"], [233, 1, 1, "", "apply_to_modules"], [234, 1, 1, "", "children"], [235, 1, 1, "", "eval"], [236, 1, 1, "", "filter_and_map"], [237, 1, 1, "", "freeze"], [238, 1, 1, "", "leaf_modules"], [239, 1, 1, "", "load_weights"], [240, 1, 1, "", "modules"], [241, 1, 1, "", "named_modules"], [242, 1, 1, "", "parameters"], [243, 1, 1, "", "save_weights"], [244, 3, 1, "", "state"], [245, 1, 1, "", "train"], [246, 1, 1, "", "trainable_parameters"], [247, 3, 1, "", "training"], [248, 1, 1, "", "unfreeze"], [249, 1, 1, "", "update"], [250, 1, 1, "", "update_modules"]], "mlx.nn.init": [[265, 2, 1, "", "constant"], [266, 2, 1, "", "glorot_normal"], [267, 2, 1, "", "glorot_uniform"], 
[268, 2, 1, "", "he_normal"], [269, 2, 1, "", "he_uniform"], [270, 2, 1, "", "identity"], [271, 2, 1, "", "normal"], [272, 2, 1, "", "uniform"]], "mlx.nn.losses": [[282, 2, 1, "", "binary_cross_entropy"], [283, 2, 1, "", "cosine_similarity_loss"], [284, 2, 1, "", "cross_entropy"], [285, 2, 1, "", "gaussian_nll_loss"], [286, 2, 1, "", "hinge_loss"], [287, 2, 1, "", "huber_loss"], [288, 2, 1, "", "kl_div_loss"], [289, 2, 1, "", "l1_loss"], [290, 2, 1, "", "log_cosh_loss"], [291, 2, 1, "", "margin_ranking_loss"], [292, 2, 1, "", "mse_loss"], [293, 2, 1, "", "nll_loss"], [294, 2, 1, "", "smooth_l1_loss"], [295, 2, 1, "", "triplet_loss"]], "mlx.optimizers": [[315, 0, 1, "", "AdaDelta"], [316, 0, 1, "", "Adafactor"], [317, 0, 1, "", "Adagrad"], [318, 0, 1, "", "Adam"], [319, 0, 1, "", "AdamW"], [320, 0, 1, "", "Adamax"], [321, 0, 1, "", "Lion"], [334, 0, 1, "", "Optimizer"], [326, 0, 1, "", "RMSprop"], [327, 0, 1, "", "SGD"], [328, 2, 1, "", "cosine_decay"], [329, 2, 1, "", "exponential_decay"], [330, 2, 1, "", "join_schedules"], [331, 2, 1, "", "linear_schedule"], [332, 2, 1, "", "step_decay"]], "mlx.optimizers.Optimizer": [[322, 1, 1, "", "apply_gradients"], [323, 1, 1, "", "init"], [324, 3, 1, "", "state"], [325, 1, 1, "", "update"]], "mlx.utils": [[204, 2, 1, "", "tree_flatten"], [205, 2, 1, "", "tree_map"], [206, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"]}, "titleterms": {"oper": [0, 1, 313], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 339, 346], "primit": 1, "us": [1, 342, 347], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 263, 337, 339, 340, 342, 344], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 212, 228], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 310], "encod": 3, "full": [3, 105], "gener": 3, "put": 3, "all": [3, 11, 28], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 121, 345], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 
275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "troubleshoot": 6, "from": [6, 341], "sourc": 6, "requir": 6, "option": 6, "metal": 6, "found": 6, "x86": 6, "shell": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 207], "devic": [7, 210], "dtype": [8, 34], "stream": [183, 207, 210, 347], "ab": [9, 27], "add": 10, "allclos": 12, "ani": [13, 29], "arang": 14, "arcco": 15, "arccosh": 16, "arcsin": 17, "arcsinh": 18, "arctan": 19, "arctanh": 20, "argmax": [21, 30], "argmin": [22, 31], "argpartit": 23, "argsort": 24, "arrai": [25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 208, 341, 345], "t": 26, "astyp": 32, "co": [33, 72], "exp": [35, 87], "item": 36, "log": [37, 122], "log1p": [38, 124], "logsumexp": [39, 130], "max": [40, 132], "mean": [41, 134], "min": [42, 135], "ndim": 43, "prod": [44, 146], "reciproc": [45, 159], "reshap": [46, 161], "round": [47, 162], "rsqrt": [48, 163], "shape": 49, "sin": [50, 173], "size": 51, "split": [52, 156, 177], "sqrt": [53, 178], "squar": [54, 179], "sum": [55, 185], "tolist": 56, "transpos": [57, 192], "var": [58, 197], "array_equ": 59, "broadcast_to": 63, "ceil": 64, "clip": 65, "compil": [66, 339], "concaten": 67, "conv1d": [68, 218], "conv2d": [69, 219], "convolv": 71, "cosh": 73, "default_devic": 74, "default_stream": 75, "dequant": 76, "diag": 77, "diagon": 78, "disable_compil": 79, "divid": 80, "divmod": 81, "enable_compil": 82, "equal": 83, "erf": 84, "erfinv": 85, "eval": [86, 235], "expand_dim": 88, "ey": 89, "fft": [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 211], "fft2": 91, "fftn": 92, "ifft": 93, "ifft2": 94, "ifftn": 95, "irfft": 96, "irfft2": 97, "irfftn": 98, "rfft": 99, "rfft2": 100, "rfftn": 101, "flatten": 102, "floor": 103, "floor_divid": 104, "grad": [106, 213], "greater": 107, "greater_equ": 108, "ident": [109, 270], "inner": 110, "isinf": 111, "isnan": 112, "isneginf": 113, "isposinf": 114, "jvp": 115, "less": 116, "less_equ": 117, "linalg": [118, 119], "norm": 118, "qr": 119, "linspac": 120, "log10": 123, "log2": 125, "logaddexp": 126, "logical_and": 127, "logical_not": 128, "logical_or": 129, "matmul": 131, "maximum": 133, "minimum": 136, "moveaxi": 137, "multipli": 138, "neg": 139, "new_stream": 140, "ones": 141, "ones_lik": 142, "outer": 143, "pad": 144, "partit": 145, "quantiz": 147, "quantized_matmul": 148, "random": 
[149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 336], "bernoulli": 149, "categor": 150, "gumbel": 151, "kei": 152, "normal": [153, 271], "randint": 154, "seed": 155, "truncated_norm": 157, "uniform": [158, 272], "repeat": 160, "save": [164, 345], "save_gguf": 165, "save_safetensor": 166, "savez": 167, "savez_compress": 168, "set_default_devic": 169, "set_default_stream": 170, "sigmoid": [171, 301], "sign": 172, "sinh": 174, "softmax": [175, 303], "sort": 176, "squeez": 180, "stack": 181, "stop_gradi": 182, "subtract": 184, "swapax": 186, "take": 187, "take_along_axi": 188, "tan": 189, "tanh": [190, 307], "tensordot": 191, "tri": 193, "tril": 194, "triu": 195, "value_and_grad": [196, 203], "vjp": 198, "vmap": 199, "where": 200, "zero": 201, "zeros_lik": 202, "nn": [203, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307], "optim": [314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334], "adadelta": 315, "adafactor": 316, "adagrad": 317, "adam": 318, "adamw": 319, "adamax": 320, "lion": 321, "apply_gradi": 322, "init": [265, 266, 267, 268, 269, 270, 271, 272, 323], "state": [244, 324], "updat": [213, 249, 325, 341], "rmsprop": 326, "sgd": 327, "util": [204, 205, 206, 338], "tree_flatten": 204, "tree_map": 205, "tree_unflatten": 206, "data": 209, "type": 209, "support": 209, "algebra": 212, "neural": 213, "network": 213, "quick": [213, 344], "start": [213, 344], "The": 213, "modul": [213, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 312], "class": 213, "paramet": [213, 242], "inspect": 213, "valu": 213, "alibi": 214, "batchnorm": 217, "dropout": 220, "dropout2d": 221, "dropout3d": 222, "embed": 223, "gelu": [224, 274], "groupnorm": 225, "instancenorm": 226, "layernorm": 227, "mish": [231, 296], "appli": 232, "apply_to_modul": 233, "children": 234, "filter_and_map": 236, "freez": 237, "leaf_modul": 238, "load_weight": 239, "named_modul": 241, "save_weight": 243, "train": [245, 247, 339], "trainable_paramet": 246, "unfreez": 248, "update_modul": 250, "multiheadattent": 251, "prelu": [252, 297], "quantizedlinear": 253, "rmsnorm": 254, "relu": [255, 298], "rope": 256, "selu": [257, 300], "sequenti": 258, "silu": [259, 302], "sinusoidalpositionalencod": 260, "softshrink": [261, 305], "step": [262, 306], "constant": 265, "glorot_norm": 266, "glorot_uniform": 267, "he_norm": 268, "he_uniform": 269, "gelu_approx": 275, "gelu_fast_approx": 276, "loss": [282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 311], "binary_cross_entropi": 282, "cosine_similarity_loss": 283, "cross_entropi": 284, "gaussian_nll_loss": 285, "hinge_loss": 286, "huber_loss": 287, "kl_div_loss": 288, "l1_loss": 289, "log_cosh_loss": 290, "margin_ranking_loss": 291, "mse_loss": 292, "nll_loss": 293, "smooth_l1_loss": 294, "triplet_loss": 295, "function": [308, 311, 339, 340, 344], "initi": 309, "tree": 338, "basic": [339, 344], "speedup": 339, "debug": 339, "pure": 339, "graph": [339, 342, 344], "automat": 340, "differenti": 340, "vector": 340, "index": 341, "differ": 341, "numpi": [341, 343], "In": 
341, "place": 341, "lazi": 342, "evalu": 342, "why": 342, "comput": 342, "onli": 342, "what": 342, "you": 342, "when": 342, "convers": 343, "other": 343, "framework": 343, "pytorch": 343, "jax": 343, "tensorflow": 343, "guid": 344, "serial": 345, "format": 345, "unifi": 346, "memori": 346, "A": 346, "simpl": 346, "specifi": 347, "avgpool1d": 215, "avgpool2d": 216, "maxpool1d": 229, "maxpool2d": 230, "cosine_decai": 328, "exponential_decai": 329, "step_decai": 332, "common": 333, "schedul": 335, "atleast_1d": 60, "atleast_2d": 61, "atleast_3d": 62, "conv_gener": 70, "upsampl": 264, "elu": 273, "glu": 277, "hardswish": 278, "leaky_relu": 279, "log_sigmoid": 280, "log_softmax": 281, "relu6": 299, "softplu": 304, "join_schedul": 330, "linear_schedul": 331}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) \ No newline at end of file diff --git a/docs/build/html/usage/compile.html b/docs/build/html/usage/compile.html index 062532d39..2428cfd2f 100644 --- a/docs/build/html/usage/compile.html +++ b/docs/build/html/usage/compile.html @@ -9,7 +9,7 @@ - Compilation — MLX 0.3.0 documentation + Compilation — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
  • diff --git a/docs/build/html/usage/function_transforms.html b/docs/build/html/usage/function_transforms.html index 8c1ce50d9..a7a98a34e 100644 --- a/docs/build/html/usage/function_transforms.html +++ b/docs/build/html/usage/function_transforms.html @@ -9,7 +9,7 @@ - Function Transforms — MLX 0.3.0 documentation + Function Transforms — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • mlx.core.argpartition
  • mlx.core.argsort
  • mlx.core.array_equal
  • +
  • mlx.core.atleast_1d
  • +
  • mlx.core.atleast_2d
  • +
  • mlx.core.atleast_3d
  • mlx.core.broadcast_to
  • mlx.core.ceil
  • mlx.core.clip
  • @@ -240,6 +243,7 @@
  • mlx.core.convolve
  • mlx.core.conv1d
  • mlx.core.conv2d
  • +
  • mlx.core.conv_general
  • mlx.core.cos
  • mlx.core.cosh
  • mlx.core.dequantize
  • @@ -436,19 +440,31 @@
  • mlx.nn.Softshrink
  • mlx.nn.Step
  • mlx.nn.Transformer
  • +
  • mlx.nn.Upsample
  • Functions
  • Loss Functions
      @@ -502,9 +518,11 @@
  • Schedulers
  • diff --git a/docs/build/html/usage/indexing.html b/docs/build/html/usage/indexing.html index 085eb56a4..00de482fa 100644 --- a/docs/build/html/usage/indexing.html +++ b/docs/build/html/usage/indexing.html @@ -9,7 +9,7 @@ - Indexing Arrays — MLX 0.3.0 documentation + Indexing Arrays — MLX 0.5.0 documentation @@ -134,8 +134,8 @@ - MLX 0.3.0 documentation - Home - + MLX 0.5.0 documentation - Home + @@ -233,6 +233,9 @@
  • Schedulers
diff --git a/docs/build/html/usage/lazy_evaluation.html b/docs/build/html/usage/lazy_evaluation.html
index 99c6d48b8..eba3bc667 100644
--- a/docs/build/html/usage/lazy_evaluation.html
+++ b/docs/build/html/usage/lazy_evaluation.html
@@ -9,7 +9,7 @@
- Lazy Evaluation — MLX 0.3.0 documentation
+ Lazy Evaluation — MLX 0.5.0 documentation
@@ -134,8 +134,8 @@
- MLX 0.3.0 documentation - Home
-
+ MLX 0.5.0 documentation - Home
+
@@ -233,6 +233,9 @@
  • Schedulers
diff --git a/docs/build/html/usage/numpy.html b/docs/build/html/usage/numpy.html
index d91658156..a8775d915 100644
--- a/docs/build/html/usage/numpy.html
+++ b/docs/build/html/usage/numpy.html
@@ -9,7 +9,7 @@
- Conversion to NumPy and Other Frameworks — MLX 0.3.0 documentation
+ Conversion to NumPy and Other Frameworks — MLX 0.5.0 documentation
@@ -134,8 +134,8 @@
- MLX 0.3.0 documentation - Home
-
+ MLX 0.5.0 documentation - Home
+
@@ -233,6 +233,9 @@
  • Schedulers
diff --git a/docs/build/html/usage/quick_start.html b/docs/build/html/usage/quick_start.html
index 51f9ce328..bd7cecc5d 100644
--- a/docs/build/html/usage/quick_start.html
+++ b/docs/build/html/usage/quick_start.html
@@ -9,7 +9,7 @@
- Quick Start Guide — MLX 0.3.0 documentation
+ Quick Start Guide — MLX 0.5.0 documentation
@@ -134,8 +134,8 @@
- MLX 0.3.0 documentation - Home
-
+ MLX 0.5.0 documentation - Home
+
@@ -233,6 +233,9 @@
  • Schedulers
diff --git a/docs/build/html/usage/saving_and_loading.html b/docs/build/html/usage/saving_and_loading.html
index 90572c53c..9c803d377 100644
--- a/docs/build/html/usage/saving_and_loading.html
+++ b/docs/build/html/usage/saving_and_loading.html
@@ -9,7 +9,7 @@
- Saving and Loading Arrays — MLX 0.3.0 documentation
+ Saving and Loading Arrays — MLX 0.5.0 documentation
@@ -134,8 +134,8 @@
- MLX 0.3.0 documentation - Home
-
+ MLX 0.5.0 documentation - Home
+
@@ -233,6 +233,9 @@
  • Schedulers
diff --git a/docs/build/html/usage/unified_memory.html b/docs/build/html/usage/unified_memory.html
index aa90d4401..89b685178 100644
--- a/docs/build/html/usage/unified_memory.html
+++ b/docs/build/html/usage/unified_memory.html
@@ -9,7 +9,7 @@
- Unified Memory — MLX 0.3.0 documentation
+ Unified Memory — MLX 0.5.0 documentation
@@ -134,8 +134,8 @@
- MLX 0.3.0 documentation - Home
-
+ MLX 0.5.0 documentation - Home
+
@@ -233,6 +233,9 @@
  • Schedulers
diff --git a/docs/build/html/usage/using_streams.html b/docs/build/html/usage/using_streams.html
index 3a41c2d19..e52e40793 100644
--- a/docs/build/html/usage/using_streams.html
+++ b/docs/build/html/usage/using_streams.html
@@ -9,7 +9,7 @@
- Using Streams — MLX 0.3.0 documentation
+ Using Streams — MLX 0.5.0 documentation
@@ -134,8 +134,8 @@
- MLX 0.3.0 documentation - Home
-
+ MLX 0.5.0 documentation - Home
+
@@ -233,6 +233,9 @@
  • Schedulers
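The navigation entries added above include the new atleast_* ops. A minimal, illustrative sketch of their behavior, assuming MLX >= 0.5.0 is installed (they mirror NumPy's atleast_* semantics):

    # Illustrative only: atleast_* promotes an array to a minimum rank.
    import mlx.core as mx

    x = mx.array(3.0)              # 0-dimensional scalar array
    a = mx.atleast_1d(x)           # at least 1 dimension
    b = mx.atleast_2d(x)           # at least 2 dimensions
    c = mx.atleast_3d(x)           # at least 3 dimensions
    print(a.ndim, b.ndim, c.ndim)  # 1 2 3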