mirror of
https://github.com/ml-explore/mlx.git
synced 2025-06-26 02:33:21 +08:00
1 line
132 KiB
JavaScript
Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.Stream", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", "python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", 
"python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", 
"python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.optimizers.AdaDelta", "python/_autosummary/mlx.optimizers.Adafactor", "python/_autosummary/mlx.optimizers.Adagrad", "python/_autosummary/mlx.optimizers.Adam", "python/_autosummary/mlx.optimizers.AdamW", "python/_autosummary/mlx.optimizers.Adamax", "python/_autosummary/mlx.optimizers.Lion", "python/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/_autosummary/mlx.optimizers.Optimizer.init", "python/_autosummary/mlx.optimizers.Optimizer.state", "python/_autosummary/mlx.optimizers.Optimizer.update", "python/_autosummary/mlx.optimizers.RMSprop", "python/_autosummary/mlx.optimizers.SGD", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", "python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/linalg", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", "python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", "python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedLinear", 
"python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", "python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizer", "python/optimizers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/function_transforms", "usage/indexing", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "filenames": ["cpp/ops.rst", "dev/extensions.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.Stream.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", 
"python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", 
"python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", "python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.norm.rst", "python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", 
"python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.optimizers.AdaDelta.rst", "python/_autosummary/mlx.optimizers.Adafactor.rst", "python/_autosummary/mlx.optimizers.Adagrad.rst", "python/_autosummary/mlx.optimizers.Adam.rst", "python/_autosummary/mlx.optimizers.AdamW.rst", "python/_autosummary/mlx.optimizers.Adamax.rst", "python/_autosummary/mlx.optimizers.Lion.rst", "python/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/_autosummary/mlx.optimizers.Optimizer.update.rst", "python/_autosummary/mlx.optimizers.RMSprop.rst", "python/_autosummary/mlx.optimizers.SGD.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/linalg.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", "python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", 
"python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", "python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", "python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizer.rst", "python/optimizers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.Stream", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", 
"mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.divide", "mlx.core.divmod", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.inner", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.norm", "mlx.core.linalg.qr", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.repeat", "mlx.core.reshape", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.transpose", 
"mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.value_and_grad", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_unflatten", "Array", "Data Types", "Devices and Streams", "FFT", "Linear Algebra", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.BatchNorm", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GroupNorm", "mlx.nn.InstanceNorm", "mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softshrink", "mlx.nn.Step", "mlx.nn.Transformer", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.selu", "mlx.nn.silu", "mlx.nn.softshrink", "mlx.nn.step", "Functions", "Initializers", "Layers", "Loss Functions", "Module", "Operations", "Optimizer", "Optimizers", "Random", "Transforms", "Tree Utils", "Compilation", "Function Transforms", "Indexing Arrays", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 221, 301, 304, 306, 307, 308, 310, 311, 312, 313, 314, 315, 316, 317, 318], "provid": [1, 3, 73, 103, 187, 192, 214, 221, 236, 241, 243, 252, 253, 254, 257, 267, 300, 304, 317, 319], "open": [1, 6, 15, 151, 155], "flexibl": [1, 5, 254], "which": [1, 3, 4, 5, 6, 15, 33, 64, 75, 83, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 103, 108, 109, 110, 111, 112, 115, 116, 118, 144, 147, 148, 157, 158, 161, 162, 163, 164, 165, 177, 178, 183, 192, 194, 195, 207, 208, 227, 228, 230, 236, 240, 260, 281, 284, 288, 291, 301, 308, 311, 312, 313, 314, 318, 
319], "user": [1, 3, 221], "mai": [1, 115, 227, 312, 313], "add": [1, 3, 85, 123, 141, 144, 224, 225, 312, 318], "special": 1, "without": [1, 3, 5, 179, 255, 300, 310, 311, 314, 315, 318], "much": [1, 3, 311, 314], "hassl": 1, "while": [1, 3, 6, 158, 260, 314, 315], "librari": [1, 6, 221], "suppli": 1, "effici": [1, 3, 5, 227, 260, 314, 316], "can": [1, 3, 5, 6, 11, 15, 47, 58, 64, 75, 76, 77, 78, 80, 83, 104, 105, 113, 114, 115, 123, 130, 133, 135, 146, 147, 151, 154, 155, 162, 180, 192, 207, 208, 221, 229, 240, 252, 262, 281, 301, 304, 307, 308, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319], "compos": [1, 5, 221, 311, 312, 316], "ani": [1, 3, 5, 15, 213, 214, 215, 221, 230, 236, 237, 240, 248, 257, 267, 301, 310, 311, 312, 314, 316, 317, 318], "number": [1, 15, 52, 64, 67, 73, 86, 103, 106, 112, 117, 141, 144, 145, 147, 150, 153, 155, 157, 159, 187, 189, 192, 194, 195, 221, 223, 224, 225, 227, 228, 231, 232, 255, 256, 267, 269, 270, 271, 272, 308, 311, 312, 319], "applic": [1, 6], "aris": [1, 315], "case": [1, 3, 89, 92, 93, 95, 96, 97, 98, 99, 116, 128, 158, 177, 207, 208, 227, 261, 266, 291, 296, 298, 299, 311, 312, 316, 317, 318, 319], "where": [1, 4, 86, 144, 192, 195, 208, 223, 224, 225, 226, 227, 228, 230, 231, 232, 233, 234, 240, 256, 258, 261, 263, 266, 271, 272, 276, 277, 278, 282, 288, 294, 296, 297, 299, 312, 313], "new": [1, 4, 61, 75, 134, 158, 178, 188, 210, 214, 255, 304, 307, 311, 313, 314, 315], "function": [1, 2, 3, 4, 5, 13, 64, 78, 81, 82, 103, 112, 115, 116, 128, 168, 192, 194, 195, 199, 208, 214, 221, 230, 235, 237, 241, 252, 256, 262, 265, 266, 267, 276, 277, 278, 293, 298, 299, 301, 307, 308, 310, 313, 314, 315, 317], "highli": [1, 6], "optim": [1, 2, 4, 5, 253, 311, 312, 314], "ar": [1, 2, 3, 4, 5, 6, 13, 15, 60, 61, 63, 64, 68, 75, 83, 86, 88, 89, 91, 92, 94, 95, 97, 98, 99, 103, 108, 109, 110, 111, 112, 115, 116, 118, 128, 140, 141, 142, 144, 145, 146, 147, 148, 151, 154, 155, 164, 165, 177, 178, 183, 192, 194, 195, 206, 208, 213, 214, 223, 224, 225, 226, 227, 228, 231, 232, 233, 234, 243, 255, 257, 279, 281, 282, 300, 304, 310, 311, 312, 313, 314, 315, 316, 317, 318], "need": [1, 3, 4, 5, 60, 144, 221, 253, 254, 264, 267, 308, 312, 314, 315, 316, 318], "For": [1, 3, 6, 115, 144, 215, 221, 223, 227, 236, 241, 249, 252, 257, 260, 264, 269, 270, 271, 272, 301, 308, 311, 312, 313, 314, 315, 316, 317, 318], "you": [1, 3, 4, 5, 6, 221, 264, 267, 301, 308, 311, 312, 313, 315, 317, 318], "design": [1, 2, 5, 308, 318], "your": [1, 3, 6, 304, 312, 314], "own": [1, 6, 315], "link": [1, 6], "top": [1, 234], "core": [1, 2, 3, 4, 221, 223, 232, 243, 246, 250, 268, 269, 270, 271, 272, 273, 274, 275, 279, 281, 288, 301, 304, 307, 311, 315, 316], "we": [1, 2, 3, 4, 73, 144, 145, 204, 206, 221, 229, 262, 308, 310, 311, 312, 314, 318], "inner": [1, 311], "work": [1, 3, 6, 311, 312, 313, 314], "go": [1, 3, 312], "over": [1, 3, 4, 12, 14, 22, 23, 24, 25, 66, 67, 89, 92, 95, 98, 107, 115, 117, 127, 129, 131, 132, 142, 143, 160, 172, 173, 181, 187, 193, 223, 224, 225, 231, 233, 258, 281, 312], "simpl": [1, 3, 4, 221, 229, 300, 311, 312, 314], "learn": [1, 2, 4, 5, 200, 201, 202, 203, 204, 205, 206, 211, 212, 223, 231, 232, 233, 256, 258], "step": [1, 3, 4, 15, 201, 221, 311], "involv": [1, 307, 311], "ad": [1, 2, 6, 200, 201, 202, 203, 204, 205, 211, 232, 304, 314, 317], "let": [1, 2, 3, 311, 312, 314, 315], "s": [1, 2, 3, 4, 35, 44, 64, 73, 88, 89, 91, 92, 94, 95, 97, 98, 103, 115, 118, 131, 140, 144, 147, 159, 162, 163, 192, 193, 195, 199, 208, 209, 221, 240, 241, 243, 247, 
248, 252, 307, 308, 311, 312, 314, 315, 316, 317, 318], "sai": [1, 3, 301, 314], "would": [1, 3, 313, 314, 315, 318], "like": [1, 3, 5, 139, 198, 208, 210, 228, 287, 311, 312, 314, 315, 316, 318], "an": [1, 3, 4, 6, 8, 12, 14, 26, 61, 66, 67, 83, 86, 99, 102, 106, 115, 118, 129, 132, 134, 138, 139, 141, 143, 144, 145, 157, 158, 159, 174, 177, 182, 183, 184, 187, 189, 195, 197, 198, 200, 210, 213, 214, 221, 226, 231, 233, 234, 236, 255, 256, 257, 267, 268, 269, 270, 271, 272, 273, 274, 275, 277, 294, 301, 306, 308, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319], "take": [1, 3, 4, 64, 103, 112, 130, 133, 139, 145, 184, 192, 194, 195, 198, 255, 308, 312, 313, 317, 318, 319], "two": [1, 11, 13, 60, 75, 77, 80, 88, 91, 97, 104, 105, 113, 114, 116, 123, 128, 130, 133, 135, 140, 182, 257, 280, 311, 312, 313, 318], "arrai": [1, 3, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 73, 74, 75, 77, 78, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 208, 221, 223, 236, 243, 246, 250, 256, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 299, 301, 304, 311, 312, 314, 315, 316, 318], "x": [1, 2, 3, 4, 81, 106, 115, 145, 148, 159, 164, 168, 190, 191, 196, 206, 214, 221, 223, 230, 231, 232, 233, 234, 235, 236, 256, 258, 259, 261, 263, 264, 266, 276, 277, 278, 291, 293, 294, 295, 296, 297, 298, 299, 304, 307, 311, 312, 313, 314, 315, 316, 318], "y": [1, 2, 3, 4, 196, 202, 221, 223, 227, 231, 232, 233, 234, 258, 283, 288, 291, 307, 311, 312, 314, 315], "scale": [1, 3, 73, 144, 145, 150, 201, 227, 228, 255, 260, 261, 264, 296], "them": [1, 3, 221, 241, 252, 318], "both": [1, 11, 77, 78, 80, 104, 105, 113, 114, 115, 123, 130, 133, 135, 147, 180, 232, 307, 311, 312, 316, 318], "some": [1, 2, 3, 4, 208, 241, 252, 311, 312, 314], "coeffici": [1, 200, 201, 203, 204, 205, 206], "alpha": [1, 144, 204, 211, 261, 292, 294, 296], "beta": [1, 73, 144, 203, 204, 205, 206, 223, 231, 232, 233, 291], "respect": [1, 2, 4, 103, 144, 192, 214, 221, 223, 230, 231, 232, 233, 304, 312, 316], "togeth": [1, 4, 144, 214], "get": [1, 2, 4, 6, 67, 149, 221, 311, 312, 314, 318], "z": [1, 311, 314], "well": [1, 3, 221, 241, 252, 255, 314], "veri": [1, 3, 255, 314, 318], "easili": 1, "do": [1, 3, 6, 204, 221, 242, 252, 301, 304, 311, 312, 314], "just": [1, 4, 311, 313], "write": [1, 3, 221, 315], "out": [1, 6, 227, 228, 249, 311, 312, 313], "follow": [1, 3, 4, 5, 6, 15, 68, 73, 115, 144, 200, 201, 202, 203, 204, 205, 206, 212, 221, 277, 278, 285, 308, 311, 312, 318], "import": [1, 2, 3, 4, 6, 115, 164, 192, 213, 214, 215, 221, 223, 232, 243, 279, 281, 288, 301, 304, 311, 312, 313, 314, 315, 316], "mx": [1, 2, 3, 4, 99, 115, 116, 118, 164, 192, 221, 223, 232, 236, 243, 247, 259, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 285, 288, 295, 301, 304, 307, 308, 311, 312, 313, 314, 315, 316, 317, 318, 319], "def": [1, 2, 3, 4, 192, 221, 304, 311, 312, 313, 314, 315, 318], "simple_axpbi": 1, 
"float": [1, 13, 15, 57, 101, 102, 115, 145, 146, 150, 151, 154, 155, 200, 201, 202, 203, 204, 205, 206, 211, 212, 217, 223, 226, 227, 228, 231, 232, 233, 236, 258, 260, 264, 266, 267, 268, 269, 270, 271, 272, 274, 275, 280, 281, 282, 284, 288, 291, 292, 298, 299], "return": [1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 37, 50, 57, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 73, 74, 75, 77, 78, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 207, 213, 214, 215, 221, 238, 240, 242, 244, 245, 246, 250, 257, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 301, 304, 310, 311, 312, 313, 314, 315, 317, 318], "thi": [1, 3, 4, 6, 12, 13, 14, 15, 22, 23, 24, 25, 79, 112, 115, 116, 123, 127, 128, 129, 131, 132, 142, 143, 147, 172, 173, 174, 181, 183, 193, 208, 221, 226, 227, 228, 237, 238, 240, 241, 244, 245, 246, 250, 252, 253, 254, 255, 257, 266, 269, 270, 271, 272, 277, 278, 287, 299, 304, 310, 311, 312, 314, 315, 317], "perform": [1, 3, 5, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 128, 145, 159, 172, 183, 221, 231, 267, 271, 272, 311, 313, 314, 318], "leav": [1, 83, 214], "differenti": [1, 5], "howev": [1, 208, 221, 230, 231, 308, 311, 314, 315], "vector": [1, 2, 5, 107, 112, 115, 183, 194, 195, 229, 281, 316], "math": [1, 3, 292, 311], "often": [1, 228], "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 115, 145, 213, 315], "same": [1, 3, 6, 13, 60, 61, 64, 67, 68, 93, 96, 97, 98, 103, 112, 141, 147, 159, 194, 196, 207, 221, 223, 226, 231, 232, 257, 268, 269, 270, 271, 272, 273, 274, 275, 281, 292, 304, 308, 311, 313, 318], "realli": 1, "part": [1, 312, 313], "doe": [1, 3, 6, 221, 311, 313, 314, 315], "fast": [1, 230, 278, 318], "so": [1, 3, 6, 103, 192, 226, 307, 311, 314, 318], "decid": [1, 214, 240], "want": [1, 3, 312, 318], "reli": 1, "acceler": [1, 223], "framework": [1, 5], "continu": [1, 312], "impos": 1, "our": [1, 3, 4, 200, 201, 202, 203, 205, 206, 262], "assumpt": 1, "also": [1, 3, 4, 5, 6, 11, 76, 77, 78, 80, 89, 92, 95, 98, 104, 105, 113, 114, 123, 130, 133, 135, 144, 180, 199, 221, 240, 253, 255, 257, 261, 263, 276, 296, 297, 300, 307, 311, 312, 313, 314, 315, 316, 319], "assum": [1, 3, 116, 214, 221, 231], "how": [1, 3, 4, 207, 221, 224, 225, 229, 311, 313, 318], "gradient": [1, 2, 4, 103, 179, 192, 199, 200, 201, 203, 204, 205, 206, 207, 210, 212, 221, 241, 253, 257, 267, 287, 304, 307, 311, 312, 313, 314, 315, 316], "ins": 1, "what": [1, 3, 214], "coincid": 1, "right": [1, 6, 144, 230, 277, 278, 282, 284, 292], "place": [1, 3, 159, 314, 315], "cours": [1, 312], "The": [1, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 35, 44, 50, 57, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 73, 74, 75, 77, 78, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 
140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 162, 163, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 209, 211, 212, 213, 214, 215, 217, 223, 224, 225, 226, 227, 228, 229, 231, 232, 233, 234, 237, 243, 247, 248, 253, 254, 255, 257, 258, 260, 262, 264, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 299, 301, 304, 306, 307, 311, 312, 313, 314, 315, 316, 317, 318, 319], "structur": [1, 207, 312], "from": [1, 3, 4, 5, 73, 75, 94, 95, 97, 98, 102, 115, 118, 128, 139, 144, 146, 147, 148, 149, 151, 154, 164, 177, 179, 180, 183, 184, 196, 198, 213, 214, 215, 221, 234, 241, 243, 255, 269, 270, 271, 272, 274, 275, 282, 291, 301, 310, 311, 312, 314, 315, 316, 317, 318], "frontend": 1, "api": [1, 312], "redirect": 1, "when": [1, 3, 5, 6, 115, 118, 224, 225, 271, 272, 285, 291, 304, 308, 311, 318], "appropri": [1, 311], "fallback": 1, "metal": 1, "vjp": [1, 316], "jvp": [1, 316], "In": [1, 3, 4, 128, 144, 200, 202, 203, 205, 206, 207, 214, 221, 227, 231, 304, 310, 311, 312, 314, 317, 318], "one": [1, 3, 6, 57, 63, 67, 85, 86, 115, 121, 128, 145, 147, 177, 180, 252, 281, 318], "sentenc": 1, "comput": [1, 2, 3, 4, 5, 6, 73, 103, 112, 115, 123, 131, 140, 144, 172, 179, 187, 192, 193, 194, 199, 200, 201, 203, 204, 205, 206, 210, 221, 223, 231, 232, 233, 241, 253, 257, 258, 260, 267, 269, 270, 271, 272, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 307, 311, 312, 316, 318], "graph": [1, 3, 4, 5, 312], "rule": 1, "evalu": [1, 3, 4, 5, 83, 112, 194, 221, 239, 249, 304, 307, 311, 316], "said": [1, 3], "start": [1, 2, 3, 5, 6, 15, 117, 174, 311, 313, 318], "discuss": 1, "more": [1, 4, 8, 57, 75, 128, 162, 163, 221, 223, 227, 260, 264, 267, 269, 270, 271, 272, 308, 311, 312, 313, 316, 318], "detail": [1, 8, 200, 202, 203, 205, 206, 221, 227, 260, 264, 269, 270, 271, 272, 313, 316], "thei": [1, 2, 3, 13, 68, 206, 262, 283, 304, 310, 311, 314, 316, 317, 318], "c": [1, 3, 115, 217, 223, 224, 225, 227, 228, 232, 315, 316, 318], "scalar": [1, 11, 13, 26, 37, 57, 60, 61, 63, 77, 78, 80, 101, 102, 103, 104, 105, 113, 114, 115, 117, 123, 124, 125, 126, 128, 130, 133, 135, 141, 151, 154, 155, 162, 180, 192, 196, 199, 292, 312, 314, 316], "sum": [1, 2, 11, 107, 115, 127, 172, 187, 221, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 313, 315], "element": [1, 10, 11, 16, 17, 18, 19, 20, 21, 24, 52, 62, 69, 70, 73, 77, 78, 80, 81, 82, 84, 86, 100, 101, 104, 105, 108, 109, 110, 111, 113, 114, 119, 120, 121, 122, 123, 124, 125, 126, 130, 133, 135, 136, 142, 144, 145, 156, 157, 160, 168, 169, 170, 171, 175, 176, 180, 183, 185, 186, 192, 196, 226, 227, 228, 235, 256, 260, 263, 293, 294, 297, 311, 312], "wise": [1, 10, 11, 16, 17, 18, 19, 20, 21, 62, 69, 70, 77, 78, 80, 81, 82, 84, 100, 101, 104, 105, 113, 114, 119, 120, 121, 122, 123, 124, 125, 126, 130, 133, 135, 136, 156, 160, 168, 169, 170, 171, 175, 176, 180, 185, 186, 227, 228, 235, 256, 263, 293, 294, 297, 311], "numpi": [1, 3, 4, 5, 11, 13, 15, 61, 77, 78, 80, 104, 105, 113, 114, 123, 128, 130, 133, 135, 180, 314, 316, 317], "style": [1, 11, 13, 77, 78, 80, 104, 105, 113, 114, 123, 128, 130, 133, 135, 180], "broadcast": [1, 11, 13, 61, 63, 77, 78, 80, 102, 104, 105, 113, 114, 123, 128, 130, 133, 135, 146, 147, 154, 155, 180, 184, 196, 255], "between": [1, 5, 
63, 99, 267, 280, 283, 284, 287, 314, 318], "input": [1, 2, 3, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 74, 75, 77, 78, 80, 81, 82, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 139, 140, 141, 142, 143, 144, 145, 153, 156, 157, 158, 159, 160, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 190, 191, 192, 193, 195, 196, 198, 223, 224, 225, 227, 228, 229, 231, 232, 233, 234, 255, 257, 258, 260, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 282, 283, 284, 285, 287, 288, 290, 292, 299, 301, 311, 312, 313, 316, 317], "upcast": 1, "const": [1, 282], "factor": [1, 116, 281], "streamordevic": 1, "stream": [1, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 65, 66, 67, 68, 69, 70, 72, 73, 74, 75, 77, 78, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 193, 196, 197, 198, 318], "schedul": [1, 318], "itself": [1, 208], "call": [1, 3, 4, 27, 101, 208, 221, 229, 241, 252, 262, 304, 307, 311, 312, 314], "other": [1, 3, 5, 115, 206, 221, 242, 304, 311, 313, 314, 316], "within": [1, 24], "simplest": [1, 221], "wai": [1, 3, 6, 221, 311, 312, 313], "about": [1, 3, 4, 314, 318], "term": [1, 200, 201, 202, 203, 204, 205, 211, 282], "exist": [1, 3, 241, 252], "auto": [1, 6], "ax": [1, 12, 14, 22, 23, 58, 85, 88, 89, 91, 92, 94, 95, 97, 98, 99, 107, 115, 127, 129, 131, 132, 141, 143, 172, 177, 181, 182, 187, 188, 193, 312], "multipli": [1, 144, 145, 226, 264], "earlier": 1, "goal": 1, "themselv": [1, 311], "contain": [1, 3, 24, 25, 50, 64, 75, 93, 94, 95, 115, 124, 125, 126, 144, 174, 196, 221, 240, 242, 243, 248, 267, 288, 301, 304, 311, 312], "act": [1, 287], "data": [1, 4, 5, 8, 15, 86, 96, 97, 102, 106, 117, 138, 154, 189, 197, 228, 268, 269, 270, 271, 272, 273, 274, 275, 311, 313, 315], "nor": [1, 103, 192], "rather": [1, 312, 318], "easi": [1, 221], "interfac": 1, "block": [1, 3, 267], "A": [1, 3, 5, 6, 50, 60, 64, 103, 112, 115, 116, 118, 127, 128, 144, 146, 147, 148, 150, 151, 154, 155, 174, 178, 192, 194, 195, 199, 203, 205, 207, 208, 210, 213, 214, 215, 221, 223, 227, 231, 232, 233, 235, 240, 244, 245, 253, 254, 258, 262, 264, 267, 269, 270, 272, 278, 292, 293, 304, 307, 311, 312, 314, 315], "It": [1, 3, 6, 103, 192, 207, 221, 254, 257, 306, 315, 317], "creat": [1, 3, 6, 86, 106, 221, 304, 307, 311, 313, 315], "output": [1, 3, 6, 12, 13, 14, 15, 24, 61, 64, 86, 93, 96, 97, 98, 102, 103, 106, 115, 117, 127, 129, 131, 132, 138, 139, 142, 143, 146, 147, 148, 150, 151, 154, 155, 164, 165, 172, 177, 181, 184, 189, 192, 193, 194, 195, 196, 197, 198, 223, 224, 225, 232, 234, 255, 257, 266, 267, 269, 270, 271, 272, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 299, 301, 311, 312, 313, 314, 315, 316, 317, 318], 
"given": [1, 12, 14, 24, 61, 63, 65, 73, 75, 83, 85, 87, 88, 89, 90, 91, 92, 96, 97, 98, 102, 115, 127, 129, 131, 132, 143, 151, 159, 172, 174, 181, 189, 190, 191, 193, 226, 240, 255, 280, 282, 288], "set": [1, 3, 4, 6, 76, 79, 201, 208, 230, 234, 239, 241, 248, 249, 252, 253, 257, 260, 266, 280, 292, 299, 304, 308, 312, 314], "further": [1, 6, 312], "class": [1, 3, 4, 7, 8, 9, 26, 200, 201, 202, 203, 204, 205, 206, 211, 212, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 281, 304, 306], "under": [1, 115], "These": [1, 64, 184, 281, 318], "word": 1, "bit": [1, 73, 144, 145, 217, 236, 257], "abstract": 1, "back": [1, 3, 315], "give": [1, 3, 4, 24, 311], "ourselv": 1, "concret": [1, 234, 314, 318], "imag": [1, 225, 227, 228], "public": [1, 221], "explicit": [1, 208, 308, 315], "alpha_": 1, "beta_": 1, "must": [1, 6, 63, 102, 115, 146, 147, 151, 154, 155, 196, 315], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 221, 301, 311, 312, 316], "avoid": [1, 311], "unnecessari": [1, 3], "alloc": [1, 304], "respons": 1, "space": [1, 117, 290], "void": 1, "eval_cpu": 1, "std": [1, 274], "overrid": [1, 79], "eval_gpu": 1, "jacobian": [1, 112, 194, 316], "product": [1, 107, 112, 128, 140, 143, 187, 194, 255, 316], "primal": [1, 112, 194], "tangent": [1, 20, 21, 112, 185, 186], "int": [1, 3, 4, 7, 9, 12, 14, 15, 22, 23, 24, 25, 29, 30, 31, 32, 40, 41, 42, 43, 45, 48, 50, 53, 56, 57, 59, 61, 65, 66, 67, 73, 74, 75, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 102, 103, 106, 115, 117, 127, 129, 131, 132, 134, 138, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 157, 158, 159, 172, 173, 174, 177, 178, 181, 182, 183, 184, 187, 188, 189, 190, 191, 192, 193, 195, 197, 221, 223, 224, 225, 229, 231, 232, 233, 234, 255, 257, 258, 260, 264, 267, 280, 281, 285, 290, 292, 304], "argnum": [1, 103, 192, 312], "cotan": 1, "across": [1, 231], "pair": [1, 141, 243, 260], "repres": [1, 3, 288, 292, 315], "axi": [1, 3, 4, 12, 14, 22, 23, 24, 25, 29, 30, 31, 32, 40, 41, 42, 43, 45, 53, 56, 59, 65, 75, 85, 87, 90, 93, 94, 95, 96, 97, 98, 99, 115, 127, 129, 131, 132, 134, 141, 142, 143, 147, 157, 172, 173, 174, 177, 178, 181, 182, 183, 184, 188, 193, 195, 280, 281, 285, 290, 292, 313], "correspond": [1, 12, 14, 57, 63, 73, 75, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 127, 129, 132, 143, 181, 187, 195, 214, 312], "dimens": [1, 3, 12, 14, 22, 23, 44, 50, 57, 67, 75, 85, 94, 95, 97, 98, 99, 107, 115, 116, 127, 128, 129, 131, 132, 143, 144, 147, 153, 181, 184, 187, 188, 193, 223, 224, 225, 227, 228, 231, 232, 233, 255, 258, 260, 267, 281, 311, 312], "vmap": [1, 312, 314, 316], "print": [1, 2, 3, 4, 6, 213, 214, 215, 221, 308, 311, 312, 313, 314, 315, 316], "ostream": 1, "os": [1, 6], "equival": [1, 27, 47, 58, 78, 101, 183, 230, 254, 256, 257, 265], "check": [1, 6, 60, 243, 312, 313], "bool": [1, 12, 13, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 56, 57, 59, 60, 115, 118, 127, 129, 131, 132, 143, 145, 146, 151, 154, 155, 181, 193, 201, 212, 223, 224, 225, 231, 232, 233, 234, 236, 240, 241, 243, 249, 252, 255, 257, 260, 264, 267, 279, 282], "is_equival": 1, "privat": 1, "fall": 1, "eval": [1, 2, 3, 4, 221, 304, 307, 311, 312, 314, 316], "deriv": [1, 312, 314], "base": [1, 115, 120, 122, 205, 260, 267, 304, 306, 307, 308, 311, 313], "abov": [1, 3, 6, 144, 190, 204, 221, 312, 313, 314, 318], "demonstr": [1, 315], "treat": [1, 94, 95, 97, 98, 183, 311], "paramet": [1, 2, 3, 4, 10, 11, 12, 13, 
14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 73, 74, 75, 77, 78, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 210, 211, 212, 213, 214, 215, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 236, 237, 240, 241, 243, 248, 249, 252, 253, 254, 255, 256, 257, 258, 260, 262, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 299, 300, 301, 304, 306, 307, 311, 312, 314], "produc": [1, 64, 255, 301], "through": [1, 179, 206, 267, 311, 312, 315], "construct": [1, 4, 74, 102, 138, 197], "its": [1, 6, 128, 142, 153, 189, 199, 203, 204, 205, 215, 221, 257, 315, 318], "type": [1, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 50, 57, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 73, 74, 75, 77, 78, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 213, 221, 260, 267, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 311, 313], "shape": [1, 3, 4, 47, 60, 61, 66, 67, 75, 87, 90, 93, 96, 97, 98, 102, 112, 128, 138, 139, 146, 147, 148, 150, 151, 154, 155, 158, 184, 194, 196, 197, 198, 221, 223, 224, 225, 227, 228, 232, 234, 243, 268, 269, 270, 271, 272, 273, 274, 275, 281, 292, 307, 311, 312, 313, 316, 318], "pass": [1, 3, 4, 47, 58, 140, 141, 192, 199, 213, 214, 221, 241, 252, 253, 254, 257, 262, 311, 314], "re": [1, 4, 6, 301], "now": [1, 3, 6, 257, 311, 315], "promot": 1, "dtype": [1, 3, 15, 26, 33, 57, 86, 99, 102, 106, 115, 116, 117, 138, 148, 150, 151, 154, 155, 189, 197, 208, 217, 268, 269, 270, 271, 272, 273, 274, 275, 279, 281, 288, 311, 312, 313, 315, 316, 317], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 15, 86, 106, 115, 116, 117, 138, 148, 150, 154, 155, 189, 197, 208, 217, 268, 269, 270, 271, 272, 273, 274, 275, 279, 281, 288, 311, 312, 313, 314, 315, 316, 317], "non": [1, 6, 235, 250, 293, 304], "point": [1, 2, 3, 6, 101, 145, 217], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 33, 96, 97, 98, 118, 236, 315], "up": [1, 3, 257, 311], "determin": [1, 75, 247, 317], "x_cast": 1, "astyp": [1, 3, 236, 315], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 15, 48, 53, 59, 65, 66, 67, 74, 75, 86, 99, 103, 115, 116, 141, 146, 150, 155, 157, 159, 174, 178, 189, 190, 191, 192, 193, 195, 200, 201, 203, 204, 205, 206, 208, 211, 212, 213, 221, 223, 224, 225, 226, 227, 228, 230, 231, 232, 233, 
256, 259, 260, 261, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 281, 283, 284, 288, 291, 292, 294, 295, 296, 298, 299, 301, 304, 308, 311, 312, 313, 314, 315, 316, 317], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 221, 311], "resolv": 1, "No": [1, 3], "happen": [1, 3, 267, 307, 311, 314], "alon": [1, 315], "effect": [1, 227, 311, 314], "onli": [1, 3, 5, 6, 60, 66, 67, 115, 144, 217, 221, 240, 241, 243, 249, 252, 253, 254, 304, 311, 312, 317, 318], "execut": [1, 6, 315, 318], "depend": [1, 2, 57, 115, 313, 317, 318], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 77, 78, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 166, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 193, 196, 197, 198, 318, 319], "specifi": [1, 15, 33, 67, 75, 94, 95, 102, 103, 115, 117, 134, 138, 147, 157, 182, 183, 184, 187, 188, 192, 195, 197, 223, 266, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 299, 312, 318], "memori": [1, 5, 201, 267, 304, 311, 314, 315], "ha": [1, 3, 4, 5, 57, 64, 75, 93, 94, 96, 97, 98, 103, 147, 223, 234, 304, 307, 311, 313, 314, 316, 318], "been": [1, 3, 314], "try": [1, 6], "naiv": [1, 312], "gener": [1, 2, 15, 86, 94, 95, 117, 146, 150, 151, 154, 155, 267, 308, 311, 313, 314, 319], "version": [1, 6, 73, 123, 127, 144, 172, 195, 308, 312, 313], "declar": 1, "member": [1, 221, 246, 250], "method": [1, 3, 7, 8, 9, 26, 200, 201, 202, 203, 204, 205, 206, 208, 211, 212, 221, 247, 304, 306], "each": [1, 50, 73, 83, 128, 141, 144, 145, 147, 157, 164, 165, 174, 188, 195, 196, 227, 228, 229, 231, 260, 267, 279, 281, 308, 311, 314], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 64, 221, 311], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 81, 145, 192, 200, 201, 202, 203, 204, 205, 206, 211, 212, 221, 311, 312, 318], "readi": 1, "fill": [1, 102, 139, 189, 198, 268, 269, 270, 271, 272, 274, 275], "malloc_or_wait": 1, "synchron": [1, 311], "avail": [1, 2, 3, 4, 6, 8, 217, 318], "There": [1, 221, 311], "wait": [1, 3], "here": [1, 3, 311, 312, 314, 317, 318], "request": 1, "pressur": 1, "condit": [1, 196, 318], "set_data": 1, "nbyte": 1, "collect": [1, 214, 310], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 50, 67, 73, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 102, 106, 115, 144, 145, 147, 158, 174, 177, 201, 221, 224, 225, 229, 232, 257, 314, 315], "map": [1, 4, 118, 214, 229, 236], "linear": [1, 3, 4, 5, 208, 214, 221, 230, 243, 257, 259, 261, 263, 276, 277, 278, 295, 296, 297, 301, 304, 311], "indic": [1, 13, 22, 23, 24, 25, 103, 108, 109, 110, 111, 174, 183, 184, 192, 249, 251, 281, 288, 313], "offset": [1, 3, 75], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 66, 67, 224, 225, 260, 313], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 12, 13, 14, 15, 22, 23, 24, 25, 60, 64, 65, 66, 67, 73, 74, 75, 86, 87, 88, 89, 90, 91, 92, 93, 
94, 95, 96, 97, 98, 99, 103, 106, 115, 116, 117, 118, 127, 129, 131, 132, 138, 142, 143, 144, 145, 146, 147, 148, 150, 151, 153, 154, 155, 157, 158, 159, 173, 174, 177, 178, 181, 187, 188, 189, 190, 191, 192, 193, 195, 197, 200, 201, 202, 203, 204, 205, 206, 211, 212, 217, 223, 224, 225, 232, 234, 236, 241, 243, 249, 252, 255, 256, 257, 260, 264, 265, 267, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 304, 308, 310, 311, 312, 315, 317, 319], "row": [1, 86, 106, 144, 189], "major": 1, "henc": [1, 144, 311], "doesn": [1, 221], "addit": [1, 3, 11, 118, 223, 231, 233, 255, 258, 304, 312], "abl": [1, 144], "all": [1, 4, 6, 13, 24, 67, 86, 89, 92, 95, 98, 128, 141, 142, 177, 221, 236, 237, 241, 244, 245, 246, 250, 252, 255, 257, 264, 267, 301, 304, 306, 308, 311, 313, 314, 316, 319], "incom": 1, "accordingli": 1, "dispatch": 1, "float16": [1, 118, 217, 236, 314, 315], "bfloat16": [1, 315], "complex64": 1, "throw": 1, "error": [1, 6, 81, 82, 174, 230, 257, 276, 277, 278, 287, 289, 312, 315], "encount": [1, 312], "unexpect": [1, 15], "regist": [1, 4], "op": [1, 140, 241, 314], "assert": 1, "2": [1, 2, 3, 4, 67, 74, 75, 81, 88, 91, 93, 94, 95, 96, 97, 98, 99, 115, 116, 122, 128, 144, 153, 187, 189, 190, 191, 200, 202, 203, 204, 208, 211, 217, 221, 225, 230, 258, 264, 268, 269, 270, 271, 272, 273, 274, 275, 277, 281, 282, 284, 291, 292, 301, 304, 311, 312, 313, 314, 315, 316, 317, 318], "1": [1, 3, 4, 15, 24, 25, 66, 67, 74, 75, 87, 88, 90, 91, 93, 94, 95, 96, 97, 98, 99, 107, 115, 116, 128, 140, 142, 144, 147, 150, 155, 168, 173, 183, 192, 200, 201, 202, 203, 204, 205, 206, 208, 211, 212, 217, 221, 223, 224, 225, 226, 227, 228, 230, 231, 232, 233, 234, 256, 258, 260, 261, 264, 266, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 280, 281, 282, 283, 284, 285, 287, 288, 290, 291, 292, 296, 299, 301, 304, 307, 311, 312, 313, 315, 316, 317, 318], "correct": [1, 6, 203, 204, 205, 313, 314], "els": [1, 3, 221, 241, 314], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 13, 66, 67, 99, 116, 118, 128, 144, 312, 313, 315, 317], "have": [1, 3, 6, 13, 60, 94, 95, 97, 98, 128, 147, 206, 208, 213, 255, 262, 310, 311, 313, 314, 318], "rememb": 1, "3": [1, 3, 6, 99, 115, 116, 201, 206, 270, 272, 308, 311, 313, 315, 316], "complic": 1, "keep": [1, 12, 14, 22, 23, 127, 129, 131, 132, 143, 181, 193, 221, 240, 312, 314], "mind": [1, 3], "half": [1, 15, 151, 155, 260, 314], "precis": [1, 3, 207, 221, 230, 311], "direct": [1, 3, 206, 238, 318], "fix": [1, 3, 6, 314], "possibl": [1, 3, 128, 174, 229, 311, 313, 318], "due": 1, "transpos": [1, 3, 27, 145], "aren": 1, "guarante": 1, "fit": [1, 144, 318], "requir": [1, 3, 221, 314, 315], "column": [1, 86, 106, 144], "inplac": 1, "expect": [1, 3, 224, 225, 226, 227, 228, 264, 267, 282, 311, 313], "answer": 1, "copi": [1, 3, 5, 142, 173, 315], "simpli": [1, 3, 6, 259, 295, 304, 311, 312], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 75, 99, 103, 124, 126, 128, 142, 153, 182, 187, 192, 201, 203, 204, 205, 208, 213, 221, 231, 280, 288, 311, 312, 315, 318], "mode": [1, 68, 239, 249, 251, 271, 272], "i": [1, 3, 112, 115, 204, 221, 224, 225, 227, 228, 241, 287, 311, 312], "e": [1, 4, 6, 81, 112, 168, 202, 223, 224, 225, 227, 228, 231, 232, 233, 241, 258, 300, 307, 311, 314, 319], "match": [1, 6, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 243, 281, 313, 315], "transposit": 1, "data_s": 1, "items": 1, "flag": [1, 311, 315], "copy_inplac": 1, 
"copytyp": 1, "n": [1, 3, 26, 66, 67, 86, 87, 89, 90, 92, 93, 96, 98, 106, 189, 193, 223, 224, 225, 227, 228, 287, 292], "incx": 1, "inci": 1, "great": 1, "But": [1, 318], "criteria": 1, "luckili": [1, 314], "alwai": [1, 213, 312], "With": 1, "final": [1, 2, 3, 4], "singl": [1, 4, 83, 112, 118, 141, 194, 311, 313, 317], "row_contigu": 1, "col_contigu": 1, "common": [1, 311, 314], "hit": 1, "mileston": 1, "enough": [1, 314], "run": [1, 3, 4, 5, 6, 140, 200, 201, 203, 204, 205, 223, 236, 311, 314, 318, 319], "If": [1, 3, 6, 12, 13, 14, 15, 22, 23, 24, 25, 57, 60, 63, 65, 68, 74, 75, 83, 96, 97, 98, 101, 102, 103, 115, 118, 127, 128, 129, 131, 132, 138, 141, 142, 143, 147, 157, 172, 173, 174, 181, 183, 184, 187, 192, 193, 195, 197, 201, 214, 223, 224, 225, 231, 233, 234, 241, 243, 252, 257, 260, 262, 264, 279, 281, 292, 311, 312, 314, 317, 318, 319], "plan": [1, 311], "stop": [1, 3, 15, 117, 179, 312, 313], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 318], "silicon": [1, 3, 5, 6, 318], "address": 1, "shade": 1, "languag": [1, 217], "kernel": [1, 66, 67, 311, 313], "written": 1, "help": [1, 3, 311, 318], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6, 312], "cpp": 1, "algorithm": [1, 206], "launch": [1, 313], "exactli": [1, 3, 243, 312], "mani": [1, 174, 224, 225, 229, 311, 314], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 64, 201, 204, 206, 207, 208, 212, 214, 223, 236, 243, 248, 254, 307, 311, 314], "assign": [1, 304], "axpby_gener": 1, "buffer": [1, 315], "constant": [1, 3, 6, 141, 211, 221, 223, 231, 233, 258, 282, 292, 311, 315], "4": [1, 3, 73, 99, 115, 144, 145, 164, 217, 223, 232, 257, 267, 269, 270, 271, 279, 311, 313, 316, 318], "5": [1, 2, 3, 6, 115, 146, 211, 223, 226, 227, 228, 232, 265, 268, 271, 272, 291, 298, 301, 311, 312, 313], "x_stride": 1, "6": [1, 3, 115, 164, 211, 267, 270, 277, 278, 282, 292, 311, 313, 316], "y_stride": 1, "7": [1, 3, 115, 144, 313], "ndim": [1, 99, 115], "8": [1, 3, 6, 115, 144, 200, 201, 202, 203, 204, 205, 211, 217, 232, 267, 280, 311, 313, 316, 318], "uint": 1, "index": [1, 5, 7, 9, 24, 85, 86, 103, 142, 183, 184, 192], "thread_position_in_grid": 1, "convert": [1, 57, 99, 257, 314, 315, 316], "instanti": [1, 4, 314], "uniqu": [1, 308], "host": 1, "name": [1, 118, 144, 145, 162, 163, 164, 165, 221, 231, 240, 243, 245, 313, 317], "identifi": [1, 213, 310], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "compil": [1, 5, 6, 76, 79, 312, 314], "mlx_ext": 1, "metallib": [1, 6], "see": [1, 3, 4, 6, 8, 28, 29, 30, 31, 32, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 115, 162, 163, 221, 223, 227, 230, 239, 256, 257, 260, 261, 264, 265, 269, 270, 271, 272, 276, 277, 278, 296, 311, 312, 313, 316, 318], "later": [1, 6], "co": [1, 264, 312], "locat": [1, 253, 254, 318], "share": [1, 5, 73, 144, 145], "register_librari": 1, "potenti": 1, "path": [1, 6, 164, 165, 243], "tri": 1, "load": [1, 4, 5, 243], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 26, 37, 57, 146, 151, 154, 155, 195, 213, 214, 227, 310], "why": [1, 3], "packag": [1, 2, 4, 301], "process": [1, 3, 68, 214, 228, 229, 267, 310], "logic": [1, 124, 125, 126], "grid": 1, "shown": 1, "below": [1, 6, 115, 189, 191, 217, 314], "prepar": [1, 3], "carri": 1, "should": [1, 2, 3, 4, 6, 75, 112, 144, 184, 192, 194, 213, 221, 224, 225, 227, 228, 249, 255, 262, 281, 283, 288, 304, 310, 311, 312, 314, 315, 319], "d": [1, 3, 74, 75, 107, 115, 128, 140, 183, 189, 190, 191, 200, 203, 205, 215, 228, 318], 
"ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 128, 221, 311, 314, 316, 318], "sure": [1, 3, 6, 221, 311], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 68, 103, 115, 118, 161, 162, 163, 164, 165, 192, 213, 215, 236, 237, 240, 241, 243, 245, 247, 252, 271, 272, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292], "encod": [1, 260, 264, 267, 281], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 221], "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": [1, 107, 288, 312], "than": [1, 3, 57, 68, 75, 78, 104, 105, 113, 114, 128, 201, 206, 214, 260, 266, 288, 291, 299, 311, 312, 318], "max": [1, 115, 130, 201, 205, 256, 280, 282, 283, 288, 292, 294, 311, 312, 318], "allow": [1, 221, 254, 304, 306, 313, 316], "tgp_size": 1, "min": [1, 115, 133, 256, 294], "maxtotalthreadsperthreadgroup": 1, "3d": [1, 223, 228], "mtl": 1, "group_dim": 1, "grid_dim": 1, "divid": [1, 101, 144], "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 314, 316], "thing": [1, 3], "note": [1, 3, 6, 13, 66, 67, 94, 95, 115, 144, 147, 221, 315, 317], "befor": [1, 3, 6, 24, 142, 208, 240, 267, 313, 314], "move": [1, 134, 318], "track": [1, 221, 223], "activ": [1, 6, 227, 235, 266, 267, 293, 298, 299, 300, 311], "command": [1, 6], "instead": [1, 6, 221, 254, 264, 312, 314], "end_encod": 1, "end": [1, 75, 144, 261, 266, 284, 291, 296, 298, 299], "until": [1, 314, 316], "limit": [1, 63, 313], "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 164, 165, 314], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3, 312], "far": [1, 307], "built": [1, 6, 314], "includ": [1, 237, 248, 257, 282, 311, 312, 313, 316, 317, 319], "forward": [1, 192, 311, 314], "diff": 1, "push": 1, "along": [1, 22, 23, 64, 65, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 115, 157, 172, 174, 178, 183, 184, 187, 221], "similarli": [1, 6, 128, 312, 314], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 188, 264], "arg": [1, 3, 8, 47, 58, 83, 164, 165], "push_back": 1, "fulli": [1, 5, 311, 315, 318], "overal": 1, "directori": [1, 3, 6], "extens": [1, 118, 217, 247, 317], "h": [1, 66, 67, 115, 223, 225, 227, 228, 312, 314], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 9, 26, 221, 304], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6, 311], "hold": [1, 3, 8, 115, 311], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 206, 304, 315, 318], "compon": [1, 3], "etc": [1, 144, 221], "pybind11_modul": 1, "m": [1, 6, 86, 115, 189, 200], "doc": [1, 4], "sampl": [1, 2, 3, 117, 146, 147, 148, 151, 154, 155, 269, 270, 271, 272, 274, 275, 282, 288, 292, 308, 311], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 
186, 187, 188, 189, 190, 191, 192, 193, 195, 196, 197, 198, 201, 213, 214, 230, 236, 240, 241, 252, 255, 264, 267, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 313], "r": [1, 3, 116, 192, 227], "pbdoc": 1, "most": [1, 147, 210, 221, 311, 312, 313, 314], "complex": [1, 94, 95, 96, 97, 98, 146, 151, 154, 155, 213, 221, 254, 311, 312], "bell": 1, "whistl": 1, "liter": [1, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292], "string": [1, 315, 317], "modul": [1, 3, 4, 199, 210, 257, 262, 267, 301, 310, 311, 314], "ensur": [1, 6, 287], "caster": 1, "find_packag": 1, "config": 1, "add_librari": 1, "sourc": [1, 134, 188], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, "target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 192, 279, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 311], "destin": [1, 134], "automat": [1, 5, 118, 316, 317, 318], "practic": [1, 311], "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 311], "describ": [1, 314], "util": [1, 3, 5, 6, 164, 221], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 217], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, "package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 12, 13, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 56, 59, 60, 115, 118, 127, 129, 131, 132, 143, 181, 193, 196, 201, 212, 213, 214, 217, 231, 232, 234, 241, 243, 252, 255, 257, 260, 264, 267, 279, 282, 315], "python_requir": 1, "even": [1, 3, 311, 314, 315], "though": [1, 3, 311, 314, 315], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 24, 99, 101, 142, 144, 223, 231, 233, 255, 267, 291, 311, 318], "plai": [1, 3], "ones": [1, 3, 139, 164, 189, 253, 254, 257, 313], "b": [1, 3, 11, 13, 60, 77, 78, 80, 101, 104, 105, 107, 113, 114, 115, 123, 124, 126, 128, 130, 133, 135, 140, 144, 180, 187, 192, 234, 312, 313, 314, 315, 316, 317, 318], "f": [1, 2, 4, 115, 204, 221, 311, 315], "item": [1, 2, 3, 4, 214, 314, 315, 316], "true": [1, 2, 3, 13, 60, 115, 118, 145, 172, 196, 201, 213, 214, 217, 221, 223, 224, 225, 231, 232, 233, 234, 240, 241, 243, 249, 252, 257, 260, 264, 267, 279, 287], "quick": [1, 5], "benchmark": [1, 311], "compar": [1, 60, 311], "time": [1, 3, 6, 221, 311, 312, 314, 318], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 267, 318], "random": [1, 2, 3, 4, 5, 223, 232, 243, 249, 311, 312, 318, 319], "normal": [1, 2, 3, 154, 221, 223, 231, 232, 233, 258, 267, 269, 271, 315, 318], "bench": 1, "warm": [1, 311], "rang": [1, 2, 3, 4, 6, 15, 99, 117, 270, 272, 277, 278, 307, 308, 311, 312, 314, 318], "100": [1, 2, 3, 311, 312, 314, 318], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4, 311], "custom": [1, 267], "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 200, 201, 202, 203, 204, 205, 211, 311], "awai": [1, 3], "good": [1, 6, 311, 318], "nn": [1, 3, 4, 164, 208, 210, 214, 221, 301, 304, 307, 311, 314], "grad": [1, 2, 4, 192, 207, 307, 311, 312, 313, 314, 316], "full": [1, 4, 47, 58, 68, 172, 253, 254, 282, 311, 314], "implement": [2, 4, 115, 200, 201, 202, 203, 205, 206, 207, 229, 240, 255, 260, 262, 264, 266, 267, 299, 
306, 311, 312, 315], "basic": [2, 159, 312], "model": [2, 4, 5, 164, 199, 207, 208, 210, 214, 221, 236, 239, 241, 243, 247, 249, 251, 252, 253, 255, 267, 301, 304, 307, 311, 314], "problem": [2, 4, 221], "metadata": [2, 118, 162], "num_featur": [2, 223], "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 214, 308, 311, 314], "sgd": [2, 4, 206, 208, 307, 311], "lr": [2, 206], "01": [2, 204], "rate": [2, 200, 201, 202, 203, 204, 205, 206, 211, 212], "ll": [2, 4, 284, 311, 312], "synthet": 2, "dataset": [2, 314], "matrix": [2, 73, 74, 86, 106, 115, 116, 128, 144, 145, 257, 273, 301], "ground": [2, 3, 281, 291], "truth": [2, 281, 291], "w_star": 2, "valu": [2, 3, 10, 13, 15, 22, 23, 37, 57, 60, 63, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 102, 115, 117, 141, 146, 147, 148, 150, 151, 154, 155, 162, 183, 184, 192, 195, 199, 201, 204, 213, 214, 217, 226, 227, 228, 232, 234, 240, 255, 256, 260, 265, 266, 267, 268, 279, 280, 281, 282, 283, 284, 286, 287, 288, 289, 290, 291, 299, 304, 312], "gaussian": [2, 230, 276, 277, 278, 282], "nois": 2, "exampl": [2, 3, 4, 15, 99, 115, 116, 183, 208, 221, 223, 232, 241, 243, 249, 252, 268, 269, 270, 271, 272, 273, 274, 275, 279, 281, 288, 301, 307, 308, 312, 313, 314, 315, 316, 317], "noisi": 2, "label": [2, 281, 288], "ep": [2, 200, 201, 202, 203, 204, 205, 211, 223, 231, 232, 233, 258, 280, 282, 292], "1e": [2, 4, 13, 200, 201, 202, 203, 204, 205, 208, 211, 223, 231, 232, 233, 258, 280, 282, 292], "us": [2, 3, 4, 5, 6, 15, 73, 76, 78, 99, 115, 116, 128, 144, 145, 157, 158, 200, 201, 203, 204, 205, 206, 207, 208, 213, 221, 227, 229, 230, 234, 236, 240, 247, 253, 254, 255, 257, 260, 264, 267, 271, 272, 277, 278, 280, 301, 304, 306, 307, 308, 310, 311, 312, 313, 316, 318], "weight": [2, 66, 67, 201, 204, 206, 208, 212, 214, 221, 243, 247, 257, 279, 281, 304, 312, 314], "squar": [2, 3, 106, 160, 175, 192, 200, 201, 203, 204, 205, 214, 221, 258, 289, 291, 312, 315], "loss": [2, 4, 192, 221, 307, 311, 312, 314], "loss_fn": [2, 4, 307, 311, 312], "w": [2, 67, 73, 144, 145, 192, 212, 223, 225, 227, 228, 234, 312], "mean": [2, 3, 4, 150, 192, 221, 223, 231, 241, 258, 274, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 311, 312, 315], "grad_fn": [2, 311, 312], "initi": [2, 3, 208, 221, 223, 231, 232, 233, 234, 256, 258, 268, 269, 270, 271, 272, 273, 274, 275, 304, 311, 314], "randomli": [2, 3, 226, 227, 228], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 221, 308, 311, 314, 318], "verifi": [2, 6], "close": [2, 5, 6, 13], "error_norm": 2, "5f": 2, "someth": [2, 3, 313], "00005": 2, "00364": 2, "complet": [2, 3, 6, 253, 254, 312, 318], "logist": [2, 168, 263, 277, 278, 297], "github": [2, 4, 6, 311], "repo": [2, 4, 6, 311], "enabl": [3, 6, 79, 212], "larg": [3, 221, 255, 287, 311, 314], "ish": 3, "transform": [3, 5, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 199, 221, 223, 231, 233, 234, 240, 241, 252, 257, 260, 313], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 24, 114, 142, 260, 291], "200": 3, "line": [3, 314, 315], "python": [3, 37, 50, 57, 83, 207, 208, 210, 213, 214, 215, 304, 310, 312, 315], "neural": [3, 5, 211, 229, 235, 269, 270, 293, 301, 304], "network": [3, 5, 211, 223, 227, 229, 269, 270, 301, 304], "build": [3, 5, 271, 304, 311], "concis": 3, "architectur": [3, 6, 221, 254, 318], "notabl": [3, 5], "rope": [3, 221], "posit": [3, 24, 75, 99, 103, 111, 134, 142, 192, 214, 221, 224, 225, 255, 260, 264, 282, 292], "option": [3, 12, 14, 15, 22, 23, 24, 25, 26, 31, 32, 64, 65, 66, 67, 
68, 73, 74, 75, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 102, 103, 106, 110, 111, 115, 116, 117, 118, 127, 129, 131, 132, 138, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 153, 154, 155, 157, 158, 172, 173, 174, 177, 178, 181, 183, 184, 187, 188, 189, 190, 191, 192, 193, 195, 197, 200, 201, 202, 203, 204, 205, 206, 208, 211, 212, 213, 214, 223, 224, 225, 234, 236, 240, 241, 243, 252, 255, 257, 260, 264, 267, 268, 269, 270, 271, 272, 273, 274, 275, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 308, 311, 317, 319], "kei": [3, 146, 147, 148, 150, 151, 153, 154, 155, 213, 214, 240, 241, 252, 255, 260, 308, 310, 312], "cach": [3, 260, 311], "concaten": 3, "project": [3, 255], "llamaattent": 3, "self": [3, 4, 7, 9, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 57, 58, 59, 221, 235, 293, 304], "dim": [3, 229, 231, 232, 233, 255, 258, 260, 264, 267], "num_head": [3, 255, 267], "super": [3, 4, 221, 304], "tradit": [3, 227, 228, 260], "query_proj": 3, "bia": [3, 73, 144, 145, 203, 204, 205, 208, 214, 221, 224, 225, 234, 241, 243, 252, 255, 257, 312], "key_proj": 3, "value_proj": 3, "out_proj": [3, 304], "__call__": [3, 4, 221, 304], "queri": [3, 255], "mask": [3, 249, 255, 313], "extract": [3, 74, 75, 221, 240, 304], "l": [3, 4, 221, 223, 224, 291], "reshap": [3, 115, 313], "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 81, 200, 202, 203, 204, 211, 223, 231, 232, 233, 234, 258, 264, 269, 270, 271, 272, 311], "score": [3, 288], "softmax": [3, 281], "values_hat": 3, "rm": [3, 6, 201], "swiglu": 3, "rmsnorm": [3, 221], "llamaencoderlay": 3, "mlp_dim": [3, 267], "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 263, 277, 278, 297], "instanc": [3, 144, 215, 221, 232, 236, 237, 238, 241, 244, 245, 252, 254, 262, 304, 315], "embed": [3, 221, 260, 264, 280], "emb": [3, 229, 264], "token": [3, 229], "num_lay": [3, 4, 307], "vocab_s": 3, "norm": [3, 205, 206, 231, 292], "multiheadattent": [3, 221], "create_additive_causal_mask": 3, "list": [3, 8, 12, 14, 26, 29, 30, 40, 41, 42, 43, 45, 53, 56, 57, 59, 61, 64, 65, 83, 85, 88, 89, 91, 92, 94, 95, 97, 98, 102, 103, 112, 115, 127, 129, 131, 132, 138, 141, 143, 146, 147, 148, 150, 151, 154, 155, 158, 162, 172, 174, 177, 178, 181, 187, 188, 192, 193, 194, 197, 203, 204, 205, 206, 213, 215, 221, 241, 243, 244, 245, 246, 250, 252, 253, 254, 304, 310, 311, 312, 314], "still": [3, 6, 115, 311, 314], "consid": [3, 13, 60, 213, 214, 231, 310], "train": [3, 4, 221, 223, 226, 227, 228, 239, 241, 252, 269, 270], "ignor": [3, 63, 64, 83, 201], "whatsoev": 3, "rest": [3, 214, 260], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 308], "temp": 3, "causal": 3, "save": [3, 5, 118, 144, 162, 163, 164, 165, 247, 314], "append": [3, 128, 311, 314], "store": 3, "per": [3, 4, 73, 144, 145, 223, 231, 232, 233, 258, 306, 311, 314], "care": [3, 314], "last": [3, 25, 57, 89, 92, 94, 95, 97, 98, 99, 107, 116, 128, 147, 173, 187, 224, 225, 227, 228, 231, 315], "logit": [3, 147, 279, 281, 311], "next": [3, 4], "categor": 3, "lazili": [3, 221], "noth": [3, 221, 314], "yet": [3, 115, 208, 221, 304, 312, 313, 314, 316], "forc": [3, 4, 221, 316], "choos": [3, 260], "pars": 3, "feed": 3, "loop": [3, 4, 311, 312, 314], "unsqueez": 3, "sequenc": [3, 223, 224, 267, 308, 318], "length": [3, 177, 223, 224], "len": [3, 89, 92, 95, 98], "overwrit": 3, "discard": [3, 213], "old": 3, "moment": [3, 201, 203, 204, 205], "anymor": 3, "everyth": 3, 
"small": [3, 223, 231, 233, 258, 282, 287, 292, 311, 318], "10": [3, 4, 120, 159, 164, 214, 221, 243, 301, 311, 313], "12": 3, "8192": 3, "1024": 3, "actual": [3, 15, 243, 304, 314], "materi": [3, 5], "could": [3, 221], "20_000": 3, "machin": [3, 5, 6, 211], "8gb": 3, "ram": 3, "32": [3, 4, 144, 145, 217, 311], "44": 3, "doubl": 3, "bracket": 3, "becaus": [3, 221, 314], "batch": [3, 128, 223, 224, 225, 227, 228, 255, 314], "zip": [3, 4], "haven": 3, "anyth": [3, 192, 314], "result": [3, 15, 57, 73, 107, 115, 118, 128, 140, 145, 157, 159, 178, 187, 196, 214, 264, 311, 312, 315], "similar": [3, 214, 253, 254, 255, 280, 315, 317], "runtim": [3, 311], "section": [3, 6, 174, 292, 311, 312], "access": [3, 37, 208, 221, 304, 314, 318], "origin": [3, 75, 200, 201, 202, 203, 205, 206, 223, 248, 269, 270, 271, 272, 315], "sentencepiec": 3, "pytorch": [3, 5, 231, 312], "compat": [3, 147, 317], "npz": [3, 118, 164, 165, 243, 247, 317], "file": [3, 6, 118, 161, 162, 163, 164, 165, 243, 247, 312, 317], "directli": 3, "argpars": 3, "itertool": [3, 214], "starmap": [3, 214], "np": [3, 4, 315, 316], "torch": [3, 315], "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": [3, 253, 254, 267, 291], "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 47, 58, 231, 288], "submodul": [3, 4, 221, 241, 242, 252, 254], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 208, 221, 307, 308, 311], "savez": [3, 247, 317], "k": [3, 74, 86, 189, 190, 191, 234, 241], "v": [3, 68, 208, 221, 241, 315], "left": [3, 115, 144, 230, 260, 277, 278, 282, 284, 292], "disk": 3, "text": [3, 201, 206, 235, 261, 266, 269, 270, 271, 272, 282, 283, 284, 287, 288, 291, 293, 294, 296, 298, 299], "format": [3, 118, 161, 162, 163, 164, 165, 315], "oper": [3, 5, 33, 172, 179, 184, 206, 221, 267, 311, 312, 313, 314, 315, 316, 318, 319], "dictionari": [3, 64, 118, 162, 163, 209, 213, 221, 240, 248, 253, 254, 310, 317], "represent": [3, 144, 213, 215], "tree_unflatten": 3, "helper": [3, 311], "weight_fil": 3, "incur": 3, "sever": [3, 66, 67, 164, 165, 311, 317], "futur": [3, 257, 313, 314], "pth": 3, "current": [3, 5, 6, 66, 67, 144, 201, 221, 314], "around": 3, "m1": [3, 311, 312, 318], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, "ago": 3, "never": [3, 314], "long": 3, "info": [3, 6], "247": 3, "press": [3, 115], "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], "purs": 3, "greater": [3, 24, 105, 142, 266, 299], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": [3, 312], "off": [3, 6, 314], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 214], "hi": 3, "bundl": 3, "hard": 3, "wet": 3, "he": [3, 271, 272], "were": [3, 318], "cry": 3, "watch": [3, 311], "him": 3, "observ": 3, "numer": [3, 115, 123, 127, 172, 200, 201, 202, 203, 204, 205, 211, 223, 231, 232, 233, 258, 280, 282, 292, 311, 314], "crowd": 3, "wa": [3, 314], "hurri": 3, "437": 3, "330": 3, "second": [3, 75, 124, 126, 128, 182, 192, 201, 203, 204, 205, 280, 288, 312, 318], "spent": 3, "amount": 3, "39": 3, "ms": [3, 311], "By": [3, 312, 315], "bigger": [3, 201], "remain": [3, 192, 226, 227, 228], "almost": 3, "nobodi": 3, "took": 3, "least": [3, 63, 116, 144], "notic": [3, 312, 317], "distanc": [3, 292], "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 115, 174, 243], "ey": 3, "speak": [3, 115], "resum": 3, "postur": 3, "stood": 3, 
"feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": 3, "quarter": 3, "hour": 3, "made": 3, "immedi": [3, 236], "repli": 3, "again": [3, 6, 221, 311], "hand": [3, 312, 314], "did": 3, "accustom": 3, "thu": [3, 221], "question": [3, 314], "reason": [3, 313], "tell": [3, 311, 315], "understand": [3, 269, 270], "579": 3, "690": 3, "num": [3, 117, 153], "500": [3, 318], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 221], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": [3, 314], "kind": 3, "longer": [3, 68, 312], "soon": 3, "unless": [3, 13, 115, 304], "unlik": [3, 13, 227, 228, 248], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": [3, 63, 313, 315], "happi": 3, "ask": 3, "Is": [3, 264, 267], "shop": 3, "bui": 3, "food": 3, "633": 3, "21": 3, "475": 3, "su": 3, "j": [3, 6, 115, 202, 203, 205, 227], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": [3, 260], "enhanc": [3, 260, 314], "rotari": [3, 260], "arxiv": [3, 200, 206, 231, 232, 233, 235, 258, 293], "preprint": [3, 200, 206], "2104": 3, "09864": 3, "zhang": 3, "sennrich": 3, "2019": [3, 204], "root": [3, 160, 175, 258], "advanc": [3, 311], "inform": [3, 4, 6, 162, 163, 221, 223, 230, 255, 312, 318], "system": [3, 6], "shazeer": 3, "2020": 3, "glu": 3, "variant": [3, 205, 291], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 183, 221, 311], "mlp": [4, 221, 267, 307], "inherit": [4, 310], "standard": [4, 37, 57, 128, 148, 150, 267, 269, 271, 274, 316], "idiom": [4, 311], "input_dim": [4, 221, 234, 257], "hidden_dim": [4, 304, 307], "output_dim": [4, 221, 234, 257], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 22, 63, 221, 259, 264, 277, 278, 295, 304, 314], "cross": [4, 279, 281], "entropi": [4, 279, 281], "sub": [4, 75, 153], "commonli": [4, 253, 301, 311], "cross_entropi": [4, 221], "accuraci": 4, "valid": [4, 68, 99, 195, 213, 241, 252, 310], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 307], "batch_siz": [4, 307], "num_epoch": [4, 307], "learning_r": [4, 200, 201, 202, 203, 204, 205, 206, 208, 211, 212, 307, 311], "train_imag": [4, 307], "train_label": [4, 307], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 307], "perm": 4, "permut": 4, "id": [4, 6], "put": [4, 311], "trainabl": [4, 199, 221, 304], "loss_and_grad_fn": [4, 307, 311, 312], "value_and_grad": [4, 210, 221, 253, 304, 307, 311, 312, 315, 316], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 86, 93, 94, 96, 97, 98, 231, 243, 313, 315], "featur": [5, 66, 67, 223, 231, 232, 233, 234, 257, 258, 260, 267, 311, 314], "main": [5, 75, 86, 214, 221], "differ": [5, 180, 291, 312], "lazi": [5, 304, 316], "multi": [5, 224, 225, 313, 315], "cpu": [5, 116, 311, 318], "gpu": [5, 311, 313, 318], "inspir": 5, "jax": [5, 308], "arrayfir": 5, "unifi": 5, "live": [5, 318], "guid": 5, "convers": 5, "regress": [5, 287], "layer": [5, 221, 227, 228, 231, 233, 234, 249, 254, 257, 262, 267, 300, 304], "perceptron": 5, "llm": 5, "infer": [5, 102, 118], "fft": 5, "algebra": 5, "tree": [5, 64, 83, 103, 192, 195, 207, 208, 210, 213, 214, 215, 306, 312], "develop": [5, 6], "document": [5, 47, 58, 162, 163, 311, 312, 313], "pypi": 6, "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": [6, 206], "14": 6, "sonoma": 6, "conda": 6, "forg": 6, "distribut": [6, 146, 147, 148, 150, 154, 155, 234, 269, 
270, 271, 272, 274, 275, 282, 285, 290, 292, 301], "probabl": [6, 151, 226, 227, 228, 257, 279, 281, 285, 318], "platform": 6, "processor": 6, "arm": [6, 217], "i386": 6, "switch": 6, "17": 6, "g": [6, 115, 144, 211, 212, 300, 314, 319], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": [6, 115, 311], "environ": [6, 76, 79], "via": [6, 207, 210, 314, 315], "rosetta": 6, "unam": 6, "p": [6, 146, 203, 205, 221, 226, 227, 228, 292], "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 76, 79, 152, 308, 311], "env": 6, "cmake_build_parallel_level": 6, "edit": [6, 254], "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "either": [6, 11, 47, 57, 58, 63, 77, 78, 80, 101, 104, 105, 113, 114, 115, 123, 128, 130, 133, 135, 180, 192, 262, 271, 272], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 128, 135, 144, 145, 255, 264, 311, 314, 317], "wish": 6, "variabl": [6, 64, 76, 79, 103, 112, 192, 194, 195], "export": 6, "developer_dir": 6, "app": 6, "content": [6, 240, 311], "sdk": 6, "xcrun": 6, "macosx": 6, "show": [6, 217, 311], "unabl": 6, "tool": 6, "select": [6, 196, 236, 240], "sudo": 6, "ouptut": 6, "finder": 6, "iterm": 6, "termin": 6, "click": 6, "uncheck": 6, "window": 6, "restart": 6, "grep": 6, "cmake_host_system_processor": 6, "arm64": 6, "x86_64": 6, "wipe": 6, "cahc": 6, "rf": 6, "devicetyp": 7, "attribut": [7, 8, 9, 26, 248, 304, 306], "kwarg": [8, 164, 165, 319], "union": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 36, 38, 39, 40, 41, 42, 43, 45, 46, 47, 48, 49, 51, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 65, 66, 67, 68, 69, 70, 73, 74, 75, 77, 78, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 162, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 196, 197, 198, 225, 241, 243, 252], "absolut": [10, 13, 277, 278, 291], "semant": [11, 61, 77, 78, 80, 104, 105, 113, 114, 123, 128, 130, 133, 135, 180, 318], "keepdim": [12, 14, 22, 23, 29, 30, 31, 32, 40, 41, 42, 43, 45, 56, 59, 115, 127, 129, 131, 132, 143, 172, 181, 193], "reduct": [12, 14, 127, 129, 132, 143, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292], "reduc": [12, 14, 22, 23, 127, 129, 131, 132, 143, 181, 193, 223, 267, 287], "unspecifi": [12, 14, 15, 22, 23, 24, 25, 65, 102, 127, 129, 131, 132, 138, 142, 143, 157, 172, 173, 181, 183, 193, 197, 319], "entir": [12, 14, 22, 23, 127, 129, 131, 132, 143, 181, 193, 227, 228], "singleton": [12, 14, 22, 23, 127, 128, 129, 131, 132, 143, 181, 193], "rtol": 13, "05": [13, 223, 231, 232, 233, 258], "atol": 13, "08": [13, 202, 203, 204, 205, 211, 280], "equal_nan": [13, 60], "approxim": [13, 230, 276, 277, 278], "comparison": [13, 80, 104, 105, 113, 114], "infinit": 13, "equal": [13, 24, 60, 86, 105, 114, 142, 151, 174, 232, 234], "sign": [13, 206, 217], "nan": [13, 60, 109], "ab": [13, 115, 192, 231, 232, 233, 235, 258, 293, 311], "array_equ": 13, "rel": [13, 201, 311], "toler": 13, "boolean": [13, 60, 108, 109, 110, 111, 124, 125, 126, 217, 
251, 313], "interv": [15, 117, 151, 155], "increment": 15, "otherwis": [15, 213, 214, 241, 243, 252, 266, 267, 279, 284, 291, 298, 299, 314, 315], "int32": [15, 99, 115, 151, 217, 313, 316], "convent": [15, 68, 204], "lead": [15, 311], "fraction": 15, "integr": [15, 183, 314], "invers": [16, 17, 18, 19, 20, 21, 82, 90, 91, 92, 93, 94, 95], "cosin": [16, 17, 69, 70, 260, 280, 312], "hyperbol": [17, 19, 21, 70, 171, 186], "sine": [18, 19, 170, 171, 260, 312], "uint32": [22, 23, 24, 25, 147, 217], "minimum": [23, 63, 264, 280], "kth": [24, 142], "partit": 24, "order": [24, 115, 142, 144, 208, 221, 231, 253, 262, 311, 312], "undefin": [24, 142, 313], "sort": [24, 25, 142], "flatten": [24, 25, 115, 140, 142, 157, 173, 183, 184, 213], "dimension": [26, 87, 88, 89, 90, 91, 92, 96, 97, 98, 223, 224, 225, 229, 234, 257, 264, 313, 315], "val": [26, 102], "tupl": [26, 47, 50, 58, 65, 67, 78, 83, 85, 112, 115, 116, 141, 144, 158, 177, 192, 194, 201, 203, 204, 205, 206, 213, 214, 215, 225, 243, 245, 260, 262, 310, 312], "ndarrai": [26, 313, 314, 316], "properti": [27, 35, 44, 50, 52, 209, 248, 251, 312], "argument": [27, 47, 58, 64, 83, 103, 192, 214, 221, 308, 312, 317, 318, 319], "decim": [48, 159], "indices_or_sect": [53, 174], "nest": [57, 64, 221, 304, 310, 312], "ddof": [59, 193], "a_min": 63, "a_max": 63, "edg": [63, 141, 311], "At": 63, "anoth": [63, 128, 180, 196, 221, 236, 311, 312, 313, 318], "fun": [64, 103, 112, 192, 194, 195, 311, 313, 314, 318], "dict": [64, 83, 118, 162, 163, 164, 207, 208, 210, 246, 250, 253, 254, 304, 310, 312, 317], "dure": [64, 226, 227, 228, 315], "arbitrarili": [64, 221, 310, 312, 316], "leaf": [64, 213, 214, 240], "node": [64, 83, 195], "pad": [66, 67, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 224, 225], "dilat": [66, 67], "group": [66, 67, 73, 144, 145, 231, 257], "1d": [66, 68, 162, 184], "convolut": [66, 67, 68, 224, 225, 227, 228], "channel": [66, 67, 223, 224, 225, 227, 228], "c_in": [66, 67], "c_out": [66, 67], "convolv": [66, 67], "2d": [67, 75, 144, 223, 227], "spatial": [67, 231], "symmetr": 67, "discret": [68, 87, 88, 89, 90, 91, 92, 96, 97, 98, 229], "swap": [68, 182, 254, 257], "conv": 68, "filter": [68, 224, 225, 236, 240], "flip": 68, "signal": 68, "bias": [73, 144, 145, 241, 252, 255], "group_siz": [73, 144, 145, 257], "64": [73, 144, 145, 217, 257], "configur": 73, "formal": [73, 144], "notat": [73, 213, 245], "quantiz": [73, 118, 145, 257], "w_i": [73, 144], "hat": [73, 144], "occupi": [73, 144, 145], "diagon": [74, 86, 189, 190, 191], "th": [74, 86], "axis1": [75, 182], "axis2": [75, 182], "subarrai": [75, 174], "remov": [75, 128, 147, 177, 281], "insert": [75, 85, 318], "neg": [75, 99, 110, 255, 282, 290, 292, 313], "taken": [75, 183], "disabl": [76, 311], "mlx_disable_compil": [76, 79, 311], "divis": [77, 101, 144], "quotient": [77, 78, 101], "remaind": 78, "fuction": 78, "faster": [78, 276, 311, 312], "mathrm": [81, 168, 232], "frac": [81, 144, 168, 200, 202, 203, 204, 205, 211, 223, 226, 227, 228, 231, 232, 233, 234, 258, 269, 270, 271, 272, 280, 282, 284, 287], "pi": [81, 264, 312], "int_0": 81, "dt": 81, "erf": [82, 311], "exponenti": [84, 261, 296], "ident": [86, 179, 221, 249], "zero": [86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 189, 190, 191, 198, 201, 221, 226, 227, 228, 243, 268, 269, 270, 271, 272, 273, 274, 275, 301, 313], "whose": [86, 199], "One": [87, 90, 96, 160, 311, 312], "fourier": [87, 88, 89, 90, 91, 92, 96, 97, 98], "truncat": [87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 154], "dft": [87, 88, 89, 90, 91, 
92, 96, 97, 98], "rfft": 93, "real": [93, 94, 95, 96, 97, 98], "rfft2": 94, "rfftn": 95, "silent": [96, 97, 98], "start_axi": 99, "end_axi": 99, "inclus": 99, "outsid": 99, "clamp": 99, "integ": [101, 115, 141, 144, 145, 146, 151, 174, 187, 195, 217, 229, 313], "floor": 101, "argnam": [103, 192], "neither": [103, 192], "keyword": [103, 164, 165, 192, 214, 221, 308, 317, 319], "strict": [104, 113, 241, 243, 252], "ordinari": 107, "inifn": 108, "infin": [108, 110, 111, 205], "ord": 115, "tabl": [115, 217, 229], "frobeniu": 115, "matric": [115, 116], "strictli": 115, "mathemat": 115, "variou": 115, "purpos": 115, "calcul": [115, 201, 282, 288], "fro": 115, "inf": [115, 255], "largest": 115, "sing": 115, "smallest": 115, "singular": 115, "nuclear": 115, "_f": 115, "sum_": [115, 287], "a_": 115, "valueerror": [115, 243, 312], "refer": [115, 232, 235, 248, 269, 270, 271, 272, 293, 313], "golub": 115, "van": 115, "loan": 115, "baltimor": 115, "md": 115, "john": 115, "hopkin": 115, "univers": 115, "1985": 115, "pg": 115, "la": 115, "arang": [115, 313, 315], "9": [115, 200, 203, 204, 205, 206, 208, 281, 315], "74597": 115, "20": 115, "84804": 115, "41421": 115, "23607": [115, 116], "74166": 115, "24264": 115, "11": 115, "225": 115, "factorizatoin": 116, "q": 116, "894427": 116, "447214": 116, "57771": 116, "50": 117, "evenli": 117, "return_metadata": 118, "binari": [118, 161, 162, 163, 164, 165, 266, 279, 299, 311], "npy": [118, 161, 317], "safetensor": [118, 163, 243, 247, 314, 317], "gguf": [118, 162, 317], "matadata": 118, "unsupport": 118, "tensor": [118, 187, 292, 315], "natur": [119, 121, 314], "logarithm": [119, 120, 121, 122], "log": [121, 123, 127, 282, 285, 287, 290], "plu": 121, "exp": [123, 127, 148, 172, 261, 285, 296, 311, 318], "stabl": [123, 127, 172, 287], "prepend": 128, "negat": 136, "beforehand": 140, "pad_with": 141, "constant_valu": 141, "pad_width": 141, "before_1": 141, "after_1": 141, "before_2": 141, "after_2": 141, "before_n": 141, "after_n": 141, "before_i": 141, "after_i": 141, "extend": 141, "side": [141, 311], "smaller": [142, 206, 311], "everi": [144, 214, 312], "particular": [144, 231], "consecut": [144, 260], "w_1": 144, "w_g": 144, "begin": [144, 261, 266, 284, 291, 296, 298, 299], "align": 144, "max_i": 144, "min_i": 144, "textrm": [144, 230, 276], "round": 144, "pack": [144, 145], "unsign": [144, 145, 217], "lower": [144, 151, 154, 155, 189, 275], "upper": [144, 151, 154, 155, 275], "1st": 144, "signific": 144, "2nd": 144, "dequant": 144, "w_q": 144, "whether": [145, 240, 255, 279, 282, 288], "prng": [146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 308], "num_sampl": 147, "unnorm": [147, 279, 281], "draw": 147, "cdf": [148, 230, 276], "accord": [148, 196, 255, 269, 270, 271, 272], "seed": 149, "loc": 150, "deviat": [150, 269, 271, 274], "low": [151, 155, 275, 301], "high": [151, 155, 221, 229, 275, 301], "bound": [151, 154, 155, 230, 275, 311, 313, 318], "roadcast": 151, "domain": 154, "uniformli": 155, "repetit": 157, "preserv": [158, 312], "reciproc": 160, "arr": [161, 313], "obj": 162, "uncompress": 164, "my_path": 164, "tree_flatten": [164, 214, 215, 221], "transformerencod": 164, "128": [164, 221], "flat_param": 164, "compress": 165, "being": [179, 221], "prevent": [179, 292, 315], "flow": [179, 314], "unchang": [179, 260], "prior": [183, 184], "exclud": 184, "dot": [187, 213, 245, 255], "elsewher": [189, 313], "col": 189, "triangl": 189, "mse": 192, "param": [192, 221, 301, 312], "lvalu": 192, "dlvalu": 192, "dparam": 192, "lasso": 192, "l1": [192, 284, 
286, 287, 291], "varianc": [193, 223, 231, 282], "divisor": 193, "cotang": 194, "in_ax": [195, 312], "out_ax": [195, 312], "prefix": [195, 213], "fn": [199, 214, 316], "callabl": [199, 213, 214, 236, 237, 240, 262, 267, 268, 269, 270, 271, 272, 273, 274, 275], "wrt": 199, "rho": 200, "06": [200, 282, 292], "paper": [200, 201, 202, 203, 205, 206, 223, 264], "zeiler": 200, "2012": [200, 211], "adapt": [200, 201, 202], "1212": 200, "5701": 200, "v_": [200, 202, 203, 204, 205, 211, 212], "v_t": [200, 202, 203, 204, 205, 211, 212], "g_t": [200, 202, 203, 204, 205, 206, 211, 212], "delta": [200, 284], "w_": [200, 201, 202, 203, 204, 205, 206, 211, 212], "u_t": 200, "epsilon": [200, 202, 203, 204, 205, 211, 223, 231, 232, 233, 258, 280, 282], "u_": 200, "w_t": [200, 202, 203, 204, 205, 206, 211, 212], "lambda": [200, 201, 202, 203, 204, 205, 206, 211, 212, 214, 221, 236, 241, 261, 265, 296, 298, 311, 312], "averag": [200, 201, 203, 204, 205], "denomin": [200, 202, 203, 204, 205, 211, 232, 280], "stabil": [200, 201, 202, 203, 204, 205, 211, 223, 231, 232, 233, 258, 280, 282], "30": 201, "001": 201, "clip_threshold": 201, "decay_r": 201, "beta_1": [201, 203, 204, 205, 206], "weight_decai": [201, 204, 206, 212], "scale_paramet": 201, "relative_step": 201, "warmup_init": 201, "sublinear": 201, "cost": [201, 314], "epsilon_1": 201, "epsilon_2": 201, "parameter_scal": 201, "clip": 201, "unscal": 201, "decai": [201, 204, 206, 212], "duchi": 202, "hazan": 202, "singer": 202, "2011": 202, "subgradi": 202, "onlin": 202, "stochast": [202, 203, 205, 212, 314], "jmlr": 202, "999": [203, 204, 205], "omit": [203, 205], "estim": [203, 205], "kingma": [203, 205], "ba": [203, 205], "2015": [203, 205, 227], "iclr": [203, 204, 205], "m_": [203, 204, 205, 206], "m_t": [203, 204, 205, 206], "beta_2": [203, 204, 205, 206], "contrast": 204, "loshchilov": 204, "hutter": 204, "decoupl": 204, "regular": [204, 227, 235, 293, 311, 313], "adam": [205, 206], "99": [206, 211], "tend": 206, "larger": [206, 260], "10x": 206, "adamw": 206, "maintain": [206, 227, 228], "strength": [206, 212], "wd": 206, "chen": 206, "symbol": 206, "discoveri": 206, "2302": 206, "06675": 206, "c_": 206, "eta": 206, "c_t": 206, "momentum": [206, 208, 212, 223, 311], "appli": [207, 210, 214, 221, 223, 224, 225, 227, 228, 230, 231, 232, 233, 234, 235, 237, 249, 256, 257, 258, 259, 261, 263, 265, 266, 276, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 301, 306, 311], "opt": 207, "superset": [207, 214], "trainable_paramet": [208, 221, 240], "tieleman": 211, "hinton": 211, "lectur": 211, "coursera": 211, "smooth": [211, 281, 291], "dampen": 212, "nesterov": 212, "descent": [212, 311, 314], "mu": 212, "tau": 212, "l2": [212, 284, 287], "penalti": 212, "is_leaf": [213, 214], "arbitrari": [213, 304], "depth": [213, 228, 312], "hello": [213, 215], "charact": 213, "flat": [213, 215], "extra": 214, "closer": 214, "constitut": 214, "dict_kei": 214, "recreat": 215, "world": 215, "42": 215, "byte": 217, "bool_": 217, "uint8": 217, "uint16": 217, "16": [217, 232, 236, 304], "uint64": 217, "int8": 217, "int16": 217, "int64": 217, "done": [221, 226, 311, 314, 315], "manual": 221, "explicitli": [221, 308], "solv": 221, "intuit": 221, "freez": [221, 252, 304], "finetun": 221, "in_dim": [221, 304], "out_dim": [221, 304], "enumer": 221, "caus": [221, 311, 314], "local": [221, 227], "scope": 221, "l2_loss": 221, "y_hat": 221, "loss_and_grad": 221, "workhors": 221, "Its": 221, "recurs": [221, 240, 241, 246, 250, 
252, 304], "frozen": [221, 241, 250, 252, 257, 304], "individu": [221, 227, 228], "subset": [221, 240], "action": 221, "displai": 221, "tree_map": 221, "count": 221, "num_param": 221, "preclud": 221, "pure": [221, 307], "pattern": [221, 314], "achiev": 221, "other_input": 221, "necessari": 221, "wrap": 221, "apply_to_modul": [221, 241], "children": 221, "filter_and_map": 221, "leaf_modul": 221, "load_weight": [221, 314], "named_modul": 221, "save_weight": 221, "unfreez": [221, 241], "update_modul": 221, "alibi": 221, "batchnorm": 221, "conv1d": 221, "conv2d": 221, "dropout": [221, 227, 228, 249, 267, 311], "dropout2d": 221, "dropout3d": 221, "gelu": [221, 277, 278, 311], "groupnorm": 221, "instancenorm": 221, "layernorm": 221, "mish": 221, "prelu": 221, "quantizedlinear": 221, "relu": [221, 256, 267, 294, 301], "selu": 221, "sequenti": [221, 301], "silu": 221, "sinusoidalpositionalencod": 221, "softshrink": 221, "gelu_approx": [221, 230, 276], "gelu_fast_approx": [221, 230, 276], "binary_cross_entropi": [221, 311], "cosine_similarity_loss": 221, "gaussian_nll_loss": 221, "hinge_loss": 221, "huber_loss": 221, "kl_div_loss": 221, "l1_loss": 221, "log_cosh_loss": 221, "margin_ranking_loss": 221, "mse_loss": 221, "nll_loss": 221, "smooth_l1_loss": 221, "triplet_loss": 221, "init": [221, 256, 301, 307], "uniform": [221, 234, 243, 270, 272, 301, 308, 311, 312, 318], "glorot_norm": 221, "glorot_uniform": 221, "he_norm": 221, "he_uniform": 221, "affin": [223, 231, 232, 233, 234, 257], "track_running_stat": 223, "var": [223, 231, 232, 233, 282], "gamma": [223, 231, 232, 233, 258, 269, 270, 271, 272], "nc": 223, "nlc": [223, 224], "four": 223, "nhwc": [223, 225], "height": [223, 225, 227, 228], "width": [223, 225, 227, 228, 257], "deep": [223, 269, 270, 271, 272], "intern": 223, "covari": 223, "shift": 223, "bn": 223, "in_channel": [224, 225], "out_channel": [224, 225], "kernel_s": [224, 225], "learnabl": [224, 225, 262], "portion": 226, "independ": [227, 228], "nwhc": 227, "whc": 227, "entri": [227, 228], "benefici": [227, 228, 314], "earli": 227, "adjac": 227, "pixel": 227, "correl": 227, "thompson": 227, "goroshin": 227, "jain": 227, "lecun": 227, "bregler": 227, "cvpr": 227, "ndhwc": 228, "dhwc": 228, "medic": 228, "video": 228, "num_embed": 229, "lookup": 229, "typic": [229, 307, 311, 314], "usual": [229, 310, 314], "vocabulari": 229, "approx": 230, "unit": [230, 259, 261, 263, 269, 270, 271, 272, 276, 277, 278, 295, 296, 297], "phi": [230, 276], "geluapprox": 230, "sigma": [230, 263, 269, 270, 271, 272, 277, 278, 297], "60033": [230, 277], "0433603": [230, 277], "gelufast": 230, "773": [230, 278], "regard": 230, "num_group": 231, "pytorch_compat": 231, "split": 231, "preced": 231, "http": [231, 232, 233, 235, 258, 293], "org": [231, 232, 233, 235, 258, 293], "1803": 231, "08494": 231, "inorm": 232, "1607": [232, 233], "08022": 232, "06450": 233, "mathcal": 234, "u": 234, "d_i": 234, "monoton": [235, 293], "1908": [235, 293], "08681": [235, 293], "tanh": [235, 293], "softplu": [235, 293], "map_fn": [236, 240], "filter_fn": [236, 240], "valid_parameter_filt": 236, "apply_fn": 237, "descend": 238, "is_leaf_fn": 240, "found": 240, "drop": 240, "idempot": [241, 252], "attent": [241, 255, 264, 267], "endswith": 241, "file_or_weight": 243, "miss": [243, 317], "ok": [243, 312], "save_safetensor": [247, 317], "reflect": [248, 311, 313, 315], "certain": [249, 311], "ie": 252, "noop": 252, "unfrozen": 252, "chang": [253, 257, 284, 291, 311, 315], "tracer": 253, "partial": [253, 254, 311, 314], "child": 
254, "programmat": 254, "query_input_dim": 255, "key_input_dim": 255, "value_input_dim": 255, "value_dim": 255, "value_output_dim": 255, "head": [255, 267], "aggreg": 255, "linearli": 255, "attend": 255, "num_paramet": 256, "25": 256, "parametr": [256, 294], "classmethod": 257, "from_linear": 257, "quantize_modul": 257, "1910": 258, "07467": 258, "rectifi": [259, 271, 272, 295], "10000": 260, "rotat": 260, "slightli": [260, 318], "angular": 260, "frequenc": [260, 264], "_cos_sin_theta_kei": 260, "precomput": 260, "_cos_sin_theta_valu": 260, "leq": [261, 284, 296], "0507": [261, 296], "67326": [261, 296], "elu": [261, 296], "plain": 262, "known": [263, 297], "swish": [263, 297], "cdot": [263, 277, 278, 280, 283, 297], "min_freq": 264, "0001": 264, "max_freq": 264, "cos_first": 264, "full_turn": 264, "sinusoid": 264, "sin": [264, 312, 316], "lambd": [265, 298], "threshold": [266, 284, 291, 299], "geq": [266, 299], "num_encoder_lay": 267, "num_decoder_lay": 267, "custom_encod": 267, "custom_decod": 267, "norm_first": 267, "checkpoint": 267, "decod": 267, "interact": 267, "mechan": 267, "hidden": 267, "chekpoint": 267, "usag": [267, 311], "expens": 267, "init_fn": [268, 269, 270, 271, 272, 273, 274, 275, 301], "glorot": [269, 270], "fan_in": [269, 270, 271, 272], "fan_out": [269, 270, 271, 272], "difficulti": [269, 270], "feedforward": [269, 270], "191107": 269, "61278": 269, "150594": 269, "363207": 269, "gain": [269, 270, 271, 272], "89613": 269, "53947": 269, "48095": 269, "995016": 269, "223404": 270, "890597": 270, "379159": 270, "776856": 270, "90041": 270, "02264": 270, "912766": 270, "12451": 270, "fan": [271, 272], "delv": [271, 272], "surpass": [271, 272], "human": [271, 272], "level": [271, 272], "imagenet": [271, 272], "classif": [271, 272], "25211": 271, "458835": 271, "177208": 271, "0137595": 271, "6967": 271, "02765": 271, "15268": 271, "75787": 271, "kaim": 272, "0300242": 272, "0184009": 272, "793615": 272, "666329": 272, "64331": 272, "16506": 272, "08619": 272, "79854": 272, "982273": 274, "534422": 274, "380709": 274, "0645099": 274, "883935": 275, "863726": 275, "617261": 275, "417497": 275, "exact": [277, 278], "0003": 277, "015": 278, "with_logit": 279, "predict": [279, 282, 283, 284, 285, 286, 287, 289, 290, 291], "105361": 279, "223144": 279, "20397": 279, "916291": 279, "539245": 279, "prob": 279, "510826": 279, "x1": 280, "x2": 280, "x_1": [280, 288], "x_2": [280, 288], "label_smooth": 281, "hot": 281, "0485873": 281, "348587": 281, "likelihood": [282, 290], "nll": [282, 290], "hing": 283, "y_": [283, 287], "pred": [283, 287], "huber": 284, "l_": 284, "kullback": 285, "leibler": 285, "diverg": 285, "cosh": 287, "logcosh": 287, "sensit": 287, "outlier": 287, "dual": 287, "behavior": [287, 313, 314], "offer": 287, "balanc": 287, "robust": 287, "approach": [287, 312], "task": 287, "inputs1": 288, "inputs2": 288, "margin": [288, 292], "rank": 288, "573409": 288, "765166": 288, "0638": 288, "75596": 288, "225763": 288, "256995": 288, "773433": 288, "formula": 291, "anchor": 292, "triplet": 292, "_p": 292, "degre": 292, "pairwis": 292, "instabl": 292, "subclass": 304, "concept": 304, "mymlp": 304, "in_proj": 304, "basi": 306, "subsequ": 307, "apply_gradi": 307, "implicit": [308, 311, 312], "fine": [308, 314], "grain": 308, "control": [308, 314], "manag": [308, 318], "pseudo": 308, "altern": 308, "splittabl": 308, "threefri": 308, "counter": 308, "cycl": 310, "merg": 311, "fuse": 311, "big": 311, "awar": [311, 314], "36788": 311, "compiled_fun": 311, "code": [311, 314], 
"slow": 311, "Not": 311, "recompil": 311, "stack": 311, "rerun": [311, 314], "too": [311, 314], "frequent": [311, 314], "destroi": 311, "anonym": 311, "don": [311, 318], "nonlinear": 311, "unari": 311, "overhead": [311, 314, 318], "bandwidth": 311, "fusibl": 311, "consider": 311, "versu": 311, "timeit": [311, 312], "tic": 311, "perf_count": 311, "toc": 311, "tpi": 311, "1e3": 311, "1000": 311, "4096": [311, 312, 318], "On": [311, 312, 314], "millisecond": [311, 318], "five": 311, "latest": 311, "won": 311, "trace": 311, "placehold": 311, "insid": 311, "crash": 311, "inspect": [311, 316], "disable_compil": 311, "okai": [311, 314], "intend": 311, "deal": 311, "pretti": [311, 314], "inconveni": 311, "functool": 311, "particularli": 311, "backward": [311, 312], "squeez": 311, "checkout": 311, "compiled_grad_fn": 311, "71828": 311, "outer": [311, 314], "opportun": 311, "idea": [312, 314], "behind": 312, "dfdx": [312, 313], "d2fdx2": 312, "differentiaion": 312, "zero_grad": 312, "detach": 312, "requires_grad": 312, "dloss_dw": 312, "dloss_dx": 312, "lot": 312, "redund": 312, "suppos": [312, 318], "nice": [312, 314], "propag": [312, 313], "stop_gradi": 312, "autom": 312, "contriv": [312, 318], "sake": 312, "clariti": 312, "quit": [312, 315], "power": [312, 315], "difficult": 312, "primit": 312, "issu": [312, 315], "priorit": 312, "xs": 312, "ys": 312, "naive_add": 312, "vmap_add": 312, "total": 312, "390": 312, "wherea": 312, "025": 312, "ten": [312, 314], "Of": 312, "better": [312, 318], "handi": 312, "slice": 313, "ellipsi": 313, "syntax": 313, "idx": 313, "mix": 313, "take_along_axi": 313, "lack": 313, "extrem": [313, 314], "ineffici": [313, 314], "nonzero": 313, "record": 314, "dynam": 314, "easier": 314, "worri": 314, "fun1": 314, "expensive_fun": 314, "consum": 314, "eager": 314, "thank": 314, "weights_fp16": 314, "trade": 314, "bad": 314, "grow": 314, "computation": 314, "costli": 314, "wide": 314, "thousand": 314, "value_and_grad_fn": 314, "implicitli": 314, "anytim": 314, "memoryview": [314, 315], "perfectli": 314, "first_lay": 314, "second_layer_a": 314, "second_layer_b": 314, "protocol": 315, "receiv": 315, "pep": 315, "3118": 315, "view": 315, "a_view": 315, "owndata": 315, "extern": 315, "x_view": 315, "modifi": 315, "df": 315, "x\u00b2": 315, "2x": 315, "indirectli": 315, "modif": 315, "seen": 315, "occur": 315, "incorpor": 315, "incorrect": 315, "experiment": 315, "break": 315, "advis": 315, "intermedi": 315, "jnp": 315, "tf": 315, "page": 316, "composit": 316, "archiv": 317, "savez_compress": 317, "save_gguf": 317, "arr_0": 317, "pool": 318, "advantag": 318, "parallel": 318, "race": 318, "interest": 318, "albeit": 318, "d1": 318, "d2": 318, "matmul": 318, "dens": 318, "twice": 318, "measur": 318, "default_stream": 319, "default_devic": 319, "my_devic": 319}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [9, 0, 1, "", "Stream"], [10, 2, 1, "", "abs"], [11, 2, 1, "", "add"], [12, 2, 1, "", "all"], [13, 2, 1, "", "allclose"], [14, 2, 1, "", "any"], [15, 2, 1, "", "arange"], [16, 2, 1, "", "arccos"], [17, 2, 1, "", "arccosh"], [18, 2, 1, "", "arcsin"], [19, 2, 1, "", "arcsinh"], [20, 2, 1, "", "arctan"], [21, 2, 1, "", "arctanh"], [22, 2, 1, "", "argmax"], [23, 2, 1, "", "argmin"], [24, 2, 1, "", "argpartition"], [25, 2, 1, "", "argsort"], [26, 0, 1, "", "array"], [60, 2, 1, "", "array_equal"], [61, 2, 1, "", "broadcast_to"], [62, 2, 1, "", "ceil"], [63, 2, 1, "", "clip"], [64, 2, 1, "", "compile"], [65, 2, 1, "", "concatenate"], [66, 2, 1, "", "conv1d"], 
[67, 2, 1, "", "conv2d"], [68, 2, 1, "", "convolve"], [69, 2, 1, "", "cos"], [70, 2, 1, "", "cosh"], [71, 2, 1, "", "default_device"], [72, 2, 1, "", "default_stream"], [73, 2, 1, "", "dequantize"], [74, 2, 1, "", "diag"], [75, 2, 1, "", "diagonal"], [76, 2, 1, "", "disable_compile"], [77, 2, 1, "", "divide"], [78, 2, 1, "", "divmod"], [79, 2, 1, "", "enable_compile"], [80, 2, 1, "", "equal"], [81, 2, 1, "", "erf"], [82, 2, 1, "", "erfinv"], [83, 2, 1, "", "eval"], [84, 2, 1, "", "exp"], [85, 2, 1, "", "expand_dims"], [86, 2, 1, "", "eye"], [99, 2, 1, "", "flatten"], [100, 2, 1, "", "floor"], [101, 2, 1, "", "floor_divide"], [102, 2, 1, "", "full"], [103, 2, 1, "", "grad"], [104, 2, 1, "", "greater"], [105, 2, 1, "", "greater_equal"], [106, 2, 1, "", "identity"], [107, 2, 1, "", "inner"], [108, 2, 1, "", "isinf"], [109, 2, 1, "", "isnan"], [110, 2, 1, "", "isneginf"], [111, 2, 1, "", "isposinf"], [112, 2, 1, "", "jvp"], [113, 2, 1, "", "less"], [114, 2, 1, "", "less_equal"], [117, 2, 1, "", "linspace"], [118, 2, 1, "", "load"], [119, 2, 1, "", "log"], [120, 2, 1, "", "log10"], [121, 2, 1, "", "log1p"], [122, 2, 1, "", "log2"], [123, 2, 1, "", "logaddexp"], [124, 2, 1, "", "logical_and"], [125, 2, 1, "", "logical_not"], [126, 2, 1, "", "logical_or"], [127, 2, 1, "", "logsumexp"], [128, 2, 1, "", "matmul"], [129, 2, 1, "", "max"], [130, 2, 1, "", "maximum"], [131, 2, 1, "", "mean"], [132, 2, 1, "", "min"], [133, 2, 1, "", "minimum"], [134, 2, 1, "", "moveaxis"], [135, 2, 1, "", "multiply"], [136, 2, 1, "", "negative"], [137, 2, 1, "", "new_stream"], [138, 2, 1, "", "ones"], [139, 2, 1, "", "ones_like"], [140, 2, 1, "", "outer"], [141, 2, 1, "", "pad"], [142, 2, 1, "", "partition"], [143, 2, 1, "", "prod"], [144, 2, 1, "", "quantize"], [145, 2, 1, "", "quantized_matmul"], [156, 2, 1, "", "reciprocal"], [157, 2, 1, "", "repeat"], [158, 2, 1, "", "reshape"], [159, 2, 1, "", "round"], [160, 2, 1, "", "rsqrt"], [161, 2, 1, "", "save"], [162, 2, 1, "", "save_gguf"], [163, 2, 1, "", "save_safetensors"], [164, 2, 1, "", "savez"], [165, 2, 1, "", "savez_compressed"], [166, 2, 1, "", "set_default_device"], [167, 2, 1, "", "set_default_stream"], [168, 2, 1, "", "sigmoid"], [169, 2, 1, "", "sign"], [170, 2, 1, "", "sin"], [171, 2, 1, "", "sinh"], [172, 2, 1, "", "softmax"], [173, 2, 1, "", "sort"], [174, 2, 1, "", "split"], [175, 2, 1, "", "sqrt"], [176, 2, 1, "", "square"], [177, 2, 1, "", "squeeze"], [178, 2, 1, "", "stack"], [179, 2, 1, "", "stop_gradient"], [180, 2, 1, "", "subtract"], [181, 2, 1, "", "sum"], [182, 2, 1, "", "swapaxes"], [183, 2, 1, "", "take"], [184, 2, 1, "", "take_along_axis"], [185, 2, 1, "", "tan"], [186, 2, 1, "", "tanh"], [187, 2, 1, "", "tensordot"], [188, 2, 1, "", "transpose"], [189, 2, 1, "", "tri"], [190, 2, 1, "", "tril"], [191, 2, 1, "", "triu"], [192, 2, 1, "", "value_and_grad"], [193, 2, 1, "", "var"], [194, 2, 1, "", "vjp"], [195, 2, 1, "", "vmap"], [196, 2, 1, "", "where"], [197, 2, 1, "", "zeros"], [198, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[9, 1, 1, "", "__init__"]], "mlx.core.array": [[27, 3, 1, "", "T"], [26, 1, 1, "", "__init__"], [28, 1, 1, "", "abs"], [29, 1, 1, "", "all"], [30, 1, 1, "", "any"], [31, 1, 1, "", "argmax"], [32, 1, 1, "", "argmin"], [33, 1, 1, "", "astype"], [34, 1, 1, "", "cos"], [35, 3, 1, "", "dtype"], [36, 1, 1, "", "exp"], [37, 1, 1, "", "item"], [38, 1, 1, "", "log"], [39, 1, 1, "", "log1p"], [40, 1, 1, "", "logsumexp"], [41, 1, 1, "", 
"max"], [42, 1, 1, "", "mean"], [43, 1, 1, "", "min"], [44, 3, 1, "", "ndim"], [45, 1, 1, "", "prod"], [46, 1, 1, "", "reciprocal"], [47, 1, 1, "", "reshape"], [48, 1, 1, "", "round"], [49, 1, 1, "", "rsqrt"], [50, 3, 1, "", "shape"], [51, 1, 1, "", "sin"], [52, 3, 1, "", "size"], [53, 1, 1, "", "split"], [54, 1, 1, "", "sqrt"], [55, 1, 1, "", "square"], [56, 1, 1, "", "sum"], [57, 1, 1, "", "tolist"], [58, 1, 1, "", "transpose"], [59, 1, 1, "", "var"]], "mlx.core.fft": [[87, 2, 1, "", "fft"], [88, 2, 1, "", "fft2"], [89, 2, 1, "", "fftn"], [90, 2, 1, "", "ifft"], [91, 2, 1, "", "ifft2"], [92, 2, 1, "", "ifftn"], [93, 2, 1, "", "irfft"], [94, 2, 1, "", "irfft2"], [95, 2, 1, "", "irfftn"], [96, 2, 1, "", "rfft"], [97, 2, 1, "", "rfft2"], [98, 2, 1, "", "rfftn"]], "mlx.core.linalg": [[115, 2, 1, "", "norm"], [116, 2, 1, "", "qr"]], "mlx.core.random": [[146, 2, 1, "", "bernoulli"], [147, 2, 1, "", "categorical"], [148, 2, 1, "", "gumbel"], [149, 2, 1, "", "key"], [150, 2, 1, "", "normal"], [151, 2, 1, "", "randint"], [152, 2, 1, "", "seed"], [153, 2, 1, "", "split"], [154, 2, 1, "", "truncated_normal"], [155, 2, 1, "", "uniform"]], "mlx.nn": [[222, 0, 1, "", "ALiBi"], [223, 0, 1, "", "BatchNorm"], [224, 0, 1, "", "Conv1d"], [225, 0, 1, "", "Conv2d"], [226, 0, 1, "", "Dropout"], [227, 0, 1, "", "Dropout2d"], [228, 0, 1, "", "Dropout3d"], [229, 0, 1, "", "Embedding"], [230, 0, 1, "", "GELU"], [231, 0, 1, "", "GroupNorm"], [232, 0, 1, "", "InstanceNorm"], [233, 0, 1, "", "LayerNorm"], [234, 0, 1, "", "Linear"], [235, 0, 1, "", "Mish"], [304, 0, 1, "", "Module"], [255, 0, 1, "", "MultiHeadAttention"], [256, 0, 1, "", "PReLU"], [257, 0, 1, "", "QuantizedLinear"], [258, 0, 1, "", "RMSNorm"], [259, 0, 1, "", "ReLU"], [260, 0, 1, "", "RoPE"], [261, 0, 1, "", "SELU"], [262, 0, 1, "", "Sequential"], [263, 0, 1, "", "SiLU"], [264, 0, 1, "", "SinusoidalPositionalEncoding"], [265, 0, 1, "", "Softshrink"], [266, 0, 1, "", "Step"], [267, 0, 1, "", "Transformer"], [276, 2, 1, "", "gelu"], [277, 2, 1, "", "gelu_approx"], [278, 2, 1, "", "gelu_fast_approx"], [293, 2, 1, "", "mish"], [294, 2, 1, "", "prelu"], [295, 2, 1, "", "relu"], [296, 2, 1, "", "selu"], [297, 2, 1, "", "silu"], [298, 2, 1, "", "softshrink"], [299, 2, 1, "", "step"], [199, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[236, 1, 1, "", "apply"], [237, 1, 1, "", "apply_to_modules"], [238, 1, 1, "", "children"], [239, 1, 1, "", "eval"], [240, 1, 1, "", "filter_and_map"], [241, 1, 1, "", "freeze"], [242, 1, 1, "", "leaf_modules"], [243, 1, 1, "", "load_weights"], [244, 1, 1, "", "modules"], [245, 1, 1, "", "named_modules"], [246, 1, 1, "", "parameters"], [247, 1, 1, "", "save_weights"], [248, 3, 1, "", "state"], [249, 1, 1, "", "train"], [250, 1, 1, "", "trainable_parameters"], [251, 3, 1, "", "training"], [252, 1, 1, "", "unfreeze"], [253, 1, 1, "", "update"], [254, 1, 1, "", "update_modules"]], "mlx.nn.RoPE": [[260, 4, 1, "", "_cos_sin_theta_key"], [260, 4, 1, "", "_cos_sin_theta_value"]], "mlx.nn.init": [[268, 2, 1, "", "constant"], [269, 2, 1, "", "glorot_normal"], [270, 2, 1, "", "glorot_uniform"], [271, 2, 1, "", "he_normal"], [272, 2, 1, "", "he_uniform"], [273, 2, 1, "", "identity"], [274, 2, 1, "", "normal"], [275, 2, 1, "", "uniform"]], "mlx.nn.losses": [[279, 2, 1, "", "binary_cross_entropy"], [280, 2, 1, "", "cosine_similarity_loss"], [281, 2, 1, "", "cross_entropy"], [282, 2, 1, "", "gaussian_nll_loss"], [283, 2, 1, "", "hinge_loss"], [284, 2, 1, "", "huber_loss"], [285, 2, 1, "", "kl_div_loss"], [286, 2, 1, "", "l1_loss"], [287, 
2, 1, "", "log_cosh_loss"], [288, 2, 1, "", "margin_ranking_loss"], [289, 2, 1, "", "mse_loss"], [290, 2, 1, "", "nll_loss"], [291, 2, 1, "", "smooth_l1_loss"], [292, 2, 1, "", "triplet_loss"]], "mlx.optimizers": [[200, 0, 1, "", "AdaDelta"], [201, 0, 1, "", "Adafactor"], [202, 0, 1, "", "Adagrad"], [203, 0, 1, "", "Adam"], [204, 0, 1, "", "AdamW"], [205, 0, 1, "", "Adamax"], [206, 0, 1, "", "Lion"], [306, 0, 1, "", "Optimizer"], [211, 0, 1, "", "RMSprop"], [212, 0, 1, "", "SGD"]], "mlx.optimizers.Optimizer": [[207, 1, 1, "", "apply_gradients"], [208, 1, 1, "", "init"], [209, 3, 1, "", "state"], [210, 1, 1, "", "update"]], "mlx.utils": [[213, 2, 1, "", "tree_flatten"], [214, 2, 1, "", "tree_map"], [215, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property", "4": "py:attribute"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"], "4": ["py", "attribute", "Python attribute"]}, "titleterms": {"oper": [0, 1, 305], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 311, 318], "primit": 1, "us": [1, 314, 319], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 267, 309, 311, 312, 314, 316], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 220, 234], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 302], "encod": 3, "full": [3, 102], "gener": 3, "put": 3, "all": [3, 12, 29], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 118, 317], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "troubleshoot": 6, "from": [6, 313], "sourc": 6, "requir": 6, "option": 6, "metal": 6, "found": 6, "x86": 6, "shell": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 
62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198], "devic": [7, 218], "dtype": [8, 35], "stream": [9, 218, 319], "ab": [10, 28], "add": 11, "allclos": 13, "ani": [14, 30], "arang": 15, "arcco": 16, "arccosh": 17, "arcsin": 18, "arcsinh": 19, "arctan": 20, "arctanh": 21, "argmax": [22, 31], "argmin": [23, 32], "argpartit": 24, "argsort": 25, "arrai": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 216, 313, 317], "t": 27, "astyp": 33, "co": [34, 69], "exp": [36, 84], "item": 37, "log": [38, 119], "log1p": [39, 121], "logsumexp": [40, 127], "max": [41, 129], "mean": [42, 131], "min": [43, 132], "ndim": 44, "prod": [45, 143], "reciproc": [46, 156], "reshap": [47, 158], "round": [48, 159], "rsqrt": [49, 160], "shape": 50, "sin": [51, 170], "size": 52, "split": [53, 153, 174], "sqrt": [54, 175], "squar": [55, 176], "sum": [56, 181], "tolist": 57, "transpos": [58, 188], "var": [59, 193], "array_equ": 60, "broadcast_to": 61, "ceil": 62, "clip": 63, "compil": [64, 311], "concaten": 65, "conv1d": [66, 224], "conv2d": [67, 225], "convolv": 68, "cosh": 70, "default_devic": 71, "default_stream": 72, "dequant": 73, "diag": 74, "diagon": 75, "disable_compil": 76, "divid": 77, "divmod": 78, "enable_compil": 79, "equal": 80, "erf": 81, "erfinv": 82, "eval": [83, 239], "expand_dim": 85, "ey": 86, "fft": [87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 219], "fft2": 88, "fftn": 89, "ifft": 90, "ifft2": 91, "ifftn": 92, "irfft": 93, "irfft2": 94, "irfftn": 95, "rfft": 96, "rfft2": 97, "rfftn": 98, "flatten": 99, "floor": 100, "floor_divid": 101, "grad": [103, 221], "greater": 104, "greater_equ": 105, "ident": [106, 273], "inner": 107, "isinf": 108, "isnan": 109, "isneginf": 110, "isposinf": 111, "jvp": 112, "less": 113, "less_equ": 114, "linalg": [115, 116], "norm": 115, "qr": 116, "linspac": 117, "log10": 120, "log2": 122, "logaddexp": 123, "logical_and": 124, "logical_not": 125, "logical_or": 126, "matmul": 128, "maximum": 130, "minimum": 133, "moveaxi": 134, "multipli": 135, "neg": 136, "new_stream": 137, "ones": 138, "ones_lik": 139, "outer": 140, "pad": 141, "partit": 142, "quantiz": 144, "quantized_matmul": 145, "random": [146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 308], "bernoulli": 146, "categor": 147, "gumbel": 148, "kei": 149, "normal": [150, 274], "randint": 151, "seed": 152, "truncated_norm": 154, "uniform": [155, 275], "repeat": 157, "save": [161, 317], "save_gguf": 162, "save_safetensor": 163, "savez": 164, "savez_compress": 165, "set_default_devic": 166, "set_default_stream": 167, "sigmoid": 168, "sign": 169, "sinh": 171, "softmax": 172, "sort": 173, "squeez": 177, "stack": 178, "stop_gradi": 179, "subtract": 180, "swapax": 182, "take": 183, "take_along_axi": 184, "tan": 185, "tanh": 186, "tensordot": 187, "tri": 189, "tril": 190, "triu": 191, "value_and_grad": [192, 199], "vjp": 194, "vmap": 195, "where": 196, 
"zero": 197, "zeros_lik": 198, "nn": [199, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299], "optim": [200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 306, 307], "adadelta": 200, "adafactor": 201, "adagrad": 202, "adam": 203, "adamw": 204, "adamax": 205, "lion": 206, "apply_gradi": 207, "init": [208, 268, 269, 270, 271, 272, 273, 274, 275], "state": [209, 248], "updat": [210, 221, 253, 313], "rmsprop": 211, "sgd": 212, "util": [213, 214, 215, 310], "tree_flatten": 213, "tree_map": 214, "tree_unflatten": 215, "data": 217, "type": 217, "support": 217, "algebra": 220, "neural": 221, "network": 221, "quick": [221, 316], "start": [221, 316], "The": 221, "modul": [221, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 304], "class": 221, "paramet": [221, 246], "inspect": 221, "valu": 221, "alibi": 222, "batchnorm": 223, "dropout": 226, "dropout2d": 227, "dropout3d": 228, "embed": 229, "gelu": [230, 276], "groupnorm": 231, "instancenorm": 232, "layernorm": 233, "mish": [235, 293], "appli": 236, "apply_to_modul": 237, "children": 238, "filter_and_map": 240, "freez": 241, "leaf_modul": 242, "load_weight": 243, "named_modul": 245, "save_weight": 247, "train": [249, 251, 311], "trainable_paramet": 250, "unfreez": 252, "update_modul": 254, "multiheadattent": 255, "prelu": [256, 294], "quantizedlinear": 257, "rmsnorm": 258, "relu": [259, 295], "rope": 260, "selu": [261, 296], "sequenti": 262, "silu": [263, 297], "sinusoidalpositionalencod": 264, "softshrink": [265, 298], "step": [266, 299], "constant": 268, "glorot_norm": 269, "glorot_uniform": 270, "he_norm": 271, "he_uniform": 272, "gelu_approx": 277, "gelu_fast_approx": 278, "loss": [279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 303], "binary_cross_entropi": 279, "cosine_similarity_loss": 280, "cross_entropi": 281, "gaussian_nll_loss": 282, "hinge_loss": 283, "huber_loss": 284, "kl_div_loss": 285, "l1_loss": 286, "log_cosh_loss": 287, "margin_ranking_loss": 288, "mse_loss": 289, "nll_loss": 290, "smooth_l1_loss": 291, "triplet_loss": 292, "function": [300, 303, 311, 312, 316], "initi": 301, "tree": 310, "basic": [311, 316], "speedup": 311, "debug": 311, "pure": 311, "graph": [311, 314, 316], "automat": 312, "differenti": 312, "vector": 312, "index": 313, "differ": 313, "numpi": [313, 315], "In": 313, "place": 313, "lazi": 314, "evalu": 314, "why": 314, "comput": 314, "onli": 314, "what": 314, "you": 314, "when": 314, "convers": 315, "other": 315, "framework": 315, "pytorch": 315, "jax": 315, "tensorflow": 315, "guid": 316, "serial": 317, "format": 317, "unifi": 318, "memori": 318, "A": 318, "simpl": 318, "specifi": 319}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) |