mlx/docs/build/html/searchindex.js

Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.atleast_1d", "python/_autosummary/mlx.core.atleast_2d", "python/_autosummary/mlx.core.atleast_3d", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.conv_general", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", 
"python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isclose", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.metal.get_active_memory", "python/_autosummary/mlx.core.metal.get_cache_memory", "python/_autosummary/mlx.core.metal.get_peak_memory", "python/_autosummary/mlx.core.metal.is_available", "python/_autosummary/mlx.core.metal.set_cache_limit", "python/_autosummary/mlx.core.metal.set_memory_limit", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", 
"python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.stream", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.tile", "python/_autosummary/mlx.core.topk", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", "python/_autosummary/stream_class", "python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/linalg", "python/metal", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", "python/nn/_autosummary/mlx.nn.AvgPool1d", "python/nn/_autosummary/mlx.nn.AvgPool2d", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GRU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LSTM", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.MaxPool1d", "python/nn/_autosummary/mlx.nn.MaxPool2d", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", "python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", 
"python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.RNN", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.Upsample", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", "python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.elu", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.glu", "python/nn/_autosummary_functions/mlx.nn.hardswish", "python/nn/_autosummary_functions/mlx.nn.leaky_relu", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid", "python/nn/_autosummary_functions/mlx.nn.log_softmax", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.relu6", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.sigmoid", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.softmax", "python/nn/_autosummary_functions/mlx.nn.softplus", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/_autosummary_functions/mlx.nn.tanh", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizers", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta", "python/optimizers/_autosummary/mlx.optimizers.Adafactor", "python/optimizers/_autosummary/mlx.optimizers.Adagrad", 
"python/optimizers/_autosummary/mlx.optimizers.Adam", "python/optimizers/_autosummary/mlx.optimizers.AdamW", "python/optimizers/_autosummary/mlx.optimizers.Adamax", "python/optimizers/_autosummary/mlx.optimizers.Lion", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update", "python/optimizers/_autosummary/mlx.optimizers.RMSprop", "python/optimizers/_autosummary/mlx.optimizers.SGD", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay", "python/optimizers/_autosummary/mlx.optimizers.join_schedules", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule", "python/optimizers/_autosummary/mlx.optimizers.step_decay", "python/optimizers/common_optimizers", "python/optimizers/optimizer", "python/optimizers/schedulers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/function_transforms", "usage/indexing", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "filenames": ["cpp/ops.rst", "dev/extensions.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", 
"python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.atleast_1d.rst", "python/_autosummary/mlx.core.atleast_2d.rst", "python/_autosummary/mlx.core.atleast_3d.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.conv_general.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isclose.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", "python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.norm.rst", "python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", 
"python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.metal.get_active_memory.rst", "python/_autosummary/mlx.core.metal.get_cache_memory.rst", "python/_autosummary/mlx.core.metal.get_peak_memory.rst", "python/_autosummary/mlx.core.metal.is_available.rst", "python/_autosummary/mlx.core.metal.set_cache_limit.rst", "python/_autosummary/mlx.core.metal.set_memory_limit.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.stream.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.tile.rst", "python/_autosummary/mlx.core.topk.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", 
"python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/_autosummary/stream_class.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/linalg.rst", "python/metal.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.AvgPool1d.rst", "python/nn/_autosummary/mlx.nn.AvgPool2d.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GRU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LSTM.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.MaxPool1d.rst", "python/nn/_autosummary/mlx.nn.MaxPool2d.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", "python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", "python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.RNN.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.Upsample.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", "python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", 
"python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.elu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.glu.rst", "python/nn/_autosummary_functions/mlx.nn.hardswish.rst", "python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.log_softmax.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.relu6.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softmax.rst", "python/nn/_autosummary_functions/mlx.nn.softplus.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/_autosummary_functions/mlx.nn.tanh.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizers.rst", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta.rst", "python/optimizers/_autosummary/mlx.optimizers.Adafactor.rst", "python/optimizers/_autosummary/mlx.optimizers.Adagrad.rst", "python/optimizers/_autosummary/mlx.optimizers.Adam.rst", "python/optimizers/_autosummary/mlx.optimizers.AdamW.rst", "python/optimizers/_autosummary/mlx.optimizers.Adamax.rst", "python/optimizers/_autosummary/mlx.optimizers.Lion.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.rst", "python/optimizers/_autosummary/mlx.optimizers.RMSprop.rst", "python/optimizers/_autosummary/mlx.optimizers.SGD.rst", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst", "python/optimizers/_autosummary/mlx.optimizers.step_decay.rst", "python/optimizers/common_optimizers.rst", 
"python/optimizers/optimizer.rst", "python/optimizers/schedulers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.atleast_1d", "mlx.core.atleast_2d", "mlx.core.atleast_3d", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.conv_general", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.divide", "mlx.core.divmod", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.inner", "mlx.core.isclose", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.norm", "mlx.core.linalg.qr", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.metal.get_active_memory", "mlx.core.metal.get_cache_memory", "mlx.core.metal.get_peak_memory", "mlx.core.metal.is_available", "mlx.core.metal.set_cache_limit", "mlx.core.metal.set_memory_limit", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", 
"mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.repeat", "mlx.core.reshape", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.stream", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.tile", "mlx.core.topk", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.value_and_grad", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_unflatten", "mlx.core.Stream", "Array", "Data Types", "Devices and Streams", "FFT", "Linear Algebra", "Metal", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.AvgPool1d", "mlx.nn.AvgPool2d", "mlx.nn.BatchNorm", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GRU", "mlx.nn.GroupNorm", "mlx.nn.InstanceNorm", "mlx.nn.LSTM", "mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.MaxPool1d", "mlx.nn.MaxPool2d", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.RNN", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softshrink", "mlx.nn.Step", "mlx.nn.Transformer", "mlx.nn.Upsample", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", "mlx.nn.elu", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.glu", "mlx.nn.hardswish", "mlx.nn.leaky_relu", "mlx.nn.log_sigmoid", "mlx.nn.log_softmax", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", 
"mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.relu6", "mlx.nn.selu", "mlx.nn.sigmoid", "mlx.nn.silu", "mlx.nn.softmax", "mlx.nn.softplus", "mlx.nn.softshrink", "mlx.nn.step", "mlx.nn.tanh", "Functions", "Initializers", "Layers", "Loss Functions", "Module", "Operations", "Optimizers", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.optimizers.cosine_decay", "mlx.optimizers.exponential_decay", "mlx.optimizers.join_schedules", "mlx.optimizers.linear_schedule", "mlx.optimizers.step_decay", "Common Optimizers", "Optimizer", "Schedulers", "Random", "Transforms", "Tree Utils", "Compilation", "Function Transforms", "Indexing Arrays", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 223, 322, 325, 327, 347, 349, 351, 352, 353, 354, 355, 356, 357, 358, 359], "provid": [1, 3, 76, 106, 198, 205, 214, 223, 244, 249, 251, 260, 261, 262, 265, 276, 277, 321, 325, 358, 360], "open": [1, 6, 14, 161, 165], "flexibl": [1, 5, 262], "which": [1, 3, 4, 5, 6, 14, 32, 66, 70, 78, 86, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 106, 112, 113, 114, 115, 116, 119, 120, 122, 154, 157, 158, 167, 168, 171, 172, 173, 174, 175, 187, 188, 194, 205, 207, 208, 226, 231, 232, 234, 242, 244, 248, 269, 297, 300, 304, 307, 322, 335, 336, 349, 352, 353, 354, 355, 359, 360], "user": [1, 3, 223], "mai": [1, 119, 231, 353, 354], "add": [1, 3, 88, 127, 151, 154, 228, 229, 353, 359], "special": 1, "without": [1, 3, 5, 189, 263, 321, 351, 352, 355, 356, 359], "much": [1, 3, 225, 226, 241, 242, 352, 355], "hassl": 1, "while": [1, 3, 6, 168, 269, 355, 356], "librari": [1, 6, 223], "suppli": 1, "effici": [1, 3, 5, 231, 269, 355, 357], "can": [1, 3, 5, 6, 10, 14, 46, 57, 66, 78, 79, 80, 81, 83, 86, 107, 108, 117, 118, 119, 127, 134, 143, 145, 156, 157, 161, 164, 165, 172, 191, 205, 223, 226, 233, 242, 248, 260, 271, 277, 297, 322, 325, 327, 335, 336, 349, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360], "compos": [1, 5, 223, 352, 353, 357], "ani": [1, 3, 5, 14, 66, 213, 214, 215, 223, 234, 244, 245, 248, 256, 265, 276, 277, 322, 344, 351, 352, 353, 355, 357, 358, 359], "number": [1, 14, 51, 66, 69, 70, 76, 89, 106, 109, 116, 121, 151, 154, 155, 157, 160, 163, 165, 167, 169, 198, 199, 202, 205, 207, 208, 223, 227, 228, 229, 231, 232, 236, 237, 263, 264, 276, 277, 279, 280, 281, 282, 341, 343, 344, 349, 352, 353, 360], "applic": [1, 6], "aris": [1, 356], "case": [1, 3, 92, 95, 96, 98, 99, 100, 101, 102, 120, 132, 168, 187, 226, 231, 242, 275, 307, 313, 318, 319, 335, 336, 352, 353, 357, 358, 359, 360], "where": [1, 4, 89, 111, 154, 205, 208, 225, 226, 227, 228, 229, 230, 231, 232, 234, 235, 236, 237, 238, 239, 240, 241, 242, 248, 264, 266, 267, 275, 281, 282, 286, 287, 288, 289, 298, 304, 310, 313, 315, 319, 336, 353, 354], "new": [1, 4, 63, 78, 144, 147, 168, 188, 201, 214, 263, 325, 327, 338, 343, 352, 354, 355, 356], "function": [1, 2, 3, 4, 5, 12, 66, 81, 84, 85, 106, 111, 116, 119, 120, 132, 178, 205, 207, 208, 212, 214, 223, 234, 243, 245, 249, 260, 264, 267, 268, 270, 271, 272, 274, 275, 276, 287, 288, 
289, 290, 291, 293, 294, 309, 314, 316, 317, 318, 319, 320, 322, 327, 336, 349, 351, 354, 355, 356, 358], "highli": [1, 6], "optim": [1, 2, 4, 5, 261, 352, 353, 355], "ar": [1, 2, 3, 4, 5, 6, 12, 14, 59, 63, 65, 66, 70, 71, 78, 86, 89, 91, 92, 94, 95, 97, 98, 100, 101, 102, 106, 111, 112, 113, 114, 115, 116, 119, 120, 122, 132, 141, 150, 151, 152, 154, 155, 156, 157, 158, 161, 164, 165, 174, 175, 187, 188, 194, 205, 207, 208, 213, 214, 227, 228, 229, 230, 231, 232, 236, 237, 239, 240, 251, 263, 265, 277, 295, 297, 298, 321, 325, 334, 336, 351, 352, 353, 354, 355, 356, 357, 358, 359], "need": [1, 3, 4, 5, 59, 154, 223, 261, 262, 273, 276, 349, 353, 355, 356, 357, 359], "For": [1, 3, 6, 119, 154, 215, 223, 227, 231, 244, 249, 257, 260, 265, 269, 273, 277, 279, 280, 281, 282, 322, 349, 352, 353, 354, 355, 356, 357, 358, 359], "you": [1, 3, 4, 5, 6, 223, 273, 276, 322, 349, 352, 353, 354, 356, 358, 359], "design": [1, 2, 5, 349, 359], "your": [1, 3, 6, 325, 353, 355], "own": [1, 6, 356], "link": [1, 6], "top": [1, 200, 240, 277], "core": [1, 2, 3, 4, 223, 225, 226, 227, 237, 241, 242, 251, 254, 258, 277, 278, 279, 280, 281, 282, 283, 284, 285, 295, 297, 304, 322, 325, 327, 352, 356, 357], "we": [1, 2, 3, 4, 76, 154, 155, 223, 233, 271, 332, 334, 349, 351, 352, 353, 355, 359], "inner": [1, 352], "work": [1, 3, 6, 141, 352, 353, 354, 355], "go": [1, 3, 353], "over": [1, 3, 4, 11, 13, 21, 22, 23, 24, 68, 69, 70, 92, 95, 98, 101, 110, 119, 121, 131, 133, 135, 142, 152, 153, 170, 182, 183, 192, 198, 200, 206, 227, 228, 229, 236, 239, 266, 297, 341, 344, 353], "simpl": [1, 3, 4, 223, 233, 321, 352, 353, 355], "learn": [1, 2, 4, 5, 227, 236, 237, 239, 264, 266, 328, 329, 330, 331, 332, 333, 334, 339, 340], "step": [1, 3, 4, 14, 223, 235, 238, 267, 329, 336, 341, 343, 344, 345, 352], "involv": [1, 327, 352], "ad": [1, 2, 6, 237, 325, 328, 329, 330, 331, 332, 333, 339, 355, 358], "let": [1, 2, 3, 352, 353, 355, 356], "s": [1, 2, 3, 4, 34, 43, 66, 75, 76, 91, 92, 94, 95, 97, 98, 100, 101, 106, 119, 122, 135, 150, 154, 157, 169, 172, 173, 190, 205, 206, 208, 212, 223, 226, 235, 238, 242, 248, 249, 251, 255, 256, 260, 267, 327, 336, 337, 349, 352, 353, 355, 356, 357, 358, 359], "sai": [1, 3, 322, 355], "would": [1, 3, 277, 354, 355, 356, 359], "like": [1, 3, 5, 149, 211, 232, 303, 336, 338, 352, 353, 355, 356, 357, 359], "an": [1, 3, 4, 6, 8, 11, 13, 25, 60, 61, 62, 63, 68, 69, 70, 86, 89, 102, 105, 109, 119, 122, 133, 141, 142, 144, 148, 149, 151, 153, 154, 155, 167, 168, 169, 184, 187, 193, 194, 195, 198, 199, 202, 208, 210, 211, 213, 214, 223, 225, 226, 230, 236, 238, 239, 240, 241, 242, 244, 263, 264, 265, 267, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 288, 310, 322, 328, 338, 342, 347, 349, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360], "take": [1, 3, 4, 66, 106, 116, 134, 143, 149, 155, 195, 205, 207, 208, 211, 263, 349, 353, 354, 358, 359, 360], "two": [1, 10, 12, 59, 61, 78, 80, 83, 91, 94, 100, 107, 108, 111, 117, 118, 120, 127, 132, 134, 143, 145, 150, 193, 226, 238, 242, 265, 277, 290, 296, 352, 353, 354, 359], "arrai": [1, 3, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 
156, 157, 158, 159, 160, 161, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 223, 227, 238, 244, 251, 254, 258, 264, 277, 278, 279, 280, 281, 282, 283, 284, 285, 287, 290, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 319, 322, 325, 328, 329, 330, 331, 332, 333, 334, 339, 340, 341, 342, 343, 344, 345, 352, 353, 355, 356, 357, 359], "x": [1, 2, 3, 4, 84, 109, 119, 155, 158, 169, 174, 178, 203, 204, 209, 214, 223, 225, 226, 227, 234, 236, 237, 239, 240, 241, 242, 243, 244, 264, 266, 268, 273, 275, 277, 286, 287, 288, 289, 290, 291, 292, 293, 294, 307, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 325, 327, 334, 352, 353, 354, 355, 356, 357, 359], "y": [1, 2, 3, 4, 209, 223, 227, 231, 236, 237, 239, 240, 266, 299, 304, 307, 327, 330, 352, 353, 355, 356], "scale": [1, 3, 76, 154, 155, 160, 231, 232, 263, 269, 270, 273, 277, 313, 329], "them": [1, 3, 223, 249, 260, 359], "both": [1, 10, 80, 81, 83, 107, 108, 117, 118, 119, 127, 134, 143, 145, 157, 191, 225, 226, 237, 238, 241, 242, 277, 327, 352, 353, 357, 359], "some": [1, 2, 3, 4, 249, 260, 336, 352, 353, 355], "coeffici": [1, 328, 329, 331, 332, 333, 334], "alpha": [1, 154, 286, 308, 310, 313, 332, 339], "beta": [1, 76, 154, 227, 236, 237, 239, 307, 331, 332, 333, 334], "respect": [1, 2, 4, 106, 154, 205, 214, 223, 227, 234, 236, 237, 239, 325, 353, 357], "togeth": [1, 4, 154, 214], "get": [1, 2, 4, 6, 69, 70, 74, 75, 136, 137, 138, 159, 223, 352, 353, 355, 359], "z": [1, 235, 352, 355], "well": [1, 3, 223, 249, 260, 263, 355], "veri": [1, 3, 263, 355, 359], "easili": 1, "do": [1, 3, 6, 223, 250, 260, 322, 325, 332, 352, 353, 355], "just": [1, 4, 352, 354], "write": [1, 3, 223, 356], "out": [1, 6, 225, 226, 231, 232, 241, 242, 257, 352, 353, 354], "follow": [1, 3, 4, 5, 6, 14, 71, 76, 119, 154, 223, 288, 289, 301, 328, 329, 330, 331, 332, 333, 334, 340, 349, 352, 353, 359], "import": [1, 2, 3, 4, 6, 119, 174, 205, 213, 214, 215, 223, 225, 226, 227, 237, 241, 242, 251, 277, 295, 297, 304, 322, 325, 352, 353, 354, 355, 356, 357], "mx": [1, 2, 3, 4, 102, 119, 120, 122, 174, 205, 223, 225, 226, 227, 237, 241, 242, 244, 251, 255, 268, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 292, 295, 296, 297, 301, 304, 311, 320, 322, 325, 327, 349, 352, 353, 354, 355, 356, 357, 358, 359, 360], "def": [1, 2, 3, 4, 205, 223, 325, 352, 353, 354, 355, 356, 359], "simple_axpbi": 1, "float": [1, 12, 14, 56, 104, 105, 111, 119, 155, 156, 160, 161, 164, 165, 218, 227, 230, 231, 232, 236, 237, 239, 244, 266, 269, 273, 275, 276, 277, 278, 279, 280, 281, 282, 284, 285, 296, 297, 298, 300, 304, 307, 308, 318, 319, 328, 329, 330, 331, 332, 333, 334, 339, 340, 341, 342, 344, 345], "return": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 36, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 140, 141, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 163, 164, 165, 166, 167, 168, 169, 170, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 
197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 223, 235, 238, 246, 248, 250, 252, 253, 254, 258, 265, 267, 278, 279, 280, 281, 282, 283, 284, 285, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 322, 325, 335, 351, 352, 353, 354, 355, 356, 358, 359], "thi": [1, 3, 4, 6, 11, 12, 13, 14, 21, 22, 23, 24, 82, 111, 116, 119, 120, 127, 131, 132, 133, 135, 136, 142, 152, 153, 157, 177, 182, 183, 184, 192, 194, 200, 206, 223, 230, 231, 232, 235, 238, 245, 246, 248, 249, 252, 253, 254, 258, 260, 261, 262, 263, 265, 267, 275, 279, 280, 281, 282, 288, 289, 290, 303, 319, 325, 336, 351, 352, 353, 355, 356, 358], "perform": [1, 3, 5, 70, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 132, 155, 169, 182, 194, 223, 236, 276, 281, 282, 352, 354, 355, 359], "leav": [1, 86, 214], "differenti": [1, 5], "howev": [1, 223, 234, 236, 336, 349, 352, 355, 356], "vector": [1, 2, 5, 110, 116, 119, 194, 207, 208, 233, 297, 357], "math": [1, 3, 308, 352], "often": [1, 232], "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 119, 155, 213, 356], "same": [1, 3, 6, 12, 59, 63, 66, 69, 70, 71, 96, 99, 100, 101, 106, 111, 116, 151, 157, 169, 207, 209, 223, 226, 227, 230, 236, 237, 242, 265, 278, 279, 280, 281, 282, 283, 284, 285, 297, 308, 325, 335, 349, 352, 354, 359], "realli": 1, "part": [1, 353, 354], "doe": [1, 3, 6, 136, 223, 352, 354, 355, 356], "fast": [1, 234, 289, 359], "so": [1, 3, 6, 106, 205, 230, 277, 327, 352, 355, 359], "decid": [1, 214, 248], "want": [1, 3, 353, 359], "reli": 1, "acceler": [1, 227], "framework": [1, 5], "continu": [1, 353], "impos": 1, "our": [1, 3, 4, 271, 328, 329, 330, 331, 333, 334], "assumpt": 1, "also": [1, 3, 4, 5, 6, 10, 79, 80, 81, 83, 92, 95, 98, 101, 107, 108, 117, 118, 127, 134, 143, 145, 154, 191, 212, 223, 248, 261, 263, 265, 272, 287, 313, 315, 321, 327, 352, 353, 354, 355, 356, 357, 360], "assum": [1, 3, 120, 214, 223, 225, 226, 236, 241, 242], "how": [1, 3, 4, 223, 225, 226, 228, 229, 233, 241, 242, 277, 335, 352, 354, 359], "gradient": [1, 2, 4, 106, 189, 205, 212, 223, 249, 261, 265, 276, 303, 325, 327, 328, 329, 331, 332, 333, 334, 335, 338, 340, 352, 353, 354, 355, 356, 357], "ins": 1, "what": [1, 3, 214], "coincid": 1, "right": [1, 6, 154, 225, 226, 234, 241, 242, 277, 288, 289, 298, 300, 308], "place": [1, 3, 169, 355, 356], "cours": [1, 353], "The": [1, 3, 4, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 34, 43, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 163, 164, 165, 166, 167, 168, 172, 173, 178, 179, 180, 181, 182, 183, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 218, 225, 226, 227, 228, 229, 230, 231, 232, 233, 235, 236, 237, 238, 239, 240, 241, 242, 245, 251, 255, 256, 261, 262, 263, 265, 266, 267, 269, 271, 273, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 290, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 319, 322, 325, 327, 328, 329, 330, 331, 332, 333, 334, 337, 339, 340, 341, 344, 347, 352, 353, 354, 355, 356, 
357, 358, 359, 360], "structur": [1, 335, 353], "from": [1, 3, 4, 5, 76, 78, 97, 98, 100, 101, 105, 119, 122, 132, 138, 140, 149, 154, 156, 157, 158, 159, 161, 164, 174, 187, 189, 191, 194, 195, 200, 209, 211, 213, 214, 215, 223, 240, 249, 251, 263, 279, 280, 281, 282, 284, 285, 298, 307, 322, 351, 352, 353, 355, 356, 357, 358, 359], "frontend": 1, "api": [1, 353], "redirect": 1, "when": [1, 3, 5, 6, 66, 70, 119, 122, 228, 229, 277, 281, 282, 301, 307, 325, 343, 349, 352, 359], "appropri": [1, 352], "fallback": 1, "metal": [1, 5], "vjp": [1, 357], "jvp": [1, 357], "In": [1, 3, 4, 132, 154, 214, 223, 231, 236, 325, 328, 330, 331, 333, 334, 335, 351, 352, 353, 355, 358, 359], "one": [1, 3, 6, 56, 60, 65, 69, 70, 88, 89, 119, 125, 132, 155, 157, 187, 191, 260, 277, 297, 359], "sentenc": 1, "comput": [1, 2, 3, 4, 5, 6, 76, 106, 116, 119, 127, 135, 150, 154, 182, 189, 198, 205, 206, 207, 212, 223, 227, 235, 236, 237, 238, 239, 249, 261, 265, 266, 269, 276, 279, 280, 281, 282, 288, 289, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 327, 328, 329, 331, 332, 333, 334, 338, 352, 353, 357, 359], "graph": [1, 3, 4, 5, 353], "rule": 1, "evalu": [1, 3, 4, 5, 86, 116, 207, 223, 247, 257, 325, 327, 352, 357], "said": [1, 3], "start": [1, 2, 3, 5, 6, 14, 121, 184, 352, 354, 359], "discuss": 1, "more": [1, 4, 8, 56, 78, 132, 140, 141, 172, 173, 223, 227, 231, 269, 273, 276, 277, 279, 280, 281, 282, 349, 352, 353, 354, 357, 359], "detail": [1, 8, 140, 223, 231, 269, 273, 277, 279, 280, 281, 282, 328, 330, 331, 333, 334, 354, 357], "thei": [1, 2, 3, 12, 71, 111, 271, 299, 325, 334, 351, 352, 355, 357, 358, 359], "c": [1, 3, 119, 218, 225, 226, 227, 228, 229, 231, 232, 237, 238, 241, 242, 356, 357, 359], "scalar": [1, 10, 12, 25, 36, 56, 59, 63, 65, 80, 81, 83, 104, 105, 106, 107, 108, 111, 117, 118, 119, 121, 127, 128, 129, 130, 132, 134, 143, 145, 151, 161, 164, 165, 172, 191, 205, 209, 212, 308, 353, 355, 357], "sum": [1, 2, 10, 110, 119, 131, 182, 198, 223, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 354, 356], "element": [1, 9, 10, 15, 16, 17, 18, 19, 20, 23, 51, 64, 72, 73, 76, 80, 81, 83, 84, 85, 87, 89, 103, 104, 107, 108, 111, 112, 113, 114, 115, 117, 118, 123, 124, 125, 126, 127, 128, 129, 130, 134, 143, 145, 146, 152, 154, 155, 166, 167, 170, 178, 179, 180, 181, 185, 186, 191, 194, 196, 197, 200, 205, 209, 230, 231, 232, 235, 238, 243, 264, 267, 269, 291, 293, 294, 309, 310, 312, 315, 316, 317, 352, 353], "wise": [1, 9, 10, 15, 16, 17, 18, 19, 20, 64, 72, 73, 80, 81, 83, 84, 85, 87, 103, 104, 107, 108, 111, 117, 118, 123, 124, 125, 126, 127, 128, 129, 130, 134, 143, 145, 146, 166, 170, 178, 179, 180, 181, 185, 186, 191, 196, 197, 231, 232, 243, 264, 291, 293, 294, 309, 310, 312, 315, 316, 317, 352], "numpi": [1, 3, 4, 5, 10, 12, 14, 63, 80, 81, 83, 107, 108, 111, 117, 118, 127, 132, 134, 143, 145, 191, 355, 357, 358], "style": [1, 10, 12, 80, 81, 83, 107, 108, 111, 117, 118, 127, 132, 134, 143, 145, 191], "broadcast": [1, 10, 12, 63, 65, 80, 81, 83, 105, 107, 108, 111, 117, 118, 127, 132, 134, 143, 145, 156, 157, 164, 165, 191, 195, 209, 263], "between": [1, 5, 65, 102, 276, 296, 299, 300, 303, 343, 355, 359], "input": [1, 2, 3, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 77, 78, 80, 81, 83, 84, 85, 87, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 123, 124, 125, 126, 127, 128, 129, 
130, 131, 132, 133, 134, 135, 142, 143, 144, 145, 146, 149, 150, 151, 152, 153, 154, 155, 163, 166, 167, 168, 169, 170, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 203, 204, 205, 206, 208, 209, 211, 225, 226, 227, 228, 229, 231, 232, 233, 235, 236, 237, 238, 239, 240, 241, 242, 263, 265, 266, 267, 269, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 290, 295, 296, 298, 299, 300, 301, 303, 304, 306, 308, 319, 322, 352, 353, 354, 357, 358], "upcast": 1, "const": [1, 298], "factor": [1, 120, 277, 297, 342, 345], "streamordevic": 1, "stream": [1, 5, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 163, 164, 165, 166, 167, 168, 169, 170, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 206, 209, 210, 211, 359], "schedul": [1, 141, 327, 341, 342, 343, 344, 345, 347, 359], "itself": [1, 336], "call": [1, 3, 4, 26, 104, 223, 233, 249, 260, 271, 325, 327, 336, 352, 353, 355], "other": [1, 3, 5, 119, 223, 250, 325, 334, 352, 354, 355, 357], "within": [1, 23, 111], "simplest": [1, 223], "wai": [1, 3, 6, 223, 277, 352, 353, 354], "about": [1, 3, 4, 355, 359], "term": [1, 298, 328, 329, 330, 331, 332, 333, 339], "exist": [1, 3, 249, 260], "auto": [1, 6], "ax": [1, 11, 13, 21, 22, 57, 88, 91, 92, 94, 95, 97, 98, 100, 101, 102, 110, 119, 131, 133, 135, 142, 151, 153, 182, 187, 192, 193, 198, 201, 206, 353], "multipli": [1, 154, 155, 230, 273, 277], "earlier": 1, "goal": 1, "themselv": [1, 352], "contain": [1, 3, 23, 24, 49, 66, 78, 96, 97, 98, 119, 128, 129, 130, 154, 184, 209, 223, 248, 250, 251, 256, 276, 304, 322, 325, 352, 353], "act": [1, 303], "data": [1, 4, 5, 8, 14, 89, 99, 100, 105, 109, 121, 148, 164, 202, 210, 232, 278, 279, 280, 281, 282, 283, 284, 285, 352, 354, 356], "nor": [1, 106, 205], "rather": [1, 353, 359], "easi": [1, 223], "interfac": 1, "block": [1, 3, 276], "A": [1, 3, 5, 6, 7, 49, 59, 66, 106, 116, 119, 120, 122, 131, 132, 154, 156, 157, 158, 160, 161, 164, 165, 184, 188, 190, 205, 207, 208, 212, 213, 214, 215, 216, 223, 227, 231, 235, 236, 237, 239, 248, 252, 253, 261, 262, 266, 271, 273, 276, 279, 280, 282, 289, 308, 309, 325, 327, 331, 333, 335, 336, 338, 343, 352, 353, 355, 356], "It": [1, 3, 6, 106, 177, 205, 223, 262, 265, 335, 347, 356, 358], "creat": [1, 3, 6, 89, 109, 190, 223, 325, 327, 343, 352, 354, 356], "output": [1, 3, 6, 11, 12, 13, 14, 23, 63, 66, 89, 96, 99, 100, 101, 105, 106, 109, 111, 119, 121, 131, 133, 135, 142, 148, 149, 152, 153, 156, 157, 158, 160, 161, 164, 165, 174, 175, 182, 187, 192, 195, 202, 205, 206, 207, 208, 209, 210, 211, 225, 226, 227, 228, 229, 237, 240, 241, 242, 263, 265, 275, 276, 277, 279, 280, 281, 282, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 319, 322, 352, 353, 354, 355, 356, 357, 358, 359], "given": [1, 11, 13, 23, 63, 65, 67, 76, 78, 86, 88, 90, 91, 92, 93, 94, 95, 99, 100, 101, 105, 119, 131, 133, 135, 140, 142, 147, 153, 161, 
169, 177, 182, 184, 192, 199, 200, 202, 203, 204, 206, 216, 225, 226, 230, 241, 242, 248, 263, 296, 298, 304], "set": [1, 3, 4, 6, 66, 79, 82, 140, 141, 176, 177, 190, 234, 240, 247, 249, 256, 257, 260, 261, 265, 269, 275, 296, 308, 319, 325, 329, 336, 349, 353, 355], "further": [1, 6, 353], "class": [1, 3, 4, 7, 8, 25, 216, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 297, 325, 328, 329, 330, 331, 332, 333, 334, 339, 340, 347], "under": [1, 119], "These": [1, 66, 195, 297, 359], "word": 1, "bit": [1, 76, 154, 155, 218, 244, 265], "abstract": 1, "back": [1, 3, 139, 356], "give": [1, 3, 4, 23, 352], "ourselv": 1, "concret": [1, 235, 238, 240, 267, 355, 359], "imag": [1, 229, 231, 232, 277], "public": [1, 223], "explicit": [1, 336, 349, 356], "alpha_": 1, "beta_": 1, "must": [1, 6, 65, 105, 119, 156, 157, 161, 164, 165, 209, 277, 356], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 140, 223, 322, 352, 353, 357], "avoid": [1, 352], "unnecessari": [1, 3], "alloc": [1, 137, 140, 141, 325], "respons": 1, "space": [1, 121, 306], "void": 1, "eval_cpu": 1, "std": [1, 284], "overrid": [1, 82], "eval_gpu": 1, "jacobian": [1, 116, 207, 357], "product": [1, 110, 116, 132, 150, 153, 198, 207, 263, 357], "primal": [1, 116, 207], "tangent": [1, 19, 20, 116, 196, 197, 320], "int": [1, 3, 4, 7, 11, 13, 14, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 47, 49, 52, 55, 56, 58, 63, 67, 68, 69, 70, 76, 77, 78, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 106, 109, 119, 121, 131, 133, 135, 136, 137, 138, 140, 141, 142, 144, 148, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 167, 168, 169, 182, 183, 184, 187, 188, 192, 193, 194, 195, 198, 199, 200, 201, 202, 203, 204, 205, 206, 208, 210, 216, 223, 225, 226, 227, 228, 229, 233, 235, 236, 237, 238, 239, 240, 241, 242, 263, 265, 266, 267, 269, 273, 276, 290, 296, 297, 301, 306, 308, 325, 341, 343, 344, 345], "argnum": [1, 106, 205, 353], "cotan": 1, "across": [1, 236], "pair": [1, 151, 251, 269], "repres": [1, 3, 304, 308, 356], "axi": [1, 3, 4, 11, 13, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 52, 55, 58, 67, 78, 88, 90, 93, 96, 97, 98, 99, 100, 101, 102, 119, 131, 133, 135, 142, 144, 151, 152, 153, 157, 167, 182, 183, 184, 187, 188, 192, 193, 194, 195, 199, 200, 201, 206, 208, 225, 226, 241, 242, 267, 290, 294, 296, 297, 301, 306, 308, 316, 354], "correspond": [1, 11, 13, 56, 65, 76, 78, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 131, 133, 142, 153, 192, 198, 208, 214, 353], "dimens": [1, 3, 11, 13, 21, 22, 43, 49, 56, 60, 61, 62, 66, 69, 70, 78, 88, 97, 98, 100, 101, 102, 110, 119, 120, 131, 132, 133, 135, 142, 153, 154, 157, 163, 192, 195, 198, 201, 206, 227, 228, 229, 231, 232, 235, 236, 237, 238, 239, 263, 266, 267, 269, 276, 277, 290, 297, 352, 353], "vmap": [1, 353, 355, 357], "print": [1, 2, 3, 4, 6, 213, 214, 215, 223, 349, 352, 353, 354, 355, 356, 357], "ostream": 1, "os": [1, 6], "equival": [1, 26, 46, 57, 81, 104, 194, 234, 262, 264, 265, 268, 270, 272, 274], "check": [1, 6, 59, 139, 251, 353, 354], "bool": [1, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 56, 58, 59, 66, 70, 111, 119, 122, 131, 133, 135, 139, 141, 142, 153, 155, 156, 161, 164, 165, 192, 206, 227, 228, 229, 235, 236, 237, 238, 239, 240, 244, 248, 249, 251, 257, 260, 263, 265, 267, 269, 273, 276, 277, 295, 298, 329, 340], "is_equival": 1, "privat": 1, "fall": 1, "eval": 
[1, 2, 3, 4, 223, 325, 327, 352, 353, 355, 357], "deriv": [1, 353, 355], "base": [1, 119, 124, 126, 269, 276, 325, 327, 333, 347, 349, 352, 354], "abov": [1, 3, 154, 203, 223, 277, 332, 353, 354, 355, 359], "demonstr": [1, 356], "treat": [1, 97, 98, 100, 101, 194, 277, 352], "paramet": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 140, 141, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 244, 245, 248, 249, 251, 256, 257, 260, 261, 262, 263, 264, 265, 266, 267, 269, 271, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 290, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 319, 321, 322, 325, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 338, 339, 340, 341, 342, 343, 344, 345, 347, 352, 353, 355], "produc": [1, 66, 263, 322], "through": [1, 189, 276, 334, 352, 353, 356], "construct": [1, 4, 77, 105, 148, 199, 210], "its": [1, 6, 132, 152, 163, 202, 212, 215, 223, 265, 331, 332, 333, 356, 359], "type": [1, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 140, 141, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 163, 164, 165, 166, 167, 168, 169, 170, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 213, 223, 276, 278, 279, 280, 281, 282, 283, 284, 285, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 352, 354], "shape": [1, 3, 4, 46, 59, 63, 66, 68, 69, 70, 78, 90, 93, 96, 99, 100, 101, 105, 116, 132, 148, 149, 156, 157, 158, 160, 161, 164, 165, 168, 195, 207, 209, 210, 211, 223, 225, 226, 227, 228, 229, 231, 232, 235, 237, 238, 240, 241, 242, 251, 267, 278, 279, 280, 281, 282, 283, 284, 285, 297, 308, 327, 352, 353, 354, 357, 359], "pass": [1, 3, 4, 46, 57, 150, 151, 205, 212, 213, 214, 223, 249, 260, 261, 262, 265, 271, 352, 355], "re": [1, 4, 6, 322], "now": [1, 3, 6, 265, 352, 356], "promot": 1, "dtype": [1, 3, 14, 25, 32, 56, 89, 102, 105, 109, 119, 120, 121, 148, 158, 160, 161, 164, 165, 202, 210, 218, 277, 278, 279, 280, 281, 282, 283, 284, 285, 295, 297, 304, 341, 342, 343, 344, 345, 352, 353, 354, 356, 357, 358], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 14, 89, 109, 119, 120, 121, 148, 158, 160, 164, 165, 202, 210, 218, 277, 278, 279, 280, 281, 282, 283, 284, 285, 295, 297, 304, 341, 342, 343, 344, 345, 352, 353, 354, 355, 356, 357, 358], "non": 
[1, 6, 258, 267, 309, 325], "point": [1, 2, 3, 6, 104, 155, 218], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 32, 99, 100, 101, 122, 244, 356], "up": [1, 3, 265, 352], "determin": [1, 78, 255, 358], "x_cast": 1, "astyp": [1, 3, 244, 356], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 14, 47, 52, 58, 67, 68, 69, 70, 77, 78, 89, 102, 106, 119, 120, 140, 151, 156, 160, 165, 167, 169, 184, 188, 202, 203, 204, 205, 206, 208, 213, 223, 225, 226, 227, 228, 229, 230, 231, 232, 234, 236, 237, 239, 241, 242, 264, 268, 269, 273, 274, 275, 276, 278, 279, 280, 281, 282, 283, 284, 285, 286, 288, 289, 291, 292, 295, 297, 299, 300, 304, 307, 308, 310, 311, 312, 313, 318, 319, 322, 325, 328, 329, 331, 332, 333, 334, 336, 339, 340, 341, 342, 343, 344, 345, 349, 352, 353, 354, 355, 356, 357, 358], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 223, 352], "resolv": 1, "No": [1, 3], "happen": [1, 3, 276, 327, 352, 355], "alon": [1, 356], "effect": [1, 231, 352, 355], "onli": [1, 3, 5, 6, 59, 68, 69, 70, 119, 154, 218, 223, 248, 249, 251, 257, 260, 261, 262, 325, 352, 353, 358, 359], "execut": [1, 6, 60, 61, 62, 138, 356, 359], "depend": [1, 2, 56, 119, 235, 238, 267, 354, 358, 359], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 163, 164, 165, 166, 167, 168, 169, 170, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 206, 209, 210, 211, 216, 359, 360], "specifi": [1, 14, 32, 69, 70, 78, 97, 98, 105, 106, 119, 121, 144, 148, 157, 167, 193, 194, 195, 198, 201, 205, 208, 210, 227, 275, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 319, 353, 359], "memori": [1, 5, 136, 137, 138, 140, 141, 276, 325, 329, 352, 355, 356], "ha": [1, 3, 4, 5, 56, 66, 78, 96, 97, 99, 100, 101, 106, 137, 157, 227, 235, 238, 240, 267, 325, 327, 352, 354, 355, 357, 359], "been": [1, 3, 137, 355], "try": [1, 6], "naiv": [1, 353], "gener": [1, 2, 14, 70, 89, 97, 98, 121, 156, 160, 161, 164, 165, 276, 349, 352, 354, 355, 360], "version": [1, 6, 76, 127, 131, 154, 182, 208, 349, 353, 354], "declar": 1, "member": [1, 223, 254, 258], "method": [1, 3, 7, 8, 25, 216, 223, 255, 276, 325, 328, 329, 330, 331, 332, 333, 334, 336, 339, 340, 347], "each": [1, 49, 76, 86, 132, 151, 154, 155, 157, 167, 174, 175, 184, 199, 201, 208, 209, 231, 232, 233, 235, 236, 238, 267, 269, 276, 295, 297, 349, 352, 355], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 66, 223, 352], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 84, 155, 205, 223, 225, 235, 238, 241, 267, 328, 329, 330, 331, 332, 333, 334, 339, 340, 352, 353, 359], "readi": 1, "fill": [1, 105, 149, 202, 211, 278, 279, 280, 281, 282, 284, 285], "malloc_or_wait": 1, "synchron": [1, 352], "avail": [1, 2, 3, 4, 6, 8, 139, 218, 359], "There": [1, 223, 277, 352], "wait": [1, 3, 141], "here": [1, 3, 352, 353, 355, 358, 359], "request": 1, "pressur": 1, "condit": 
[1, 209, 359], "set_data": 1, "nbyte": 1, "collect": [1, 214, 351], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 49, 69, 76, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 105, 109, 119, 137, 141, 154, 155, 157, 168, 184, 187, 223, 225, 226, 228, 229, 233, 237, 241, 242, 265, 277, 329, 355, 356], "map": [1, 4, 122, 214, 233, 244], "linear": [1, 3, 4, 5, 214, 223, 234, 251, 265, 267, 268, 270, 272, 277, 286, 287, 288, 289, 290, 292, 311, 312, 313, 315, 322, 325, 336, 344, 352], "indic": [1, 12, 21, 22, 23, 24, 106, 111, 112, 113, 114, 115, 184, 194, 195, 205, 257, 259, 297, 304, 343, 354], "offset": [1, 3, 78], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 68, 69, 70, 225, 226, 228, 229, 241, 242, 269, 354], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 11, 12, 13, 14, 21, 22, 23, 24, 59, 66, 67, 68, 69, 70, 74, 75, 76, 77, 78, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 106, 109, 111, 119, 120, 121, 122, 131, 133, 135, 140, 141, 142, 148, 152, 153, 154, 155, 156, 157, 158, 160, 161, 163, 164, 165, 167, 168, 169, 176, 177, 183, 184, 187, 188, 190, 192, 198, 200, 201, 202, 203, 204, 205, 206, 208, 210, 218, 225, 226, 227, 228, 229, 235, 237, 238, 240, 241, 242, 244, 249, 251, 257, 260, 263, 264, 265, 267, 269, 273, 274, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 290, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 325, 328, 329, 330, 331, 332, 333, 334, 339, 340, 349, 351, 352, 353, 356, 358, 360], "row": [1, 89, 109, 154, 202], "major": 1, "henc": [1, 154, 352], "doesn": [1, 223], "addit": [1, 3, 10, 122, 227, 236, 239, 263, 266, 325, 353], "abl": [1, 154], "all": [1, 4, 6, 12, 23, 60, 61, 62, 66, 69, 70, 89, 92, 95, 98, 101, 132, 151, 152, 187, 223, 244, 245, 249, 252, 253, 254, 258, 260, 263, 265, 273, 276, 277, 322, 325, 347, 349, 352, 354, 355, 357, 360], "incom": 1, "accordingli": 1, "dispatch": 1, "float16": [1, 122, 218, 244, 355, 356], "bfloat16": [1, 356], "complex64": 1, "throw": [1, 66], "error": [1, 6, 84, 85, 141, 184, 234, 265, 287, 288, 289, 303, 305, 353, 356], "encount": [1, 353], "unexpect": [1, 14], "regist": [1, 4], "op": [1, 150, 249, 355], "assert": 1, "2": [1, 2, 3, 4, 69, 77, 78, 84, 91, 94, 96, 97, 98, 99, 100, 101, 102, 119, 120, 126, 132, 154, 163, 198, 202, 203, 204, 218, 223, 225, 226, 229, 234, 241, 242, 266, 273, 277, 278, 279, 280, 281, 282, 283, 284, 285, 288, 297, 298, 300, 307, 308, 322, 325, 328, 330, 331, 332, 336, 339, 352, 353, 354, 355, 356, 357, 358, 359], "1": [1, 3, 4, 14, 23, 24, 68, 69, 70, 77, 78, 90, 91, 93, 94, 96, 97, 98, 99, 100, 101, 102, 110, 119, 120, 132, 141, 150, 152, 154, 157, 160, 165, 178, 183, 194, 200, 205, 218, 223, 225, 226, 227, 228, 229, 230, 231, 232, 234, 235, 236, 237, 238, 239, 240, 241, 242, 264, 266, 267, 269, 273, 275, 277, 279, 280, 281, 282, 283, 284, 285, 286, 288, 289, 290, 293, 294, 295, 296, 297, 298, 299, 300, 301, 303, 304, 306, 307, 308, 313, 314, 316, 317, 319, 322, 325, 327, 328, 329, 330, 331, 332, 333, 334, 336, 339, 340, 341, 342, 343, 344, 345, 352, 353, 354, 356, 357, 358, 359], "correct": [1, 6, 331, 332, 333, 354, 355], "els": [1, 3, 223, 249, 355], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 12, 68, 69, 70, 102, 111, 120, 122, 132, 154, 353, 354, 356, 358], "have": [1, 3, 6, 12, 59, 60, 61, 62, 97, 98, 100, 101, 111, 132, 157, 213, 238, 263, 271, 334, 336, 351, 352, 354, 355, 359], "rememb": 1, "3": [1, 
3, 6, 102, 119, 120, 277, 280, 282, 291, 329, 334, 349, 352, 354, 356, 357], "complic": 1, "keep": [1, 11, 13, 21, 22, 131, 133, 135, 142, 153, 192, 206, 223, 248, 353, 355], "mind": [1, 3], "half": [1, 14, 161, 165, 269, 355], "precis": [1, 3, 223, 234, 335, 352], "direct": [1, 3, 246, 334, 359], "fix": [1, 3, 6, 355], "possibl": [1, 3, 132, 184, 233, 352, 354, 359], "due": 1, "transpos": [1, 3, 26, 155], "aren": 1, "guarante": 1, "fit": [1, 154, 359], "requir": [1, 3, 223, 355, 356], "column": [1, 89, 109, 154], "inplac": 1, "expect": [1, 3, 228, 229, 230, 231, 232, 273, 276, 298, 352, 354], "answer": 1, "copi": [1, 3, 5, 152, 183, 356], "simpli": [1, 3, 6, 268, 286, 292, 311, 320, 325, 352, 353], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 78, 102, 106, 128, 130, 132, 152, 163, 193, 198, 205, 213, 223, 226, 236, 242, 277, 296, 304, 329, 331, 332, 333, 336, 352, 353, 356, 359], "mode": [1, 71, 247, 257, 259, 277, 281, 282], "i": [1, 3, 116, 119, 223, 228, 229, 231, 232, 238, 249, 303, 332, 343, 352, 353], "e": [1, 4, 6, 84, 116, 178, 227, 228, 229, 231, 232, 236, 237, 239, 249, 266, 293, 294, 316, 321, 327, 330, 352, 355, 360], "match": [1, 6, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 136, 251, 277, 297, 354, 356], "transposit": 1, "data_s": 1, "items": 1, "flag": [1, 352, 356], "copy_inplac": 1, "copytyp": 1, "n": [1, 3, 25, 68, 69, 70, 89, 90, 92, 93, 95, 96, 99, 101, 109, 202, 206, 225, 226, 227, 228, 229, 231, 232, 235, 238, 241, 242, 267, 277, 303, 308], "incx": 1, "inci": 1, "great": 1, "But": [1, 359], "criteria": 1, "luckili": [1, 355], "alwai": [1, 136, 213, 353], "With": 1, "final": [1, 2, 3, 4, 344], "singl": [1, 4, 86, 116, 122, 151, 207, 226, 242, 352, 354, 358], "row_contigu": 1, "col_contigu": 1, "common": [1, 327, 352, 355], "hit": 1, "mileston": 1, "enough": [1, 355], "run": [1, 3, 4, 5, 6, 7, 150, 216, 227, 244, 328, 329, 331, 332, 333, 352, 355, 359, 360], "If": [1, 3, 6, 11, 12, 13, 14, 21, 22, 23, 24, 56, 59, 65, 67, 71, 77, 78, 86, 99, 100, 101, 104, 105, 106, 111, 119, 122, 131, 132, 133, 135, 140, 141, 142, 148, 151, 152, 153, 157, 167, 182, 183, 184, 192, 194, 195, 198, 200, 205, 206, 208, 210, 214, 227, 228, 229, 236, 239, 240, 249, 251, 260, 265, 267, 269, 271, 273, 277, 295, 297, 308, 329, 352, 353, 355, 358, 359, 360], "plan": [1, 352], "stop": [1, 3, 14, 121, 189, 353, 354], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 359], "silicon": [1, 3, 5, 6, 359], "address": 1, "shade": 1, "languag": [1, 218], "kernel": [1, 68, 69, 70, 225, 241, 352, 354], "written": 1, "help": [1, 3, 352, 359], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6, 353], "cpp": 1, "algorithm": [1, 277, 334], "launch": [1, 354], "exactli": [1, 3, 251, 353], "mani": [1, 184, 228, 229, 233, 352, 355], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 66, 214, 227, 244, 251, 256, 262, 327, 329, 332, 334, 335, 336, 340, 341, 342, 343, 344, 345, 352, 355], "assign": [1, 325], "axpby_gener": 1, "buffer": [1, 136, 356], "constant": [1, 3, 6, 151, 223, 227, 236, 239, 266, 298, 308, 339, 341, 352, 356], "4": [1, 3, 76, 102, 119, 154, 155, 174, 218, 225, 226, 227, 237, 241, 242, 265, 276, 277, 279, 280, 281, 295, 352, 354, 357, 359], "5": [1, 2, 3, 6, 119, 141, 156, 225, 227, 230, 231, 232, 237, 241, 274, 277, 278, 281, 282, 307, 318, 322, 339, 341, 342, 352, 353, 354], "x_stride": 1, "6": [1, 3, 119, 174, 276, 280, 288, 289, 291, 298, 308, 312, 339, 352, 354, 357], "y_stride": 1, "7": [1, 3, 119, 154, 354], "ndim": [1, 102, 119, 277], "8": [1, 3, 6, 119, 
154, 218, 226, 237, 242, 276, 296, 328, 329, 330, 331, 332, 333, 339, 352, 354, 357, 359], "uint": 1, "index": [1, 5, 7, 23, 88, 89, 106, 152, 194, 195, 205, 216], "thread_position_in_grid": 1, "convert": [1, 56, 60, 61, 62, 102, 265, 355, 356, 357], "instanti": [1, 4, 355], "uniqu": [1, 349], "host": 1, "name": [1, 122, 154, 155, 172, 173, 174, 175, 223, 236, 248, 251, 253, 354, 358], "identifi": [1, 213, 351], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "compil": [1, 5, 6, 79, 82, 353, 355], "mlx_ext": 1, "metallib": [1, 6], "see": [1, 3, 4, 6, 8, 27, 28, 29, 30, 31, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 119, 140, 172, 173, 223, 227, 231, 234, 247, 264, 265, 268, 269, 270, 272, 273, 274, 277, 279, 280, 281, 282, 287, 288, 289, 313, 352, 353, 354, 357, 359], "later": [1, 6], "co": [1, 273, 353], "locat": [1, 261, 262, 359], "share": [1, 5, 76, 154, 155], "register_librari": 1, "potenti": [1, 141], "path": [1, 6, 174, 175, 251], "tri": 1, "load": [1, 4, 5, 251], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 25, 36, 56, 156, 161, 164, 165, 208, 213, 214, 231, 276, 351], "why": [1, 3], "packag": [1, 2, 4, 322], "process": [1, 3, 70, 71, 214, 232, 233, 276, 351], "logic": [1, 128, 129, 130], "grid": 1, "shown": 1, "below": [1, 6, 119, 202, 204, 218, 277, 355], "prepar": [1, 3], "carri": 1, "should": [1, 2, 3, 4, 6, 78, 116, 154, 195, 205, 207, 213, 223, 228, 229, 231, 232, 257, 263, 271, 297, 299, 304, 325, 351, 352, 353, 355, 356, 360], "d": [1, 3, 77, 78, 110, 119, 132, 150, 194, 202, 203, 204, 215, 232, 235, 238, 267, 328, 331, 333, 359], "ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 132, 147, 177, 223, 341, 342, 344, 345, 352, 355, 357, 359], "sure": [1, 3, 6, 223, 352], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 71, 106, 119, 122, 171, 172, 173, 174, 175, 205, 213, 215, 244, 245, 248, 249, 251, 253, 255, 260, 277, 281, 282, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308], "encod": [1, 269, 273, 276, 297], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 223], "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": [1, 110, 304, 353], "than": [1, 3, 56, 71, 78, 81, 107, 108, 117, 118, 132, 140, 214, 269, 275, 277, 304, 307, 319, 329, 334, 352, 353, 359], "max": [1, 119, 134, 241, 242, 264, 291, 296, 298, 299, 304, 308, 310, 312, 329, 333, 352, 353, 359], "allow": [1, 223, 262, 325, 347, 354, 357], "tgp_size": 1, "min": [1, 119, 143, 264, 291, 310, 312], "maxtotalthreadsperthreadgroup": 1, "3d": [1, 227, 232, 277], "mtl": 1, "group_dim": 1, "grid_dim": 1, "divid": [1, 104, 154], "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 355, 357], "thing": [1, 3], "note": [1, 3, 6, 12, 66, 68, 69, 97, 98, 111, 119, 136, 154, 157, 223, 277, 356, 358], "befor": [1, 3, 6, 23, 152, 248, 276, 336, 354, 355], "move": [1, 144, 359], "track": [1, 223, 227], "activ": [1, 6, 136, 231, 275, 276, 309, 318, 319, 321, 352], "command": [1, 6], "instead": [1, 6, 223, 262, 273, 353, 355], "end_encod": 1, "end": [1, 78, 139, 154, 226, 235, 238, 242, 275, 300, 307, 313, 318, 319, 344], "until": [1, 355, 357], "limit": [1, 65, 140, 141, 354], "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 174, 175, 355], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3, 353], "far": [1, 327], "built": [1, 6, 276, 355], "includ": [1, 
136, 137, 141, 245, 256, 265, 298, 352, 353, 354, 357, 358, 360], "forward": [1, 205, 352, 355], "diff": 1, "push": 1, "along": [1, 21, 22, 66, 67, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 119, 167, 182, 184, 188, 194, 195, 198, 199, 200, 223, 267, 290], "similarli": [1, 6, 132, 353, 355], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 201, 273], "arg": [1, 3, 8, 46, 57, 86, 174, 175], "push_back": 1, "fulli": [1, 5, 352, 356, 359], "overal": 1, "directori": [1, 3, 6], "extens": [1, 122, 218, 255, 358], "h": [1, 68, 69, 119, 226, 227, 229, 231, 232, 235, 238, 242, 267, 353, 355], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 25, 216, 223, 325], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6, 352], "hold": [1, 3, 8, 119, 352], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 325, 334, 343, 356, 359], "compon": [1, 3], "etc": [1, 154, 223, 277], "pybind11_modul": 1, "m": [1, 6, 89, 119, 202, 225, 226, 241, 242, 328], "doc": [1, 4], "sampl": [1, 2, 3, 121, 156, 157, 158, 161, 164, 165, 279, 280, 281, 282, 284, 285, 298, 304, 308, 349, 352], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 173, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 208, 209, 210, 211, 213, 214, 216, 225, 226, 234, 241, 242, 244, 248, 249, 260, 263, 267, 273, 276, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 329, 347, 354], "r": [1, 3, 120, 205, 231, 235], "pbdoc": 1, "most": [1, 157, 223, 338, 352, 353, 354, 355], "complex": [1, 97, 98, 99, 100, 101, 156, 161, 164, 165, 213, 223, 262, 352, 353], "bell": 1, "whistl": 1, "liter": [1, 277, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308], "string": [1, 356, 358], "modul": [1, 3, 4, 212, 265, 271, 276, 322, 338, 351, 352, 355], "ensur": [1, 6, 303], "caster": 1, "find_packag": 1, "config": 1, "add_librari": 1, "sourc": [1, 144, 201], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, "target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 205, 295, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 352], "destin": [1, 144], "automat": [1, 5, 122, 357, 358, 359], "practic": [1, 352], "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 352], "describ": [1, 355], "util": [1, 3, 5, 6, 174, 223], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 218], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, 
"package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 59, 66, 70, 111, 119, 122, 131, 133, 135, 141, 142, 153, 192, 206, 209, 213, 214, 218, 236, 237, 240, 249, 251, 260, 263, 265, 269, 273, 276, 277, 295, 298, 329, 340, 356], "python_requir": 1, "even": [1, 3, 66, 352, 355, 356], "though": [1, 3, 352, 355, 356], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 23, 102, 104, 152, 154, 227, 236, 239, 263, 276, 307, 352, 359], "plai": [1, 3], "ones": [1, 3, 149, 174, 202, 261, 262, 265, 354], "b": [1, 3, 10, 12, 59, 80, 81, 83, 104, 107, 108, 110, 111, 117, 118, 119, 127, 128, 130, 132, 134, 143, 145, 150, 154, 191, 198, 205, 240, 267, 277, 290, 353, 354, 355, 356, 357, 358, 359], "f": [1, 2, 4, 119, 223, 238, 332, 352, 356], "item": [1, 2, 3, 4, 214, 355, 356, 357], "true": [1, 2, 3, 12, 59, 66, 111, 119, 122, 141, 155, 182, 209, 213, 214, 218, 223, 227, 228, 229, 235, 236, 237, 238, 239, 240, 248, 249, 251, 257, 260, 265, 267, 269, 273, 276, 277, 295, 303, 329], "quick": [1, 5], "benchmark": [1, 352], "compar": [1, 59, 352], "time": [1, 3, 6, 141, 199, 223, 225, 226, 235, 238, 241, 242, 267, 352, 353, 355, 359], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 276, 359], "random": [1, 2, 3, 4, 5, 225, 226, 227, 237, 241, 242, 251, 257, 352, 353, 359, 360], "normal": [1, 2, 3, 164, 223, 225, 226, 227, 236, 237, 239, 241, 242, 266, 276, 279, 281, 356, 359], "bench": 1, "warm": [1, 352], "rang": [1, 2, 3, 4, 6, 14, 102, 121, 280, 282, 288, 289, 327, 341, 342, 343, 344, 345, 349, 352, 353, 355, 359], "100": [1, 2, 3, 344, 352, 353, 355, 359], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4, 352], "custom": [1, 276], "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 328, 329, 330, 331, 332, 333, 339, 352], "awai": [1, 3], "good": [1, 6, 352, 359], "nn": [1, 3, 4, 174, 214, 223, 322, 325, 327, 336, 338, 352, 355], "grad": [1, 2, 4, 205, 327, 335, 352, 353, 354, 355, 357], "full": [1, 4, 46, 57, 71, 182, 261, 262, 298, 352, 355], "implement": [2, 4, 119, 233, 248, 263, 269, 271, 273, 275, 276, 277, 319, 328, 329, 330, 331, 333, 334, 335, 347, 352, 353, 356], "basic": [2, 169, 353], "model": [2, 4, 5, 174, 212, 214, 223, 244, 247, 249, 251, 255, 257, 259, 260, 261, 263, 276, 322, 325, 327, 335, 336, 338, 352, 355], "problem": [2, 4, 223], "metadata": [2, 122, 172, 173], "num_featur": [2, 227], "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 214, 349, 352, 355], "sgd": [2, 4, 327, 334, 336, 341, 342, 345, 352], "lr": [2, 334], "01": [2, 292, 332], "rate": [2, 328, 329, 330, 331, 332, 333, 334, 339, 340], "ll": [2, 4, 300, 352, 353], "synthet": 2, "dataset": [2, 355], "matrix": [2, 76, 77, 89, 109, 119, 120, 132, 154, 155, 265, 283, 322], "ground": [2, 3, 297, 307], "truth": [2, 297, 307], "w_star": 2, "valu": [2, 3, 9, 12, 14, 21, 22, 36, 56, 59, 65, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 111, 119, 121, 151, 156, 157, 158, 160, 161, 164, 165, 172, 194, 195, 205, 208, 212, 213, 214, 218, 226, 230, 231, 232, 237, 240, 242, 248, 263, 264, 274, 275, 276, 278, 295, 296, 297, 298, 299, 300, 302, 303, 304, 305, 306, 307, 319, 325, 329, 332, 341, 342, 344, 345, 353], "gaussian": [2, 234, 287, 288, 289, 298], "nois": 2, "exampl": [2, 3, 4, 14, 102, 119, 120, 190, 194, 223, 225, 226, 227, 237, 241, 242, 249, 251, 257, 260, 277, 278, 279, 280, 281, 282, 283, 284, 285, 295, 297, 304, 322, 327, 336, 341, 342, 343, 344, 345, 349, 353, 
354, 355, 356, 357, 358], "noisi": 2, "label": [2, 297, 304], "ep": [2, 227, 236, 237, 239, 266, 296, 298, 308, 328, 329, 330, 331, 332, 333, 339], "1e": [2, 4, 12, 111, 227, 236, 237, 239, 266, 296, 298, 308, 328, 329, 330, 331, 332, 333, 336, 339, 341, 342, 343, 344, 345], "us": [2, 3, 4, 5, 6, 14, 76, 79, 81, 102, 119, 120, 132, 136, 137, 138, 140, 154, 155, 167, 168, 213, 223, 226, 231, 233, 234, 235, 238, 240, 242, 244, 248, 255, 261, 262, 263, 265, 267, 269, 273, 276, 277, 281, 282, 288, 289, 296, 322, 325, 327, 328, 329, 331, 332, 333, 334, 335, 336, 347, 349, 351, 352, 353, 354, 357, 359], "weight": [2, 68, 69, 70, 214, 223, 251, 255, 265, 295, 297, 325, 329, 332, 334, 336, 340, 353, 355], "squar": [2, 3, 109, 170, 185, 205, 214, 223, 266, 305, 307, 328, 329, 331, 332, 333, 353, 356], "loss": [2, 4, 205, 223, 327, 352, 353, 355], "loss_fn": [2, 4, 327, 352, 353], "w": [2, 69, 76, 154, 155, 205, 226, 227, 229, 231, 232, 240, 242, 340, 353], "mean": [2, 3, 4, 160, 205, 223, 227, 236, 249, 266, 284, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 352, 353, 356], "grad_fn": [2, 352, 353], "initi": [2, 3, 223, 227, 236, 237, 239, 240, 264, 266, 278, 279, 280, 281, 282, 283, 284, 285, 325, 336, 341, 342, 344, 345, 352, 355], "randomli": [2, 3, 230, 231, 232], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 223, 341, 342, 343, 344, 345, 349, 352, 355, 359], "verifi": [2, 6], "close": [2, 5, 6, 12, 111], "error_norm": 2, "5f": 2, "someth": [2, 3, 354], "00005": 2, "00364": 2, "complet": [2, 3, 6, 141, 261, 262, 353, 359], "logist": [2, 178, 288, 289, 315], "github": [2, 4, 6, 352], "repo": [2, 4, 6, 352], "enabl": [3, 6, 66, 82, 340], "larg": [3, 223, 263, 303, 352, 355], "ish": 3, "transform": [3, 5, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 212, 223, 227, 236, 239, 240, 248, 249, 260, 265, 269, 354], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 23, 118, 152, 269, 307], "200": [3, 343], "line": [3, 355, 356], "python": [3, 36, 49, 56, 86, 213, 214, 215, 325, 335, 336, 338, 351, 353, 356], "neural": [3, 5, 233, 279, 280, 309, 322, 325, 339], "network": [3, 5, 227, 231, 233, 279, 280, 322, 325, 339], "build": [3, 5, 281, 325, 352], "concis": 3, "architectur": [3, 6, 223, 262, 359], "notabl": [3, 5], "rope": [3, 223], "posit": [3, 23, 78, 102, 106, 115, 144, 152, 205, 214, 223, 228, 229, 263, 269, 273, 298, 308], "option": [3, 11, 13, 14, 21, 22, 23, 24, 25, 30, 31, 60, 61, 62, 66, 67, 68, 69, 70, 71, 76, 77, 78, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 106, 109, 114, 115, 119, 120, 121, 122, 131, 133, 135, 141, 142, 148, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 163, 164, 165, 167, 168, 173, 182, 183, 184, 187, 188, 192, 194, 195, 198, 200, 201, 202, 203, 204, 205, 206, 208, 210, 213, 214, 225, 226, 227, 228, 229, 235, 238, 240, 241, 242, 244, 248, 249, 251, 260, 263, 265, 267, 269, 273, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 328, 329, 330, 331, 332, 333, 334, 336, 339, 340, 349, 352, 358, 360], "kei": [3, 156, 157, 158, 160, 161, 163, 164, 165, 213, 214, 248, 249, 260, 263, 336, 349, 351, 353], "cach": [3, 136, 137, 140, 352], "concaten": 3, "project": [3, 263], "llamaattent": 3, "self": [3, 4, 7, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 56, 57, 58, 216, 223, 309, 325], "dim": [3, 233, 236, 237, 239, 263, 266, 269, 273, 276], "num_head": [3, 263, 276], "super": [3, 4, 
223, 325], "tradit": [3, 231, 232, 269], "query_proj": 3, "bia": [3, 76, 154, 155, 214, 223, 228, 229, 235, 238, 240, 249, 251, 260, 263, 265, 267, 331, 332, 333, 336, 353], "key_proj": 3, "value_proj": 3, "out_proj": [3, 325], "__call__": [3, 4, 223, 325], "queri": [3, 263], "mask": [3, 257, 263, 354], "extract": [3, 77, 78, 223, 248, 325], "l": [3, 4, 223, 225, 227, 228, 235, 238, 241, 267, 307], "reshap": [3, 119, 277, 354], "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 84, 227, 236, 237, 239, 240, 266, 273, 279, 280, 281, 282, 328, 330, 331, 332, 339, 352], "score": [3, 304], "softmax": [3, 223, 294, 297], "values_hat": 3, "rm": [3, 6, 329], "swiglu": 3, "rmsnorm": [3, 223], "llamaencoderlay": 3, "mlp_dim": [3, 276], "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 223, 272, 288, 289, 293, 315], "instanc": [3, 154, 215, 223, 237, 244, 245, 246, 249, 252, 253, 260, 262, 271, 325, 356], "embed": [3, 223, 269, 273, 296], "emb": [3, 233, 273], "token": [3, 233], "num_lay": [3, 4, 327], "vocab_s": 3, "norm": [3, 236, 308, 333, 334], "multiheadattent": [3, 223], "create_additive_causal_mask": 3, "list": [3, 8, 11, 13, 25, 28, 29, 39, 40, 41, 42, 44, 52, 55, 56, 58, 60, 61, 62, 63, 66, 67, 70, 86, 88, 91, 92, 94, 95, 97, 98, 100, 101, 105, 106, 116, 119, 131, 133, 135, 142, 148, 151, 153, 156, 157, 158, 160, 161, 164, 165, 168, 172, 182, 184, 187, 188, 192, 198, 199, 201, 205, 206, 207, 210, 213, 215, 223, 249, 251, 252, 253, 254, 258, 260, 261, 262, 325, 331, 332, 333, 334, 343, 351, 352, 353, 355], "still": [3, 6, 119, 352, 355], "consid": [3, 12, 59, 111, 213, 214, 236, 351], "train": [3, 4, 223, 227, 230, 231, 232, 247, 249, 260, 279, 280], "ignor": [3, 65, 66, 86, 329], "whatsoev": 3, "rest": [3, 214, 269], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 349], "temp": 3, "causal": 3, "save": [3, 5, 122, 154, 172, 173, 174, 175, 255, 355], "append": [3, 132, 352, 355], "store": 3, "per": [3, 4, 76, 154, 155, 227, 236, 237, 239, 266, 347, 352, 355], "care": [3, 355], "last": [3, 24, 56, 92, 95, 97, 98, 100, 101, 102, 110, 120, 132, 157, 183, 198, 228, 229, 231, 232, 236, 277, 356], "logit": [3, 157, 295, 297, 352], "next": [3, 4, 140], "categor": 3, "lazili": [3, 223], "noth": [3, 223, 355], "yet": [3, 119, 223, 325, 336, 353, 354, 355, 357], "forc": [3, 4, 223, 357], "choos": [3, 269], "pars": 3, "feed": 3, "loop": [3, 4, 352, 353, 355], "unsqueez": 3, "sequenc": [3, 227, 228, 235, 238, 267, 276, 349, 359], "length": [3, 187, 227, 228, 235, 238, 267, 343], "len": [3, 92, 95, 98, 101, 343], "overwrit": 3, "discard": [3, 213], "old": 3, "moment": [3, 70, 329, 331, 332, 333], "anymor": 3, "everyth": 3, "small": [3, 227, 236, 239, 266, 298, 303, 308, 352, 359], "10": [3, 4, 124, 169, 174, 214, 223, 251, 322, 343, 345, 352, 354], "12": [3, 343], "8192": 3, "1024": 3, "actual": [3, 14, 251, 325, 355], "materi": [3, 5], "could": [3, 223], "20_000": 3, "machin": [3, 5, 6, 339], "8gb": 3, "ram": 3, "32": [3, 4, 154, 155, 218, 226, 242, 352], "44": 3, "doubl": 3, "bracket": 3, "becaus": [3, 136, 223, 355], "batch": [3, 132, 227, 228, 229, 231, 232, 235, 238, 263, 267, 277, 355], "zip": [3, 4], "haven": 3, "anyth": [3, 205, 355], "result": [3, 14, 56, 66, 76, 110, 119, 122, 132, 150, 155, 167, 169, 188, 198, 199, 209, 214, 273, 352, 353, 356], "similar": [3, 214, 261, 262, 263, 296, 356, 358], "runtim": [3, 352], "section": [3, 6, 184, 308, 352, 353], "access": [3, 36, 223, 325, 336, 355, 359], "origin": [3, 78, 227, 256, 279, 280, 281, 282, 
328, 329, 330, 331, 333, 334, 356], "sentencepiec": 3, "pytorch": [3, 5, 236, 353], "compat": [3, 157, 358], "npz": [3, 122, 174, 175, 251, 255, 358], "file": [3, 6, 122, 171, 172, 173, 174, 175, 251, 255, 353, 358], "directli": 3, "argpars": 3, "itertool": [3, 214], "starmap": [3, 214], "np": [3, 4, 356, 357], "torch": [3, 356], "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": [3, 261, 262, 276, 307], "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 46, 57, 236, 304], "submodul": [3, 4, 223, 249, 250, 260, 262], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 223, 235, 238, 267, 327, 336, 349, 352], "savez": [3, 255, 358], "k": [3, 77, 89, 200, 202, 203, 204, 225, 240, 241, 249], "v": [3, 71, 223, 249, 356], "left": [3, 119, 154, 225, 226, 234, 241, 242, 269, 277, 288, 289, 298, 300, 308], "disk": 3, "text": [3, 225, 226, 235, 238, 241, 242, 243, 267, 275, 279, 280, 281, 282, 291, 298, 299, 300, 303, 304, 307, 309, 310, 313, 314, 318, 319, 329, 334], "format": [3, 122, 171, 172, 173, 174, 175, 356], "oper": [3, 5, 7, 32, 60, 61, 62, 70, 182, 189, 195, 216, 223, 276, 334, 352, 353, 354, 355, 356, 357, 359, 360], "dictionari": [3, 66, 122, 172, 173, 213, 223, 248, 256, 261, 262, 337, 351, 358], "represent": [3, 154, 213, 215], "tree_unflatten": 3, "helper": [3, 352], "weight_fil": 3, "incur": 3, "sever": [3, 68, 69, 70, 174, 175, 352, 358], "futur": [3, 265, 354, 355], "pth": 3, "current": [3, 5, 6, 68, 69, 70, 137, 154, 223, 329, 355], "around": 3, "m1": [3, 352, 353, 359], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, "ago": 3, "never": [3, 355], "long": 3, "info": [3, 6], "247": 3, "press": [3, 119], "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], "purs": 3, "greater": [3, 23, 108, 152, 275, 319], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": [3, 353], "off": [3, 6, 355], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 214], "hi": [3, 238], "bundl": 3, "hard": 3, "wet": 3, "he": [3, 281, 282], "were": [3, 359], "cry": 3, "watch": [3, 352], "him": 3, "observ": 3, "numer": [3, 119, 127, 131, 182, 227, 236, 237, 239, 266, 296, 298, 308, 328, 329, 330, 331, 332, 333, 339, 352, 355], "crowd": 3, "wa": [3, 355], "hurri": 3, "437": 3, "330": 3, "second": [3, 78, 128, 130, 132, 193, 205, 226, 242, 296, 304, 329, 331, 332, 333, 353, 359], "spent": 3, "amount": [3, 138, 225, 241], "39": 3, "ms": [3, 352], "By": [3, 353, 356], "bigger": [3, 329], "remain": [3, 205, 230, 231, 232], "almost": 3, "nobodi": 3, "took": 3, "least": [3, 60, 61, 62, 65, 120, 154], "notic": [3, 353, 358], "distanc": [3, 308], "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 119, 141, 184, 251], "ey": 3, "speak": [3, 119], "resum": 3, "postur": 3, "stood": 3, "feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": [3, 62], "quarter": 3, "hour": 3, "made": 3, "immedi": [3, 244], "repli": 3, "again": [3, 6, 223, 352], "hand": [3, 353, 355], "did": 3, "accustom": 3, "thu": [3, 223], "question": [3, 355], "reason": [3, 354], "tell": [3, 352, 356], "understand": [3, 279, 280], "579": 3, "690": 3, "num": [3, 121, 163], "500": [3, 359], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 223], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": [3, 355], "kind": 3, 
"longer": [3, 71, 353], "soon": 3, "unless": [3, 12, 111, 119, 325], "unlik": [3, 12, 111, 231, 232, 256], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": [3, 65, 354, 356], "happi": 3, "ask": 3, "Is": [3, 273, 276], "shop": 3, "bui": 3, "food": 3, "633": 3, "21": [3, 345], "475": 3, "su": 3, "j": [3, 6, 119, 231, 330, 331, 333], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": [3, 269], "enhanc": [3, 269, 355], "rotari": [3, 269], "arxiv": [3, 236, 237, 239, 243, 266, 289, 309, 328, 334], "preprint": [3, 328, 334], "2104": 3, "09864": 3, "zhang": 3, "sennrich": 3, "2019": [3, 332], "root": [3, 170, 185, 266], "advanc": [3, 352], "inform": [3, 4, 6, 172, 173, 223, 227, 234, 263, 353, 359], "system": [3, 6, 136, 137], "shazeer": 3, "2020": 3, "glu": [3, 223], "variant": [3, 307, 333], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 194, 223, 352], "mlp": [4, 223, 276, 327], "inherit": [4, 351], "standard": [4, 36, 56, 132, 158, 160, 276, 279, 281, 284, 357], "idiom": [4, 352], "input_dim": [4, 223, 240, 265], "hidden_dim": [4, 325, 327], "output_dim": [4, 223, 240, 265], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 21, 65, 138, 141, 223, 268, 273, 288, 289, 292, 311, 325, 355], "cross": [4, 70, 295, 297], "entropi": [4, 295, 297], "sub": [4, 78, 163], "commonli": [4, 261, 322, 352], "cross_entropi": [4, 223], "accuraci": 4, "valid": [4, 71, 102, 208, 213, 249, 260, 351], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 327], "batch_siz": [4, 327], "num_epoch": [4, 327], "learning_r": [4, 327, 328, 329, 330, 331, 332, 333, 334, 336, 339, 340, 341, 342, 343, 344, 345, 352], "train_imag": [4, 327], "train_label": [4, 327], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 327], "perm": 4, "permut": 4, "id": [4, 6], "put": [4, 352], "trainabl": [4, 212, 223, 325], "loss_and_grad_fn": [4, 327, 352, 353], "value_and_grad": [4, 223, 261, 325, 327, 338, 352, 353, 356, 357], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 89, 96, 97, 99, 100, 101, 236, 251, 354, 356], "featur": [5, 68, 69, 70, 227, 235, 236, 237, 238, 239, 240, 265, 266, 267, 269, 276, 277, 352, 355], "main": [5, 78, 89, 214, 223], "differ": [5, 191, 307, 353], "lazi": [5, 325, 357], "multi": [5, 228, 229, 354, 356], "cpu": [5, 120, 352, 359], "gpu": [5, 352, 354, 359], "inspir": 5, "jax": [5, 349], "arrayfir": 5, "unifi": 5, "live": [5, 359], "guid": 5, "convers": 5, "regress": [5, 303], "layer": [5, 223, 225, 226, 231, 232, 235, 236, 238, 239, 240, 241, 242, 257, 262, 265, 267, 271, 276, 321, 325], "perceptron": 5, "llm": 5, "infer": [5, 105, 122], "fft": 5, "algebra": 5, "tree": [5, 66, 86, 106, 205, 208, 213, 214, 215, 335, 336, 338, 347, 353], "develop": [5, 6], "document": [5, 46, 57, 172, 173, 352, 353, 354], "pypi": 6, "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": [6, 141, 334], "14": 6, "sonoma": 6, "conda": 6, "forg": 6, "distribut": [6, 156, 157, 158, 160, 164, 165, 240, 279, 280, 281, 282, 284, 285, 298, 301, 306, 308, 322], "probabl": [6, 161, 230, 231, 232, 265, 295, 297, 301, 359], "platform": 6, "processor": 6, "arm": [6, 218], "i386": 6, "switch": 6, "17": 6, "g": [6, 119, 154, 238, 321, 339, 340, 355, 360], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": [6, 119, 352], "sdk": 6, "environ": [6, 79, 82], "via": [6, 335, 338, 355, 356], "rosetta": 6, "unam": 6, "p": [6, 156, 223, 230, 231, 232, 308, 331, 
333], "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 79, 82, 162, 349, 352], "env": 6, "cmake_build_parallel_level": 6, "edit": [6, 262], "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "either": [6, 10, 46, 56, 57, 65, 80, 81, 83, 104, 107, 108, 117, 118, 119, 127, 132, 134, 143, 145, 191, 205, 226, 242, 271, 277, 281, 282], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 132, 145, 154, 155, 263, 273, 342, 343, 345, 352, 355, 358], "wish": 6, "variabl": [6, 66, 79, 82, 106, 116, 205, 207, 208], "export": 6, "developer_dir": 6, "app": 6, "content": [6, 248, 352], "xcrun": 6, "macosx": 6, "show": [6, 218, 352], "unabl": 6, "tool": 6, "select": [6, 200, 209, 244, 248], "sudo": 6, "ouptut": 6, "finder": 6, "iterm": 6, "termin": 6, "click": 6, "uncheck": 6, "window": [6, 225, 226, 241, 242], "restart": 6, "grep": 6, "cmake_host_system_processor": 6, "arm64": 6, "x86_64": 6, "wipe": 6, "cahc": 6, "rf": 6, "devicetyp": 7, "attribut": [7, 8, 25, 216, 256, 325, 347], "kwarg": [8, 174, 175, 360], "union": [9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 163, 164, 165, 166, 167, 168, 169, 170, 172, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 209, 210, 211, 225, 226, 229, 241, 242, 249, 251, 260, 277, 328, 329, 330, 331, 332, 333, 334, 339, 340], "absolut": [9, 12, 111, 288, 289, 307], "semant": [10, 63, 80, 81, 83, 107, 108, 117, 118, 127, 132, 134, 143, 145, 191, 359], "keepdim": [11, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 119, 131, 133, 135, 142, 153, 182, 192, 206], "reduct": [11, 13, 131, 133, 142, 153, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308], "reduc": [11, 13, 21, 22, 131, 133, 135, 142, 153, 192, 206, 227, 276, 303], "unspecifi": [11, 13, 14, 21, 22, 23, 24, 67, 105, 131, 133, 135, 142, 148, 152, 153, 167, 182, 183, 192, 194, 200, 206, 210, 360], "entir": [11, 13, 21, 22, 131, 133, 135, 142, 153, 192, 206, 231, 232], "singleton": [11, 13, 21, 22, 131, 132, 133, 135, 142, 153, 192, 206], "rtol": [12, 111], "05": [12, 111, 227, 236, 237, 239, 266], "atol": [12, 111], "08": [12, 111, 296, 330, 331, 332, 333, 339], "equal_nan": [12, 59, 111], "approxim": [12, 234, 287, 288, 289], "comparison": [12, 83, 107, 108, 117, 118], "infinit": [12, 111], "equal": [12, 23, 59, 89, 108, 111, 118, 152, 161, 184, 237, 240], "sign": [12, 111, 218, 334], "nan": [12, 59, 111, 113], "ab": [12, 111, 119, 205, 236, 237, 239, 243, 266, 289, 309, 352], "array_equ": [12, 111], "rel": [12, 111, 329, 352], "toler": [12, 111], "boolean": [12, 59, 111, 112, 113, 114, 115, 128, 129, 130, 218, 259, 354], "interv": [14, 121, 161, 165], "increment": 14, "otherwis": [14, 70, 141, 213, 214, 249, 251, 260, 275, 276, 277, 295, 300, 307, 318, 319, 355, 356], "int32": [14, 102, 
119, 161, 218, 277, 354, 357], "convent": [14, 71, 277, 332], "lead": [14, 352], "fraction": 14, "integr": [14, 194, 355], "invers": [15, 16, 17, 18, 19, 20, 85, 93, 94, 95, 96, 97, 98], "cosin": [15, 16, 72, 73, 296, 341, 343, 353], "hyperbol": [16, 18, 20, 73, 181, 197, 320], "sine": [17, 18, 180, 181, 353], "uint32": [21, 22, 23, 24, 157, 218], "minimum": [22, 65, 273, 296], "kth": [23, 152], "partit": 23, "order": [23, 70, 119, 152, 154, 200, 223, 236, 261, 271, 336, 352, 353], "undefin": [23, 152, 354], "sort": [23, 24, 152, 200], "flatten": [23, 24, 119, 150, 152, 167, 183, 194, 195, 200, 213], "dimension": [25, 90, 91, 92, 93, 94, 95, 99, 100, 101, 225, 226, 227, 228, 229, 233, 240, 241, 242, 265, 273, 354, 356], "val": [25, 105], "tupl": [25, 46, 49, 57, 67, 69, 70, 81, 86, 88, 116, 119, 120, 151, 154, 168, 187, 205, 207, 213, 214, 215, 225, 226, 229, 241, 242, 251, 253, 271, 277, 329, 331, 332, 333, 334, 351, 353], "ndarrai": [25, 354, 355, 357], "properti": [26, 34, 43, 49, 51, 256, 259, 337, 353], "argument": [26, 46, 57, 66, 86, 106, 205, 214, 223, 277, 349, 353, 358, 359, 360], "decim": [47, 169], "indices_or_sect": [52, 184], "nest": [56, 66, 223, 325, 351, 353], "ddof": [58, 206], "ari": [60, 61, 62], "a_min": 65, "a_max": 65, "edg": [65, 151, 277, 352], "At": 65, "anoth": [65, 132, 191, 209, 223, 244, 352, 353, 354, 359], "fun": [66, 106, 116, 205, 207, 208, 352, 354, 355, 359], "dict": [66, 86, 122, 172, 173, 174, 254, 258, 261, 262, 325, 335, 336, 338, 351, 353, 358], "dure": [66, 230, 231, 232, 277, 356], "arbitrarili": [66, 223, 351, 353, 357], "leaf": [66, 213, 214, 248], "node": [66, 86, 208], "shapeless": 66, "recompil": [66, 352], "chang": [66, 177, 261, 265, 277, 300, 307, 352, 356], "Not": [66, 352], "attempt": 66, "pad": [68, 69, 70, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 225, 226, 228, 229, 241, 242], "dilat": [68, 69, 70, 228, 229], "group": [68, 69, 70, 76, 154, 155, 236, 265], "1d": [68, 70, 71, 172, 195], "convolut": [68, 69, 70, 71, 228, 229, 231, 232], "channel": [68, 69, 70, 227, 228, 229, 231, 232], "c_in": [68, 69, 70], "c_out": [68, 69, 70], "convolv": [68, 69, 70], "2d": [69, 70, 78, 154, 227, 231], "spatial": [69, 70, 225, 236, 241, 277], "symmetr": 69, "kernel_dil": 70, "input_dil": 70, "flip": [70, 71], "correl": [70, 231], "discret": [71, 90, 91, 92, 93, 94, 95, 99, 100, 101, 233], "swap": [71, 141, 193, 262, 265], "conv": 71, "filter": [71, 228, 229, 244, 248], "signal": [71, 277], "bias": [76, 154, 155, 235, 238, 249, 260, 263], "group_siz": [76, 154, 155, 265], "64": [76, 154, 155, 218, 265], "configur": 76, "formal": [76, 154], "notat": [76, 213, 253], "quantiz": [76, 122, 155, 265], "w_i": [76, 154], "hat": [76, 154], "occupi": [76, 154, 155], "diagon": [77, 89, 202, 203, 204], "th": [77, 89, 343], "axis1": [78, 193], "axis2": [78, 193], "subarrai": [78, 184], "remov": [78, 132, 157, 187, 297], "insert": [78, 88, 359], "neg": [78, 102, 114, 241, 242, 263, 298, 306, 308, 354], "taken": [78, 194], "disabl": [79, 140, 352], "mlx_disable_compil": [79, 82, 352], "divis": [80, 104, 154], "quotient": [80, 81, 104], "remaind": 81, "fuction": 81, "faster": [81, 287, 352, 353], "mathrm": [84, 178, 237], "frac": [84, 154, 178, 225, 226, 227, 230, 231, 232, 236, 237, 239, 240, 241, 242, 266, 279, 280, 281, 282, 296, 298, 300, 303, 314, 316, 328, 330, 331, 332, 333, 339], "pi": [84, 273, 353], "int_0": 84, "dt": 84, "erf": [85, 352], "exponenti": [87, 270, 286, 313, 342], "ident": [89, 189, 223, 257], "zero": [89, 90, 91, 92, 93, 94, 95, 96, 
97, 98, 99, 100, 101, 202, 203, 204, 211, 223, 225, 226, 230, 231, 232, 251, 278, 279, 280, 281, 282, 283, 284, 285, 322, 329, 354], "whose": [89, 212], "One": [90, 93, 99, 170, 352, 353], "fourier": [90, 91, 92, 93, 94, 95, 99, 100, 101], "truncat": [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 164], "dft": [90, 91, 92, 93, 94, 95, 99, 100, 101], "rfft": 96, "real": [96, 97, 98, 99, 100, 101], "rfft2": 97, "rfftn": 98, "silent": [99, 100, 101], "start_axi": 102, "end_axi": 102, "inclus": 102, "outsid": 102, "clamp": 102, "integ": [104, 119, 151, 154, 155, 156, 161, 184, 198, 208, 218, 233, 343, 354], "floor": 104, "argnam": [106, 205], "neither": [106, 205], "keyword": [106, 174, 175, 205, 214, 223, 349, 358, 360], "strict": [107, 117, 249, 251, 260], "ordinari": 110, "inifn": 112, "infin": [112, 114, 115, 241, 242, 333], "ord": 119, "tabl": [119, 218, 233], "frobeniu": 119, "matric": [119, 120], "strictli": 119, "mathemat": 119, "variou": 119, "purpos": 119, "calcul": [119, 298, 304, 329], "fro": 119, "inf": [119, 263], "largest": [119, 200], "sing": 119, "smallest": 119, "singular": 119, "nuclear": 119, "_f": 119, "sum_": [119, 225, 226, 303], "a_": 119, "valueerror": [119, 251, 353], "refer": [119, 237, 243, 256, 279, 280, 281, 282, 289, 309, 354], "golub": 119, "van": 119, "loan": 119, "baltimor": 119, "md": 119, "john": 119, "hopkin": 119, "univers": 119, "1985": 119, "pg": 119, "la": 119, "arang": [119, 277, 354, 356], "9": [119, 297, 328, 331, 332, 333, 334, 336, 342, 345, 356], "74597": 119, "20": 119, "84804": 119, "41421": 119, "23607": [119, 120], "74166": 119, "24264": 119, "11": 119, "225": 119, "q": 120, "894427": 120, "447214": 120, "57771": 120, "50": 121, "evenli": 121, "return_metadata": 122, "binari": [122, 171, 172, 173, 174, 175, 275, 295, 319, 352], "npy": [122, 171, 358], "safetensor": [122, 173, 251, 255, 355, 358], "gguf": [122, 172, 358], "matadata": 122, "unsupport": 122, "tensor": [122, 198, 225, 226, 241, 242, 308, 356], "natur": [123, 125, 355], "logarithm": [123, 124, 125, 126], "log": [125, 127, 131, 293, 294, 298, 301, 303, 306, 317], "plu": 125, "exp": [127, 131, 158, 182, 286, 301, 313, 314, 317, 352, 359], "stabl": [127, 131, 182, 303], "prepend": 132, "byte": [136, 137, 138, 140, 141, 218], "report": [136, 141], "peak": 138, "record": [138, 355], "begin": [138, 154, 226, 235, 238, 242, 275, 300, 307, 313, 318, 319], "program": 138, "free": 140, "reclaim": 140, "set_memory_limit": 140, "previou": [140, 141], "relax": 141, "task": [141, 303], "exceed": 141, "negat": 146, "beforehand": 150, "pad_with": 151, "constant_valu": 151, "pad_width": 151, "before_1": 151, "after_1": 151, "before_2": 151, "after_2": 151, "before_n": 151, "after_n": 151, "before_i": 151, "after_i": 151, "extend": 151, "side": [151, 225, 226, 241, 242, 352], "smaller": [152, 334, 352], "everi": [154, 214, 345, 353], "particular": [154, 236], "consecut": [154, 269], "w_1": 154, "w_g": 154, "align": [154, 226, 235, 238, 242], "max_i": 154, "min_i": 154, "textrm": [154, 234, 287, 290], "round": 154, "pack": [154, 155], "unsign": [154, 155, 218], "lower": [154, 161, 164, 165, 202, 285], "upper": [154, 161, 164, 165, 285], "1st": 154, "signific": 154, "2nd": 154, "dequant": 154, "w_q": 154, "whether": [155, 235, 238, 248, 263, 267, 295, 298, 304], "prng": [156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 349], "num_sampl": 157, "unnorm": [157, 295, 297], "draw": 157, "cdf": [158, 234, 287], "accord": [158, 209, 263, 279, 280, 281, 282], "seed": 159, "loc": 160, "deviat": [160, 279, 
281, 284], "low": [161, 165, 285, 322], "high": [161, 165, 223, 233, 285, 322], "bound": [161, 164, 165, 234, 285, 352, 354, 359], "roadcast": 161, "domain": 164, "uniformli": 165, "repetit": 167, "preserv": [168, 353], "reciproc": 170, "arr": [171, 354], "obj": 172, "uncompress": 174, "my_path": 174, "tree_flatten": [174, 214, 215, 223], "transformerencod": 174, "128": [174, 223], "flat_param": 174, "compress": 175, "being": [189, 223], "prevent": [189, 308, 356], "flow": [189, 355], "unchang": [189, 269], "streamcontext": 190, "context": 190, "manag": [190, 349, 359], "prior": [194, 195], "exclud": 195, "dot": [198, 213, 253, 263], "rep": 199, "repeat": 199, "necessarili": 200, "elsewher": [202, 354], "col": 202, "triangl": 202, "mse": 205, "param": [205, 223, 322, 353], "lvalu": 205, "dlvalu": 205, "dparam": 205, "lasso": 205, "l1": [205, 300, 302, 303, 307], "varianc": [206, 227, 236, 298], "divisor": 206, "cotang": 207, "in_ax": [208, 353], "out_ax": [208, 353], "prefix": [208, 213], "fn": [212, 214, 357], "callabl": [212, 213, 214, 244, 245, 248, 267, 271, 276, 278, 279, 280, 281, 282, 283, 284, 285, 328, 329, 330, 331, 332, 333, 334, 339, 340, 341, 342, 343, 344, 345], "wrt": 212, "is_leaf": [213, 214], "arbitrari": [213, 325], "depth": [213, 232, 353], "hello": [213, 215], "charact": 213, "flat": [213, 215], "appli": [214, 223, 225, 226, 227, 228, 229, 231, 232, 234, 236, 237, 239, 240, 241, 242, 243, 245, 257, 264, 265, 266, 267, 268, 270, 272, 274, 275, 277, 286, 287, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 322, 335, 338, 344, 347, 352], "superset": [214, 335], "extra": 214, "closer": 214, "constitut": 214, "dict_kei": [214, 336], "lambda": [214, 223, 244, 249, 274, 313, 318, 328, 329, 330, 331, 332, 333, 334, 339, 340, 352, 353], "recreat": 215, "world": 215, "42": 215, "bool_": 218, "uint8": 218, "uint16": 218, "16": [218, 225, 237, 241, 244, 325], "uint64": 218, "int8": 218, "int16": 218, "int64": 218, "done": [223, 230, 352, 355, 356], "manual": 223, "explicitli": [223, 349], "solv": 223, "intuit": 223, "freez": [223, 260, 325], "finetun": 223, "in_dim": [223, 325], "out_dim": [223, 325], "enumer": 223, "caus": [223, 352, 355], "local": [223, 231], "scope": 223, "l2_loss": 223, "y_hat": 223, "trainable_paramet": [223, 248, 336], "loss_and_grad": 223, "workhors": 223, "Its": 223, "recurs": [223, 248, 249, 254, 258, 260, 325], "frozen": [223, 249, 258, 260, 265, 325], "individu": [223, 231, 232], "subset": [223, 248], "action": 223, "displai": 223, "tree_map": 223, "count": [223, 343], "num_param": 223, "preclud": 223, "pure": [223, 327], "pattern": [223, 355], "achiev": 223, "other_input": 223, "necessari": 223, "wrap": 223, "apply_to_modul": [223, 249], "children": 223, "filter_and_map": 223, "leaf_modul": 223, "load_weight": [223, 355], "named_modul": 223, "save_weight": 223, "unfreez": [223, 249], "update_modul": 223, "alibi": 223, "avgpool1d": 223, "avgpool2d": 223, "batchnorm": 223, "conv1d": 223, "conv2d": 223, "dropout": [223, 231, 232, 257, 276, 352], "dropout2d": 223, "dropout3d": 223, "gelu": [223, 288, 289, 352], "groupnorm": 223, "gru": 223, "instancenorm": 223, "layernorm": 223, "lstm": 223, "maxpool1d": 223, "maxpool2d": [223, 226], "mish": 223, "prelu": 223, "quantizedlinear": 223, "relu": [223, 264, 276, 310, 322], "rnn": [223, 235], "selu": 223, "sequenti": [223, 322], "silu": 223, "sinusoidalpositionalencod": 223, "softshrink": 223, "upsampl": 
223, "elu": [223, 313], "gelu_approx": [223, 234, 287], "gelu_fast_approx": [223, 234, 287], "hardswish": 223, "leaky_relu": 223, "log_sigmoid": 223, "log_softmax": 223, "relu6": 223, "softplu": [223, 243, 309], "tanh": [223, 235, 238, 243, 267, 309], "binary_cross_entropi": [223, 352], "cosine_similarity_loss": 223, "gaussian_nll_loss": 223, "hinge_loss": 223, "huber_loss": 223, "kl_div_loss": 223, "l1_loss": 223, "log_cosh_loss": 223, "margin_ranking_loss": 223, "mse_loss": 223, "nll_loss": 223, "smooth_l1_loss": 223, "triplet_loss": 223, "init": [223, 264, 322, 327, 341, 342, 344, 345], "uniform": [223, 240, 251, 280, 282, 322, 349, 352, 353, 359], "glorot_norm": 223, "glorot_uniform": 223, "he_norm": 223, "he_uniform": 223, "kernel_s": [225, 226, 228, 229, 241, 242], "averag": [225, 226, 328, 329, 331, 332, 333], "pool": [225, 226, 241, 242, 359], "l_": [225, 241, 300], "n_i": [225, 226, 241, 242], "c_j": [225, 226, 241, 242], "ldot": [225, 226, 241, 242], "lfloor": [225, 226, 241, 242], "rfloor": [225, 226, 241, 242], "k_h": [226, 242], "k_w": [226, 242], "h_": [226, 235, 238, 242, 267], "w_": [226, 235, 238, 242, 267, 328, 329, 330, 331, 332, 333, 334, 339, 340], "height": [226, 227, 229, 231, 232, 242], "width": [226, 227, 229, 231, 232, 242, 265], "momentum": [227, 334, 336, 340, 352], "affin": [227, 236, 237, 239, 240, 265], "track_running_stat": 227, "var": [227, 236, 237, 239, 298], "epsilon": [227, 236, 237, 239, 266, 296, 298, 328, 330, 331, 332, 333, 339], "gamma": [227, 236, 237, 239, 266, 279, 280, 281, 282], "nc": 227, "nlc": [227, 228], "four": 227, "nhwc": [227, 229], "paper": [227, 273, 328, 329, 330, 331, 333, 334], "deep": [227, 279, 280, 281, 282], "intern": 227, "covari": 227, "shift": 227, "stabil": [227, 236, 237, 239, 266, 296, 298, 328, 329, 330, 331, 332, 333, 339], "bn": 227, "in_channel": [228, 229], "out_channel": [228, 229], "learnabl": [228, 229, 271], "portion": 230, "independ": [231, 232], "nwhc": 231, "whc": 231, "maintain": [231, 232, 334], "entri": [231, 232], "benefici": [231, 232, 355], "earli": 231, "adjac": 231, "pixel": 231, "regular": [231, 309, 332, 352, 354], "thompson": 231, "goroshin": 231, "jain": 231, "lecun": 231, "bregler": 231, "2015": [231, 331, 333], "cvpr": 231, "ndhwc": 232, "dhwc": 232, "medic": 232, "video": 232, "num_embed": 233, "lookup": 233, "typic": [233, 327, 352, 355], "usual": [233, 351, 355], "vocabulari": 233, "approx": 234, "unit": [234, 235, 268, 270, 272, 279, 280, 281, 282, 286, 287, 288, 289, 290, 292, 311, 312, 313, 315], "phi": [234, 287], "geluapprox": 234, "sigma": [234, 235, 238, 279, 280, 281, 282, 288, 289, 290, 293, 314, 315], "60033": [234, 288], "0433603": [234, 288], "gelufast": 234, "773": 234, "regard": 234, "input_s": [235, 238, 267], "hidden_s": [235, 238, 267], "gate": [235, 290], "recurr": [235, 238, 267], "nld": [235, 238, 267], "ld": [235, 238, 267], "r_t": 235, "xr": 235, "x_t": [235, 238, 267], "hr": 235, "h_t": [235, 238, 267], "b_": [235, 238], "z_t": 235, "xz": 235, "hz": 235, "n_t": 235, "xn": 235, "odot": [235, 238], "hn": 235, "hidden": [235, 238, 267, 276], "nh": [235, 238, 267], "nlh": [235, 238, 267], "lh": [235, 238, 267], "num_group": 236, "pytorch_compat": 236, "split": [236, 290], "preced": 236, "http": [236, 237, 239, 243, 266, 289, 309], "org": [236, 237, 239, 243, 266, 289, 309], "1803": 236, "08494": 236, "denomin": [237, 296, 328, 330, 331, 332, 333, 339], "inorm": 237, "1607": [237, 239], "08022": 237, "i_t": 238, "xi": 238, "f_t": 238, "xf": 238, "hf": 238, "g_t": [238, 328, 
330, 331, 332, 333, 334, 339, 340], "xg": 238, "hg": 238, "o_t": 238, "xo": 238, "ho": 238, "o": 238, "c_": [238, 334], "c_t": [238, 334], "cell": 238, "06450": 239, "mathcal": 240, "u": 240, "d_i": 240, "max_": [241, 242], "1908": [243, 309], "08681": [243, 309], "map_fn": [244, 248], "filter_fn": [244, 248], "valid_parameter_filt": 244, "apply_fn": 245, "descend": 246, "is_leaf_fn": 248, "found": 248, "drop": 248, "idempot": [249, 260], "attent": [249, 263, 273, 276], "endswith": 249, "file_or_weight": 251, "miss": [251, 358], "ok": [251, 353], "save_safetensor": [255, 358], "reflect": [256, 352, 354, 356], "certain": [257, 352], "ie": 260, "noop": 260, "unfrozen": 260, "tracer": 261, "partial": [261, 262, 352, 355], "child": 262, "programmat": 262, "query_input_dim": 263, "key_input_dim": 263, "value_input_dim": 263, "value_dim": 263, "value_output_dim": 263, "head": [263, 276], "aggreg": 263, "linearli": 263, "attend": 263, "num_paramet": 264, "25": [264, 277], "parametr": [264, 310], "classmethod": 265, "from_linear": 265, "quantize_modul": 265, "1910": 266, "07467": 266, "nonlinear": [267, 352], "elman": 267, "ih": 267, "hh": 267, "func": 267, "rectifi": [268, 281, 282, 292, 311, 312], "10000": 269, "rotat": 269, "larger": [269, 334], "slightli": [269, 359], "angular": 269, "frequenc": [269, 273], "plain": 271, "known": [272, 315], "swish": [272, 315], "min_freq": 273, "0001": 273, "max_freq": 273, "cos_first": 273, "full_turn": 273, "sinusoid": 273, "sin": [273, 353, 357], "lambd": [274, 318], "threshold": [275, 300, 307, 319], "geq": [275, 319], "num_encoder_lay": 276, "num_decoder_lay": 276, "pycapsul": 276, "custom_encod": 276, "custom_decod": 276, "norm_first": 276, "checkpoint": 276, "decod": 276, "interact": 276, "mechan": 276, "chekpoint": 276, "usag": [276, 352], "expens": 276, "scale_factor": 277, "nearest": 277, "align_corn": 277, "audio": 277, "4d": 277, "forth": 277, "neighbor": 277, "interpol": 277, "bilinear": 277, "trilinear": 277, "corner": 277, "bottom": 277, "squeez": [277, 352], "75": 277, "33333": 277, "66667": 277, "init_fn": [278, 279, 280, 281, 282, 283, 284, 285, 322], "glorot": [279, 280], "fan_in": [279, 280, 281, 282], "fan_out": [279, 280, 281, 282], "difficulti": [279, 280], "feedforward": [279, 280], "191107": 279, "61278": 279, "150594": 279, "363207": 279, "gain": [279, 280, 281, 282], "89613": 279, "53947": 279, "48095": 279, "995016": 279, "223404": 280, "890597": 280, "379159": 280, "776856": 280, "90041": 280, "02264": 280, "912766": 280, "12451": 280, "fan": [281, 282], "delv": [281, 282], "surpass": [281, 282], "human": [281, 282], "level": [281, 282], "imagenet": [281, 282], "classif": [281, 282], "25211": 281, "458835": 281, "177208": 281, "0137595": 281, "6967": 281, "02765": 281, "15268": 281, "75787": 281, "kaim": 282, "0300242": 282, "0184009": 282, "793615": 282, "666329": 282, "64331": 282, "16506": 282, "08619": 282, "79854": 282, "982273": 284, "534422": 284, "380709": 284, "0645099": 284, "883935": 285, "863726": 285, "617261": 285, "417497": 285, "exact": [288, 289], "0003": 288, "cdot": [288, 289, 296, 299, 315], "015": 289, "702": 289, "hendryck": 289, "1606": 289, "08415": 289, "halv": 290, "negative_slop": 292, "leaki": 292, "sum_i": 294, "x_i": [294, 316], "with_logit": 295, "predict": [295, 298, 299, 300, 301, 302, 303, 305, 306, 307], "105361": 295, "223144": 295, "20397": 295, "916291": 295, "539245": 295, "prob": 295, "510826": 295, "x1": 296, "x2": 296, "x_1": [296, 304], "x_2": [296, 304], "label_smooth": 297, "hot": 297, 
"smooth": [297, 307, 339], "0485873": 297, "348587": 297, "06": [298, 308, 328], "likelihood": [298, 306], "nll": [298, 306], "hing": 299, "y_": [299, 303], "pred": [299, 303], "delta": [300, 328], "huber": 300, "leq": [300, 313], "l2": [300, 303, 340], "kullback": 301, "leibler": 301, "diverg": 301, "cosh": 303, "logcosh": 303, "sensit": 303, "outlier": 303, "dual": 303, "behavior": [303, 354, 355], "offer": 303, "balanc": 303, "robust": 303, "approach": [303, 353], "inputs1": 304, "inputs2": 304, "margin": [304, 308], "rank": 304, "573409": 304, "765166": 304, "0638": 304, "75596": 304, "225763": 304, "256995": 304, "773433": 304, "formula": 307, "anchor": 308, "triplet": 308, "_p": 308, "degre": 308, "pairwis": 308, "instabl": 308, "monoton": 309, "0507": 313, "67326": 313, "sum_j": 316, "x_j": 316, "subclass": 325, "concept": 325, "mymlp": 325, "in_proj": 325, "subsequ": 327, "apply_gradi": 327, "rmsprop": 327, "adagrad": 327, "adafactor": 327, "adadelta": 327, "adam": [327, 333, 334, 343, 344], "adamw": [327, 334], "adamax": 327, "lion": 327, "cosine_decai": [327, 343], "exponential_decai": 327, "join_schedul": 327, "linear_schedul": [327, 343], "step_decai": 327, "rho": 328, "zeiler": 328, "2012": [328, 339], "adapt": [328, 329, 330], "1212": 328, "5701": 328, "v_": [328, 330, 331, 332, 333, 339, 340], "v_t": [328, 330, 331, 332, 333, 339, 340], "u_t": 328, "u_": 328, "w_t": [328, 330, 331, 332, 333, 334, 339, 340], "30": 329, "001": 329, "clip_threshold": 329, "decay_r": [329, 342, 345], "beta_1": [329, 331, 332, 333, 334], "weight_decai": [329, 332, 334, 340], "scale_paramet": 329, "relative_step": 329, "warmup_init": 329, "sublinear": 329, "cost": [329, 355], "epsilon_1": 329, "epsilon_2": 329, "parameter_scal": 329, "clip": 329, "unscal": 329, "decai": [329, 332, 334, 340, 341, 342, 345], "duchi": 330, "hazan": 330, "singer": 330, "2011": 330, "subgradi": 330, "onlin": 330, "stochast": [330, 331, 333, 340, 355], "jmlr": 330, "999": [331, 332, 333], "omit": [331, 333], "estim": [331, 333], "kingma": [331, 333], "ba": [331, 333], "iclr": [331, 332, 333], "m_": [331, 332, 333, 334], "m_t": [331, 332, 333, 334], "beta_2": [331, 332, 333, 334], "contrast": 332, "loshchilov": 332, "hutter": 332, "decoupl": 332, "99": [334, 339], "tend": 334, "10x": 334, "strength": [334, 340], "wd": 334, "chen": 334, "symbol": 334, "discoveri": 334, "2302": 334, "06675": 334, "eta": 334, "opt": 335, "tieleman": 339, "hinton": 339, "lectur": 339, "coursera": 339, "dampen": 340, "nesterov": 340, "descent": [340, 352, 355], "mu": 340, "tau": 340, "penalti": 340, "decay_step": 341, "beyond": [341, 344], "lr_schedul": [341, 342, 343, 345], "1000": [341, 352], "0999961": 341, "06561": 342, "boundari": 343, "join": 343, "receiv": [343, 356], "transit": 343, "warmup": [343, 344], "0999938": 343, "101": 344, "step_siz": 345, "081": 345, "basi": 347, "implicit": [349, 352, 353], "fine": [349, 355], "grain": 349, "control": [349, 355], "pseudo": 349, "altern": 349, "splittabl": 349, "threefri": 349, "counter": 349, "cycl": 351, "merg": 352, "fuse": 352, "big": 352, "awar": [352, 355], "36788": 352, "compiled_fun": 352, "code": [352, 355], "slow": 352, "stack": 352, "rerun": [352, 355], "too": [352, 355], "frequent": [352, 355], "destroi": 352, "anonym": 352, "don": [352, 359], "unari": 352, "overhead": [352, 355, 359], "bandwidth": 352, "fusibl": 352, "consider": 352, "versu": 352, "timeit": [352, 353], "tic": 352, "perf_count": 352, "toc": 352, "tpi": 352, "1e3": 352, "4096": [352, 353, 359], "On": [352, 353, 
355], "millisecond": [352, 359], "five": 352, "latest": 352, "won": 352, "trace": 352, "placehold": 352, "insid": 352, "crash": 352, "inspect": [352, 357], "disable_compil": 352, "okai": [352, 355], "intend": 352, "deal": 352, "pretti": [352, 355], "inconveni": 352, "functool": 352, "particularli": 352, "backward": [352, 353], "checkout": 352, "compiled_grad_fn": 352, "71828": 352, "outer": [352, 355], "opportun": 352, "idea": [353, 355], "behind": 353, "dfdx": [353, 354], "d2fdx2": 353, "zero_grad": 353, "detach": 353, "requires_grad": 353, "dloss_dw": 353, "dloss_dx": 353, "lot": 353, "redund": 353, "suppos": [353, 359], "nice": [353, 355], "propag": [353, 354], "stop_gradi": 353, "autom": 353, "contriv": [353, 359], "sake": 353, "clariti": 353, "quit": [353, 356], "power": [353, 356], "difficult": 353, "primit": 353, "issu": [353, 356], "priorit": 353, "xs": 353, "ys": 353, "naive_add": 353, "vmap_add": 353, "total": 353, "390": 353, "wherea": 353, "025": 353, "ten": [353, 355], "Of": 353, "better": [353, 359], "handi": 353, "slice": 354, "ellipsi": 354, "syntax": 354, "idx": 354, "mix": 354, "take_along_axi": 354, "lack": 354, "extrem": [354, 355], "ineffici": [354, 355], "nonzero": 354, "dynam": 355, "easier": 355, "worri": 355, "fun1": 355, "expensive_fun": 355, "consum": 355, "eager": 355, "thank": 355, "weights_fp16": 355, "trade": 355, "bad": 355, "grow": 355, "computation": 355, "costli": 355, "wide": 355, "thousand": 355, "value_and_grad_fn": 355, "implicitli": 355, "anytim": 355, "memoryview": [355, 356], "perfectli": 355, "first_lay": 355, "second_layer_a": 355, "second_layer_b": 355, "protocol": 356, "pep": 356, "3118": 356, "view": 356, "a_view": 356, "owndata": 356, "extern": 356, "x_view": 356, "modifi": 356, "df": 356, "x\u00b2": 356, "2x": 356, "indirectli": 356, "modif": 356, "seen": 356, "occur": 356, "incorpor": 356, "incorrect": 356, "experiment": 356, "break": 356, "advis": 356, "intermedi": 356, "jnp": 356, "tf": 356, "page": 357, "composit": 357, "archiv": 358, "savez_compress": 358, "save_gguf": 358, "arr_0": 358, "advantag": 359, "parallel": 359, "race": 359, "interest": 359, "albeit": 359, "d1": 359, "d2": 359, "matmul": 359, "dens": 359, "twice": 359, "measur": 359, "default_stream": 360, "default_devic": 360, "my_devic": 360}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [216, 0, 1, "", "Stream"], [9, 2, 1, "", "abs"], [10, 2, 1, "", "add"], [11, 2, 1, "", "all"], [12, 2, 1, "", "allclose"], [13, 2, 1, "", "any"], [14, 2, 1, "", "arange"], [15, 2, 1, "", "arccos"], [16, 2, 1, "", "arccosh"], [17, 2, 1, "", "arcsin"], [18, 2, 1, "", "arcsinh"], [19, 2, 1, "", "arctan"], [20, 2, 1, "", "arctanh"], [21, 2, 1, "", "argmax"], [22, 2, 1, "", "argmin"], [23, 2, 1, "", "argpartition"], [24, 2, 1, "", "argsort"], [25, 0, 1, "", "array"], [59, 2, 1, "", "array_equal"], [60, 2, 1, "", "atleast_1d"], [61, 2, 1, "", "atleast_2d"], [62, 2, 1, "", "atleast_3d"], [63, 2, 1, "", "broadcast_to"], [64, 2, 1, "", "ceil"], [65, 2, 1, "", "clip"], [66, 2, 1, "", "compile"], [67, 2, 1, "", "concatenate"], [68, 2, 1, "", "conv1d"], [69, 2, 1, "", "conv2d"], [70, 2, 1, "", "conv_general"], [71, 2, 1, "", "convolve"], [72, 2, 1, "", "cos"], [73, 2, 1, "", "cosh"], [74, 2, 1, "", "default_device"], [75, 2, 1, "", "default_stream"], [76, 2, 1, "", "dequantize"], [77, 2, 1, "", "diag"], [78, 2, 1, "", "diagonal"], [79, 2, 1, "", "disable_compile"], [80, 2, 1, "", "divide"], [81, 2, 1, "", "divmod"], [82, 2, 1, "", "enable_compile"], [83, 2, 1, "", "equal"], 
[84, 2, 1, "", "erf"], [85, 2, 1, "", "erfinv"], [86, 2, 1, "", "eval"], [87, 2, 1, "", "exp"], [88, 2, 1, "", "expand_dims"], [89, 2, 1, "", "eye"], [102, 2, 1, "", "flatten"], [103, 2, 1, "", "floor"], [104, 2, 1, "", "floor_divide"], [105, 2, 1, "", "full"], [106, 2, 1, "", "grad"], [107, 2, 1, "", "greater"], [108, 2, 1, "", "greater_equal"], [109, 2, 1, "", "identity"], [110, 2, 1, "", "inner"], [111, 2, 1, "", "isclose"], [112, 2, 1, "", "isinf"], [113, 2, 1, "", "isnan"], [114, 2, 1, "", "isneginf"], [115, 2, 1, "", "isposinf"], [116, 2, 1, "", "jvp"], [117, 2, 1, "", "less"], [118, 2, 1, "", "less_equal"], [121, 2, 1, "", "linspace"], [122, 2, 1, "", "load"], [123, 2, 1, "", "log"], [124, 2, 1, "", "log10"], [125, 2, 1, "", "log1p"], [126, 2, 1, "", "log2"], [127, 2, 1, "", "logaddexp"], [128, 2, 1, "", "logical_and"], [129, 2, 1, "", "logical_not"], [130, 2, 1, "", "logical_or"], [131, 2, 1, "", "logsumexp"], [132, 2, 1, "", "matmul"], [133, 2, 1, "", "max"], [134, 2, 1, "", "maximum"], [135, 2, 1, "", "mean"], [142, 2, 1, "", "min"], [143, 2, 1, "", "minimum"], [144, 2, 1, "", "moveaxis"], [145, 2, 1, "", "multiply"], [146, 2, 1, "", "negative"], [147, 2, 1, "", "new_stream"], [148, 2, 1, "", "ones"], [149, 2, 1, "", "ones_like"], [150, 2, 1, "", "outer"], [151, 2, 1, "", "pad"], [152, 2, 1, "", "partition"], [153, 2, 1, "", "prod"], [154, 2, 1, "", "quantize"], [155, 2, 1, "", "quantized_matmul"], [166, 2, 1, "", "reciprocal"], [167, 2, 1, "", "repeat"], [168, 2, 1, "", "reshape"], [169, 2, 1, "", "round"], [170, 2, 1, "", "rsqrt"], [171, 2, 1, "", "save"], [172, 2, 1, "", "save_gguf"], [173, 2, 1, "", "save_safetensors"], [174, 2, 1, "", "savez"], [175, 2, 1, "", "savez_compressed"], [176, 2, 1, "", "set_default_device"], [177, 2, 1, "", "set_default_stream"], [178, 2, 1, "", "sigmoid"], [179, 2, 1, "", "sign"], [180, 2, 1, "", "sin"], [181, 2, 1, "", "sinh"], [182, 2, 1, "", "softmax"], [183, 2, 1, "", "sort"], [184, 2, 1, "", "split"], [185, 2, 1, "", "sqrt"], [186, 2, 1, "", "square"], [187, 2, 1, "", "squeeze"], [188, 2, 1, "", "stack"], [189, 2, 1, "", "stop_gradient"], [190, 2, 1, "", "stream"], [191, 2, 1, "", "subtract"], [192, 2, 1, "", "sum"], [193, 2, 1, "", "swapaxes"], [194, 2, 1, "", "take"], [195, 2, 1, "", "take_along_axis"], [196, 2, 1, "", "tan"], [197, 2, 1, "", "tanh"], [198, 2, 1, "", "tensordot"], [199, 2, 1, "", "tile"], [200, 2, 1, "", "topk"], [201, 2, 1, "", "transpose"], [202, 2, 1, "", "tri"], [203, 2, 1, "", "tril"], [204, 2, 1, "", "triu"], [205, 2, 1, "", "value_and_grad"], [206, 2, 1, "", "var"], [207, 2, 1, "", "vjp"], [208, 2, 1, "", "vmap"], [209, 2, 1, "", "where"], [210, 2, 1, "", "zeros"], [211, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[216, 1, 1, "", "__init__"]], "mlx.core.array": [[26, 3, 1, "", "T"], [25, 1, 1, "", "__init__"], [27, 1, 1, "", "abs"], [28, 1, 1, "", "all"], [29, 1, 1, "", "any"], [30, 1, 1, "", "argmax"], [31, 1, 1, "", "argmin"], [32, 1, 1, "", "astype"], [33, 1, 1, "", "cos"], [34, 3, 1, "", "dtype"], [35, 1, 1, "", "exp"], [36, 1, 1, "", "item"], [37, 1, 1, "", "log"], [38, 1, 1, "", "log1p"], [39, 1, 1, "", "logsumexp"], [40, 1, 1, "", "max"], [41, 1, 1, "", "mean"], [42, 1, 1, "", "min"], [43, 3, 1, "", "ndim"], [44, 1, 1, "", "prod"], [45, 1, 1, "", "reciprocal"], [46, 1, 1, "", "reshape"], [47, 1, 1, "", "round"], [48, 1, 1, "", "rsqrt"], [49, 3, 1, "", "shape"], [50, 1, 1, "", "sin"], [51, 3, 1, "", "size"], [52, 
1, 1, "", "split"], [53, 1, 1, "", "sqrt"], [54, 1, 1, "", "square"], [55, 1, 1, "", "sum"], [56, 1, 1, "", "tolist"], [57, 1, 1, "", "transpose"], [58, 1, 1, "", "var"]], "mlx.core.fft": [[90, 2, 1, "", "fft"], [91, 2, 1, "", "fft2"], [92, 2, 1, "", "fftn"], [93, 2, 1, "", "ifft"], [94, 2, 1, "", "ifft2"], [95, 2, 1, "", "ifftn"], [96, 2, 1, "", "irfft"], [97, 2, 1, "", "irfft2"], [98, 2, 1, "", "irfftn"], [99, 2, 1, "", "rfft"], [100, 2, 1, "", "rfft2"], [101, 2, 1, "", "rfftn"]], "mlx.core.linalg": [[119, 2, 1, "", "norm"], [120, 2, 1, "", "qr"]], "mlx.core.metal": [[136, 2, 1, "", "get_active_memory"], [137, 2, 1, "", "get_cache_memory"], [138, 2, 1, "", "get_peak_memory"], [139, 2, 1, "", "is_available"], [140, 2, 1, "", "set_cache_limit"], [141, 2, 1, "", "set_memory_limit"]], "mlx.core.random": [[156, 2, 1, "", "bernoulli"], [157, 2, 1, "", "categorical"], [158, 2, 1, "", "gumbel"], [159, 2, 1, "", "key"], [160, 2, 1, "", "normal"], [161, 2, 1, "", "randint"], [162, 2, 1, "", "seed"], [163, 2, 1, "", "split"], [164, 2, 1, "", "truncated_normal"], [165, 2, 1, "", "uniform"]], "mlx.nn": [[224, 0, 1, "", "ALiBi"], [225, 0, 1, "", "AvgPool1d"], [226, 0, 1, "", "AvgPool2d"], [227, 0, 1, "", "BatchNorm"], [228, 0, 1, "", "Conv1d"], [229, 0, 1, "", "Conv2d"], [230, 0, 1, "", "Dropout"], [231, 0, 1, "", "Dropout2d"], [232, 0, 1, "", "Dropout3d"], [233, 0, 1, "", "Embedding"], [234, 0, 1, "", "GELU"], [235, 0, 1, "", "GRU"], [236, 0, 1, "", "GroupNorm"], [237, 0, 1, "", "InstanceNorm"], [238, 0, 1, "", "LSTM"], [239, 0, 1, "", "LayerNorm"], [240, 0, 1, "", "Linear"], [241, 0, 1, "", "MaxPool1d"], [242, 0, 1, "", "MaxPool2d"], [243, 0, 1, "", "Mish"], [325, 0, 1, "", "Module"], [263, 0, 1, "", "MultiHeadAttention"], [264, 0, 1, "", "PReLU"], [265, 0, 1, "", "QuantizedLinear"], [266, 0, 1, "", "RMSNorm"], [267, 0, 1, "", "RNN"], [268, 0, 1, "", "ReLU"], [269, 0, 1, "", "RoPE"], [270, 0, 1, "", "SELU"], [271, 0, 1, "", "Sequential"], [272, 0, 1, "", "SiLU"], [273, 0, 1, "", "SinusoidalPositionalEncoding"], [274, 0, 1, "", "Softshrink"], [275, 0, 1, "", "Step"], [276, 0, 1, "", "Transformer"], [277, 0, 1, "", "Upsample"], [286, 2, 1, "", "elu"], [287, 2, 1, "", "gelu"], [288, 2, 1, "", "gelu_approx"], [289, 2, 1, "", "gelu_fast_approx"], [290, 2, 1, "", "glu"], [291, 2, 1, "", "hardswish"], [292, 2, 1, "", "leaky_relu"], [293, 2, 1, "", "log_sigmoid"], [294, 2, 1, "", "log_softmax"], [309, 2, 1, "", "mish"], [310, 2, 1, "", "prelu"], [311, 2, 1, "", "relu"], [312, 2, 1, "", "relu6"], [313, 2, 1, "", "selu"], [314, 2, 1, "", "sigmoid"], [315, 2, 1, "", "silu"], [316, 2, 1, "", "softmax"], [317, 2, 1, "", "softplus"], [318, 2, 1, "", "softshrink"], [319, 2, 1, "", "step"], [320, 2, 1, "", "tanh"], [212, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[244, 1, 1, "", "apply"], [245, 1, 1, "", "apply_to_modules"], [246, 1, 1, "", "children"], [247, 1, 1, "", "eval"], [248, 1, 1, "", "filter_and_map"], [249, 1, 1, "", "freeze"], [250, 1, 1, "", "leaf_modules"], [251, 1, 1, "", "load_weights"], [252, 1, 1, "", "modules"], [253, 1, 1, "", "named_modules"], [254, 1, 1, "", "parameters"], [255, 1, 1, "", "save_weights"], [256, 3, 1, "", "state"], [257, 1, 1, "", "train"], [258, 1, 1, "", "trainable_parameters"], [259, 3, 1, "", "training"], [260, 1, 1, "", "unfreeze"], [261, 1, 1, "", "update"], [262, 1, 1, "", "update_modules"]], "mlx.nn.init": [[278, 2, 1, "", "constant"], [279, 2, 1, "", "glorot_normal"], [280, 2, 1, "", "glorot_uniform"], [281, 2, 1, "", "he_normal"], [282, 2, 1, "", "he_uniform"], 
[283, 2, 1, "", "identity"], [284, 2, 1, "", "normal"], [285, 2, 1, "", "uniform"]], "mlx.nn.losses": [[295, 2, 1, "", "binary_cross_entropy"], [296, 2, 1, "", "cosine_similarity_loss"], [297, 2, 1, "", "cross_entropy"], [298, 2, 1, "", "gaussian_nll_loss"], [299, 2, 1, "", "hinge_loss"], [300, 2, 1, "", "huber_loss"], [301, 2, 1, "", "kl_div_loss"], [302, 2, 1, "", "l1_loss"], [303, 2, 1, "", "log_cosh_loss"], [304, 2, 1, "", "margin_ranking_loss"], [305, 2, 1, "", "mse_loss"], [306, 2, 1, "", "nll_loss"], [307, 2, 1, "", "smooth_l1_loss"], [308, 2, 1, "", "triplet_loss"]], "mlx.optimizers": [[328, 0, 1, "", "AdaDelta"], [329, 0, 1, "", "Adafactor"], [330, 0, 1, "", "Adagrad"], [331, 0, 1, "", "Adam"], [332, 0, 1, "", "AdamW"], [333, 0, 1, "", "Adamax"], [334, 0, 1, "", "Lion"], [347, 0, 1, "", "Optimizer"], [339, 0, 1, "", "RMSprop"], [340, 0, 1, "", "SGD"], [341, 2, 1, "", "cosine_decay"], [342, 2, 1, "", "exponential_decay"], [343, 2, 1, "", "join_schedules"], [344, 2, 1, "", "linear_schedule"], [345, 2, 1, "", "step_decay"]], "mlx.optimizers.Optimizer": [[335, 1, 1, "", "apply_gradients"], [336, 1, 1, "", "init"], [337, 3, 1, "", "state"], [338, 1, 1, "", "update"]], "mlx.utils": [[213, 2, 1, "", "tree_flatten"], [214, 2, 1, "", "tree_map"], [215, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"]}, "titleterms": {"oper": [0, 1, 326], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 352, 359], "primit": 1, "us": [1, 355, 360], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 276, 350, 352, 353, 355, 357], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 221, 240], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 323], "encod": 3, "full": [3, 105], "gener": 3, "put": 3, "all": [3, 11, 28], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 122, 358], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 
288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "troubleshoot": 6, "from": [6, 354], "sourc": 6, "requir": 6, "option": 6, "metal": [6, 136, 137, 138, 139, 140, 141, 222], "found": 6, "x86": 6, "shell": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 216], "devic": [7, 219], "dtype": [8, 34], "ab": [9, 27], "add": 10, "allclos": 12, "ani": [13, 29], "arang": 14, "arcco": 15, "arccosh": 16, "arcsin": 17, "arcsinh": 18, "arctan": 19, "arctanh": 20, "argmax": [21, 30], "argmin": [22, 31], "argpartit": 23, "argsort": 24, "arrai": [25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 217, 354, 358], "t": 26, "astyp": 32, "co": [33, 72], "exp": [35, 87], "item": 36, "log": [37, 123], "log1p": [38, 125], "logsumexp": [39, 131], "max": [40, 133], "mean": [41, 135], "min": [42, 142], "ndim": 43, "prod": [44, 153], "reciproc": [45, 166], "reshap": [46, 168], "round": [47, 169], "rsqrt": [48, 170], "shape": 49, "sin": [50, 180], "size": 51, "split": [52, 163, 184], "sqrt": [53, 185], "squar": [54, 186], "sum": [55, 192], "tolist": 56, "transpos": [57, 201], "var": [58, 206], "array_equ": 59, "atleast_1d": 60, "atleast_2d": 61, "atleast_3d": 62, "broadcast_to": 63, "ceil": 64, "clip": 65, "compil": [66, 352], "concaten": 67, "conv1d": [68, 228], "conv2d": [69, 229], "conv_gener": 70, "convolv": 71, "cosh": 73, "default_devic": 74, "default_stream": 75, "dequant": 76, "diag": 77, "diagon": 78, "disable_compil": 79, "divid": 80, "divmod": 81, "enable_compil": 82, "equal": 83, "erf": 84, "erfinv": 85, "eval": [86, 247], "expand_dim": 88, "ey": 89, "fft": [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 220], "fft2": 91, "fftn": 92, "ifft": 93, "ifft2": 94, "ifftn": 95, "irfft": 96, "irfft2": 97, "irfftn": 98, "rfft": 99, "rfft2": 100, "rfftn": 101, "flatten": 102, "floor": 103, "floor_divid": 104, "grad": [106, 223], "greater": 107, "greater_equ": 108, "ident": [109, 283], "inner": 110, "isclos": 111, "isinf": 112, "isnan": 113, "isneginf": 114, "isposinf": 115, "jvp": 116, "less": 117, "less_equ": 118, "linalg": [119, 120], "norm": 119, "qr": 120, "linspac": 121, "log10": 124, "log2": 126, "logaddexp": 127, "logical_and": 128, "logical_not": 129, "logical_or": 130, "matmul": 132, "maximum": 134, "get_active_memori": 136, "get_cache_memori": 137, 
"get_peak_memori": 138, "is_avail": 139, "set_cache_limit": 140, "set_memory_limit": 141, "minimum": 143, "moveaxi": 144, "multipli": 145, "neg": 146, "new_stream": 147, "ones": 148, "ones_lik": 149, "outer": 150, "pad": 151, "partit": 152, "quantiz": 154, "quantized_matmul": 155, "random": [156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 349], "bernoulli": 156, "categor": 157, "gumbel": 158, "kei": 159, "normal": [160, 284], "randint": 161, "seed": 162, "truncated_norm": 164, "uniform": [165, 285], "repeat": 167, "save": [171, 358], "save_gguf": 172, "save_safetensor": 173, "savez": 174, "savez_compress": 175, "set_default_devic": 176, "set_default_stream": 177, "sigmoid": [178, 314], "sign": 179, "sinh": 181, "softmax": [182, 316], "sort": 183, "squeez": 187, "stack": 188, "stop_gradi": 189, "stream": [190, 216, 219, 360], "subtract": 191, "swapax": 193, "take": 194, "take_along_axi": 195, "tan": 196, "tanh": [197, 320], "tensordot": 198, "tile": 199, "topk": 200, "tri": 202, "tril": 203, "triu": 204, "value_and_grad": [205, 212], "vjp": 207, "vmap": 208, "where": 209, "zero": 210, "zeros_lik": 211, "nn": [212, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320], "util": [213, 214, 215, 351], "tree_flatten": 213, "tree_map": 214, "tree_unflatten": 215, "data": 218, "type": 218, "support": 218, "algebra": 221, "neural": 223, "network": 223, "quick": [223, 357], "start": [223, 357], "The": 223, "modul": [223, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 325], "class": 223, "paramet": [223, 254], "updat": [223, 261, 338, 354], "inspect": 223, "valu": 223, "alibi": 224, "avgpool1d": 225, "avgpool2d": 226, "batchnorm": 227, "dropout": 230, "dropout2d": 231, "dropout3d": 232, "embed": 233, "gelu": [234, 287], "gru": 235, "groupnorm": 236, "instancenorm": 237, "lstm": 238, "layernorm": 239, "maxpool1d": 241, "maxpool2d": 242, "mish": [243, 309], "appli": 244, "apply_to_modul": 245, "children": 246, "filter_and_map": 248, "freez": 249, "leaf_modul": 250, "load_weight": 251, "named_modul": 253, "save_weight": 255, "state": [256, 337], "train": [257, 259, 352], "trainable_paramet": 258, "unfreez": 260, "update_modul": 262, "multiheadattent": 263, "prelu": [264, 310], "quantizedlinear": 265, "rmsnorm": 266, "rnn": 267, "relu": [268, 311], "rope": 269, "selu": [270, 313], "sequenti": 271, "silu": [272, 315], "sinusoidalpositionalencod": 273, "softshrink": [274, 318], "step": [275, 319], "upsampl": 277, "init": [278, 279, 280, 281, 282, 283, 284, 285, 336], "constant": 278, "glorot_norm": 279, "glorot_uniform": 280, "he_norm": 281, "he_uniform": 282, "elu": 286, "gelu_approx": 288, "gelu_fast_approx": 289, "glu": 290, "hardswish": 291, "leaky_relu": 292, "log_sigmoid": 293, "log_softmax": 294, "loss": [295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 324], "binary_cross_entropi": 295, "cosine_similarity_loss": 296, "cross_entropi": 297, "gaussian_nll_loss": 298, "hinge_loss": 299, "huber_loss": 300, "kl_div_loss": 301, "l1_loss": 302, "log_cosh_loss": 303, "margin_ranking_loss": 304, "mse_loss": 305, 
"nll_loss": 306, "smooth_l1_loss": 307, "triplet_loss": 308, "relu6": 312, "softplu": 317, "function": [321, 324, 352, 353, 357], "initi": 322, "optim": [327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347], "adadelta": 328, "adafactor": 329, "adagrad": 330, "adam": 331, "adamw": 332, "adamax": 333, "lion": 334, "apply_gradi": 335, "rmsprop": 339, "sgd": 340, "cosine_decai": 341, "exponential_decai": 342, "join_schedul": 343, "linear_schedul": 344, "step_decai": 345, "common": 346, "schedul": 348, "tree": 351, "basic": [352, 357], "speedup": 352, "debug": 352, "pure": 352, "graph": [352, 355, 357], "automat": 353, "differenti": 353, "vector": 353, "index": 354, "differ": 354, "numpi": [354, 356], "In": 354, "place": 354, "lazi": 355, "evalu": 355, "why": 355, "comput": 355, "onli": 355, "what": 355, "you": 355, "when": 355, "convers": 356, "other": 356, "framework": 356, "pytorch": 356, "jax": 356, "tensorflow": 356, "guid": 357, "serial": 358, "format": 358, "unifi": 359, "memori": 359, "A": 359, "simpl": 359, "specifi": 360}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}})