Mirror of https://github.com/ml-explore/mlx.git, synced 2025-06-25 18:11:15 +08:00
Search.setIndex({"docnames": ["cpp/ops", "dev/extensions", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.atleast_1d", "python/_autosummary/mlx.core.atleast_2d", "python/_autosummary/mlx.core.atleast_3d", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.conv_general", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", 
"python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", 
"python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.stream", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_unflatten", "python/_autosummary/stream_class", "python/array", "python/data_types", "python/devices_and_streams", "python/fft", "python/linalg", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", "python/nn/_autosummary/mlx.nn.AvgPool1d", "python/nn/_autosummary/mlx.nn.AvgPool2d", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.MaxPool1d", "python/nn/_autosummary/mlx.nn.MaxPool2d", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", "python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", "python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", 
"python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.Upsample", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", "python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.elu", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.glu", "python/nn/_autosummary_functions/mlx.nn.hardswish", "python/nn/_autosummary_functions/mlx.nn.leaky_relu", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid", "python/nn/_autosummary_functions/mlx.nn.log_softmax", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.relu6", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.sigmoid", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.softmax", "python/nn/_autosummary_functions/mlx.nn.softplus", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/_autosummary_functions/mlx.nn.tanh", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizers", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta", "python/optimizers/_autosummary/mlx.optimizers.Adafactor", "python/optimizers/_autosummary/mlx.optimizers.Adagrad", "python/optimizers/_autosummary/mlx.optimizers.Adam", "python/optimizers/_autosummary/mlx.optimizers.AdamW", "python/optimizers/_autosummary/mlx.optimizers.Adamax", "python/optimizers/_autosummary/mlx.optimizers.Lion", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update", "python/optimizers/_autosummary/mlx.optimizers.RMSprop", "python/optimizers/_autosummary/mlx.optimizers.SGD", 
"python/optimizers/_autosummary/mlx.optimizers.cosine_decay", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay", "python/optimizers/_autosummary/mlx.optimizers.join_schedules", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule", "python/optimizers/_autosummary/mlx.optimizers.step_decay", "python/optimizers/common_optimizers", "python/optimizers/optimizer", "python/optimizers/schedulers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/function_transforms", "usage/indexing", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "filenames": ["cpp/ops.rst", "dev/extensions.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.atleast_1d.rst", "python/_autosummary/mlx.core.atleast_2d.rst", "python/_autosummary/mlx.core.atleast_3d.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clip.rst", 
"python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.conv_general.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", "python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.norm.rst", "python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", 
"python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.stream.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/_autosummary/stream_class.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/fft.rst", "python/linalg.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.AvgPool1d.rst", "python/nn/_autosummary/mlx.nn.AvgPool2d.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", 
"python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.MaxPool1d.rst", "python/nn/_autosummary/mlx.nn.MaxPool2d.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", "python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", "python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.Upsample.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", "python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", "python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.elu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.glu.rst", "python/nn/_autosummary_functions/mlx.nn.hardswish.rst", "python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.log_softmax.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", 
"python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.relu6.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softmax.rst", "python/nn/_autosummary_functions/mlx.nn.softplus.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/_autosummary_functions/mlx.nn.tanh.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizers.rst", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta.rst", "python/optimizers/_autosummary/mlx.optimizers.Adafactor.rst", "python/optimizers/_autosummary/mlx.optimizers.Adagrad.rst", "python/optimizers/_autosummary/mlx.optimizers.Adam.rst", "python/optimizers/_autosummary/mlx.optimizers.AdamW.rst", "python/optimizers/_autosummary/mlx.optimizers.Adamax.rst", "python/optimizers/_autosummary/mlx.optimizers.Lion.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.rst", "python/optimizers/_autosummary/mlx.optimizers.RMSprop.rst", "python/optimizers/_autosummary/mlx.optimizers.SGD.rst", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst", "python/optimizers/_autosummary/mlx.optimizers.step_decay.rst", "python/optimizers/common_optimizers.rst", "python/optimizers/optimizer.rst", "python/optimizers/schedulers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "titles": ["Operations", "Developer Documentation", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.abs", "mlx.core.add", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.cos", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.item", "mlx.core.array.log", "mlx.core.array.log1p", 
"mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.sum", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array_equal", "mlx.core.atleast_1d", "mlx.core.atleast_2d", "mlx.core.atleast_3d", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.conv_general", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.divide", "mlx.core.divmod", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.eye", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.identity", "mlx.core.inner", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.jvp", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.norm", "mlx.core.linalg.qr", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.prod", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.normal", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.reciprocal", "mlx.core.repeat", "mlx.core.reshape", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.stop_gradient", "mlx.core.stream", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.value_and_grad", "mlx.utils.tree_flatten", 
"mlx.utils.tree_map", "mlx.utils.tree_unflatten", "mlx.core.Stream", "Array", "Data Types", "Devices and Streams", "FFT", "Linear Algebra", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.AvgPool1d", "mlx.nn.AvgPool2d", "mlx.nn.BatchNorm", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GroupNorm", "mlx.nn.InstanceNorm", "mlx.nn.LayerNorm", "mlx.nn.Linear", "mlx.nn.MaxPool1d", "mlx.nn.MaxPool2d", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.ReLU", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softshrink", "mlx.nn.Step", "mlx.nn.Transformer", "mlx.nn.Upsample", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", "mlx.nn.elu", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.glu", "mlx.nn.hardswish", "mlx.nn.leaky_relu", "mlx.nn.log_sigmoid", "mlx.nn.log_softmax", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.relu6", "mlx.nn.selu", "mlx.nn.sigmoid", "mlx.nn.silu", "mlx.nn.softmax", "mlx.nn.softplus", "mlx.nn.softshrink", "mlx.nn.step", "mlx.nn.tanh", "Functions", "Initializers", "Layers", "Loss Functions", "Module", "Operations", "Optimizers", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.optimizers.cosine_decay", "mlx.optimizers.exponential_decay", "mlx.optimizers.join_schedules", "mlx.optimizers.linear_schedule", "mlx.optimizers.step_decay", "Common Optimizers", "Optimizer", "Schedulers", "Random", "Transforms", "Tree Utils", "Compilation", "Function Transforms", "Indexing Arrays", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "terms": {"mlx": [1, 2, 3, 4, 6, 213, 309, 312, 314, 334, 336, 338, 339, 340, 341, 342, 343, 344, 345, 346], "provid": [1, 3, 76, 106, 191, 196, 205, 213, 232, 237, 239, 248, 249, 250, 253, 263, 264, 308, 312, 345, 347], "open": [1, 6, 14, 154, 158], "flexibl": [1, 5, 250], "which": [1, 3, 4, 5, 6, 14, 32, 
66, 70, 78, 86, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 106, 111, 112, 113, 114, 115, 118, 119, 121, 147, 150, 151, 160, 161, 164, 165, 166, 167, 168, 180, 181, 187, 196, 198, 199, 216, 221, 222, 224, 230, 232, 236, 256, 284, 287, 291, 294, 309, 322, 323, 336, 339, 340, 341, 342, 346, 347], "user": [1, 3, 213], "mai": [1, 118, 221, 340, 341], "add": [1, 3, 88, 126, 144, 147, 218, 219, 340, 346], "special": 1, "without": [1, 3, 5, 182, 251, 308, 338, 339, 342, 343, 346], "much": [1, 3, 215, 216, 229, 230, 339, 342], "hassl": 1, "while": [1, 3, 6, 161, 256, 342, 343], "librari": [1, 6, 213], "suppli": 1, "effici": [1, 3, 5, 221, 256, 342, 344], "can": [1, 3, 5, 6, 10, 14, 46, 57, 66, 78, 79, 80, 81, 83, 86, 107, 108, 116, 117, 118, 126, 133, 136, 138, 149, 150, 154, 157, 158, 165, 184, 196, 213, 216, 223, 230, 236, 248, 258, 264, 284, 309, 312, 314, 322, 323, 336, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347], "compos": [1, 5, 213, 339, 340, 344], "ani": [1, 3, 5, 14, 66, 204, 205, 206, 213, 224, 232, 233, 236, 244, 253, 263, 264, 309, 331, 338, 339, 340, 342, 344, 345, 346], "number": [1, 14, 51, 66, 69, 70, 76, 89, 106, 109, 115, 120, 144, 147, 148, 150, 153, 156, 158, 160, 162, 191, 193, 196, 198, 199, 213, 217, 218, 219, 221, 222, 225, 226, 251, 252, 263, 264, 266, 267, 268, 269, 328, 330, 331, 336, 339, 340, 347], "applic": [1, 6], "aris": [1, 343], "case": [1, 3, 92, 95, 96, 98, 99, 100, 101, 102, 119, 131, 161, 180, 216, 221, 230, 262, 294, 300, 305, 306, 322, 323, 339, 340, 344, 345, 346, 347], "where": [1, 4, 89, 147, 196, 199, 215, 216, 217, 218, 219, 220, 221, 222, 224, 225, 226, 227, 228, 229, 230, 236, 252, 254, 262, 268, 269, 273, 274, 275, 276, 285, 291, 297, 300, 302, 306, 323, 340, 341], "new": [1, 4, 63, 78, 137, 140, 161, 181, 192, 205, 251, 312, 314, 325, 330, 339, 341, 342, 343], "function": [1, 2, 3, 4, 5, 12, 66, 81, 84, 85, 106, 115, 118, 119, 131, 171, 196, 198, 199, 203, 205, 213, 224, 231, 233, 237, 248, 252, 255, 257, 258, 259, 261, 262, 263, 274, 275, 276, 277, 278, 280, 281, 296, 301, 303, 304, 305, 306, 307, 309, 314, 323, 336, 338, 341, 342, 343, 345], "highli": [1, 6], "optim": [1, 2, 4, 5, 249, 339, 340, 342], "ar": [1, 2, 3, 4, 5, 6, 12, 14, 59, 63, 65, 66, 70, 71, 78, 86, 89, 91, 92, 94, 95, 97, 98, 100, 101, 102, 106, 111, 112, 113, 114, 115, 118, 119, 121, 131, 143, 144, 145, 147, 148, 149, 150, 151, 154, 157, 158, 167, 168, 180, 181, 187, 196, 198, 199, 204, 205, 217, 218, 219, 220, 221, 222, 225, 226, 227, 228, 239, 251, 253, 264, 282, 284, 285, 308, 312, 321, 323, 338, 339, 340, 341, 342, 343, 344, 345, 346], "need": [1, 3, 4, 5, 59, 147, 213, 249, 250, 260, 263, 336, 340, 342, 343, 344, 346], "For": [1, 3, 6, 118, 147, 206, 213, 217, 221, 232, 237, 245, 248, 253, 256, 260, 264, 266, 267, 268, 269, 309, 336, 339, 340, 341, 342, 343, 344, 345, 346], "you": [1, 3, 4, 5, 6, 213, 260, 263, 309, 336, 339, 340, 341, 343, 345, 346], "design": [1, 2, 5, 336, 346], "your": [1, 3, 6, 312, 340, 342], "own": [1, 6, 343], "link": [1, 6], "top": [1, 228, 264], "core": [1, 2, 3, 4, 213, 215, 216, 217, 226, 229, 230, 239, 242, 246, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 309, 312, 314, 339, 343, 344], "we": [1, 2, 3, 4, 76, 147, 148, 213, 223, 258, 319, 321, 336, 338, 339, 340, 342, 346], "inner": [1, 339], "work": [1, 3, 6, 339, 340, 341, 342], "go": [1, 3, 340], "over": [1, 3, 4, 11, 13, 21, 22, 23, 24, 68, 69, 70, 92, 95, 98, 101, 110, 118, 120, 130, 132, 134, 135, 145, 146, 163, 175, 176, 185, 191, 197, 217, 218, 219, 225, 
227, 254, 284, 328, 331, 340], "simpl": [1, 3, 4, 213, 223, 308, 339, 340, 342], "learn": [1, 2, 4, 5, 217, 225, 226, 227, 252, 254, 315, 316, 317, 318, 319, 320, 321, 326, 327], "step": [1, 3, 4, 14, 213, 316, 323, 328, 330, 331, 332, 339], "involv": [1, 314, 339], "ad": [1, 2, 6, 226, 312, 315, 316, 317, 318, 319, 320, 326, 342, 345], "let": [1, 2, 3, 339, 340, 342, 343], "s": [1, 2, 3, 4, 34, 43, 66, 75, 76, 91, 92, 94, 95, 97, 98, 100, 101, 106, 118, 121, 134, 143, 147, 150, 162, 165, 166, 183, 196, 197, 199, 203, 213, 216, 230, 236, 237, 239, 243, 244, 248, 314, 323, 324, 336, 339, 340, 342, 343, 344, 345, 346], "sai": [1, 3, 309, 342], "would": [1, 3, 264, 341, 342, 343, 346], "like": [1, 3, 5, 142, 202, 222, 290, 323, 325, 339, 340, 342, 343, 344, 346], "an": [1, 3, 4, 6, 8, 11, 13, 25, 60, 61, 62, 63, 68, 69, 70, 86, 89, 102, 105, 109, 118, 121, 132, 135, 137, 141, 142, 144, 146, 147, 148, 160, 161, 162, 177, 180, 186, 187, 188, 191, 193, 199, 201, 202, 204, 205, 213, 215, 216, 220, 225, 227, 228, 229, 230, 232, 251, 252, 253, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 275, 297, 309, 315, 325, 329, 334, 336, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347], "take": [1, 3, 4, 66, 106, 115, 133, 136, 142, 148, 188, 196, 198, 199, 202, 251, 336, 340, 341, 345, 346, 347], "two": [1, 10, 12, 59, 61, 78, 80, 83, 91, 94, 100, 107, 108, 116, 117, 119, 126, 131, 133, 136, 138, 143, 186, 216, 230, 253, 264, 277, 283, 339, 340, 341, 346], "arrai": [1, 3, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 213, 217, 232, 239, 242, 246, 252, 264, 265, 266, 267, 268, 269, 270, 271, 272, 274, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 306, 309, 312, 327, 328, 329, 330, 331, 332, 339, 340, 342, 343, 344, 346], "x": [1, 2, 3, 4, 84, 109, 118, 148, 151, 162, 167, 171, 194, 195, 200, 205, 213, 215, 216, 217, 224, 225, 226, 227, 228, 229, 230, 231, 232, 252, 254, 255, 260, 262, 264, 273, 274, 275, 276, 277, 278, 279, 280, 281, 294, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 312, 314, 321, 339, 340, 341, 342, 343, 344, 346], "y": [1, 2, 3, 4, 200, 213, 217, 221, 225, 226, 227, 228, 254, 286, 291, 294, 314, 317, 339, 340, 342, 343], "scale": [1, 3, 76, 147, 148, 153, 221, 222, 251, 256, 257, 260, 264, 300, 316], "them": [1, 3, 213, 237, 248, 346], "both": [1, 10, 80, 81, 83, 107, 108, 116, 117, 118, 126, 133, 136, 138, 150, 184, 215, 216, 226, 229, 230, 264, 314, 339, 340, 344, 346], "some": [1, 2, 3, 4, 237, 248, 323, 339, 340, 342], "coeffici": [1, 315, 316, 318, 319, 320, 321], "alpha": [1, 147, 273, 295, 297, 300, 319, 326], "beta": [1, 76, 147, 217, 225, 226, 227, 294, 318, 319, 320, 321], "respect": [1, 2, 4, 106, 147, 196, 205, 213, 217, 224, 225, 226, 227, 312, 340, 344], "togeth": [1, 4, 147, 205], "get": [1, 2, 4, 6, 69, 70, 74, 75, 152, 213, 339, 340, 342, 346], "z": [1, 339, 342], 
"well": [1, 3, 213, 237, 248, 251, 342], "veri": [1, 3, 251, 342, 346], "easili": 1, "do": [1, 3, 6, 213, 238, 248, 309, 312, 319, 339, 340, 342], "just": [1, 4, 339, 341], "write": [1, 3, 213, 343], "out": [1, 6, 215, 216, 221, 222, 229, 230, 245, 339, 340, 341], "follow": [1, 3, 4, 5, 6, 14, 71, 76, 118, 147, 213, 275, 276, 288, 315, 316, 317, 318, 319, 320, 321, 327, 336, 339, 340, 346], "import": [1, 2, 3, 4, 6, 118, 167, 196, 204, 205, 206, 213, 215, 216, 217, 226, 229, 230, 239, 264, 282, 284, 291, 309, 312, 339, 340, 341, 342, 343, 344], "mx": [1, 2, 3, 4, 102, 118, 119, 121, 167, 196, 213, 215, 216, 217, 226, 229, 230, 232, 239, 243, 255, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 279, 282, 283, 284, 288, 291, 298, 307, 309, 312, 314, 336, 339, 340, 341, 342, 343, 344, 345, 346, 347], "def": [1, 2, 3, 4, 196, 213, 312, 339, 340, 341, 342, 343, 346], "simple_axpbi": 1, "float": [1, 12, 14, 56, 104, 105, 118, 148, 149, 153, 154, 157, 158, 209, 217, 220, 221, 222, 225, 226, 227, 232, 254, 256, 260, 262, 263, 264, 265, 266, 267, 268, 269, 271, 272, 283, 284, 285, 287, 291, 294, 295, 305, 306, 315, 316, 317, 318, 319, 320, 321, 326, 327, 328, 329, 331, 332], "return": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 36, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 213, 234, 236, 238, 240, 241, 242, 246, 253, 265, 266, 267, 268, 269, 270, 271, 272, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 309, 312, 322, 338, 339, 340, 341, 342, 343, 345, 346], "thi": [1, 3, 4, 6, 11, 12, 13, 14, 21, 22, 23, 24, 82, 115, 118, 119, 126, 130, 131, 132, 134, 135, 145, 146, 150, 170, 175, 176, 177, 185, 187, 197, 213, 220, 221, 222, 233, 234, 236, 237, 240, 241, 242, 246, 248, 249, 250, 251, 253, 262, 266, 267, 268, 269, 275, 276, 277, 290, 306, 312, 323, 338, 339, 340, 342, 343, 345], "perform": [1, 3, 5, 70, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 131, 148, 162, 175, 187, 213, 225, 263, 268, 269, 339, 341, 342, 346], "leav": [1, 86, 205], "differenti": [1, 5], "howev": [1, 213, 224, 225, 323, 336, 339, 342, 343], "vector": [1, 2, 5, 110, 115, 118, 187, 198, 199, 223, 284, 344], "math": [1, 3, 295, 339], "often": [1, 222], "realiz": 1, "axpbi": 1, "routin": 1, "defin": [1, 2, 3, 4, 6, 118, 148, 204, 343], "same": [1, 3, 6, 12, 59, 63, 66, 69, 70, 71, 96, 99, 100, 101, 106, 115, 144, 150, 162, 198, 200, 213, 216, 217, 220, 225, 226, 230, 253, 265, 266, 267, 268, 269, 270, 271, 272, 284, 295, 312, 322, 336, 339, 341, 346], "realli": 1, "part": [1, 340, 341], "doe": [1, 3, 6, 213, 339, 341, 342, 343], "fast": [1, 224, 276, 346], "so": [1, 3, 6, 106, 196, 220, 264, 314, 339, 342, 346], "decid": [1, 205, 236], "want": [1, 3, 340, 346], "reli": 1, "acceler": [1, 217], "framework": [1, 5], "continu": [1, 340], "impos": 1, "our": [1, 3, 4, 258, 315, 316, 317, 318, 320, 321], "assumpt": 1, "also": [1, 3, 4, 5, 6, 10, 79, 80, 81, 83, 92, 95, 98, 101, 107, 
108, 116, 117, 126, 133, 136, 138, 147, 184, 203, 213, 236, 249, 251, 253, 259, 274, 300, 302, 308, 314, 339, 340, 341, 342, 343, 344, 347], "assum": [1, 3, 119, 205, 213, 215, 216, 225, 229, 230], "how": [1, 3, 4, 213, 215, 216, 218, 219, 223, 229, 230, 264, 322, 339, 341, 346], "gradient": [1, 2, 4, 106, 182, 196, 203, 213, 237, 249, 253, 263, 290, 312, 314, 315, 316, 318, 319, 320, 321, 322, 325, 327, 339, 340, 341, 342, 343, 344], "ins": 1, "what": [1, 3, 205], "coincid": 1, "right": [1, 6, 147, 215, 216, 224, 229, 230, 264, 275, 276, 285, 287, 295], "place": [1, 3, 162, 342, 343], "cours": [1, 340], "The": [1, 3, 4, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 34, 43, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 165, 166, 171, 172, 173, 174, 175, 176, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 209, 215, 216, 217, 218, 219, 220, 221, 222, 223, 225, 226, 227, 228, 229, 230, 233, 239, 243, 244, 249, 250, 251, 253, 254, 256, 258, 260, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 309, 312, 314, 315, 316, 317, 318, 319, 320, 321, 324, 326, 327, 328, 331, 334, 339, 340, 341, 342, 343, 344, 345, 346, 347], "structur": [1, 322, 340], "from": [1, 3, 4, 5, 76, 78, 97, 98, 100, 101, 105, 118, 121, 131, 142, 147, 149, 150, 151, 152, 154, 157, 167, 180, 182, 184, 187, 188, 200, 202, 204, 205, 206, 213, 228, 237, 239, 251, 266, 267, 268, 269, 271, 272, 285, 294, 309, 338, 339, 340, 342, 343, 344, 345, 346], "frontend": 1, "api": [1, 340], "redirect": 1, "when": [1, 3, 5, 6, 66, 70, 118, 121, 218, 219, 264, 268, 269, 288, 294, 312, 330, 336, 339, 346], "appropri": [1, 339], "fallback": 1, "metal": 1, "vjp": [1, 344], "jvp": [1, 344], "In": [1, 3, 4, 131, 147, 205, 213, 221, 225, 312, 315, 317, 318, 320, 321, 322, 338, 339, 340, 342, 345, 346], "one": [1, 3, 6, 56, 60, 65, 69, 70, 88, 89, 118, 124, 131, 148, 150, 180, 184, 248, 264, 284, 346], "sentenc": 1, "comput": [1, 2, 3, 4, 5, 6, 76, 106, 115, 118, 126, 134, 143, 147, 175, 182, 191, 196, 197, 198, 203, 213, 217, 225, 226, 227, 237, 249, 253, 254, 256, 263, 266, 267, 268, 269, 275, 276, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 314, 315, 316, 318, 319, 320, 321, 325, 339, 340, 344, 346], "graph": [1, 3, 4, 5, 340], "rule": 1, "evalu": [1, 3, 4, 5, 86, 115, 198, 213, 235, 245, 312, 314, 339, 344], "said": [1, 3], "start": [1, 2, 3, 5, 6, 14, 120, 177, 339, 341, 346], "discuss": 1, "more": [1, 4, 8, 56, 78, 131, 165, 166, 213, 217, 221, 256, 260, 263, 264, 266, 267, 268, 269, 336, 339, 340, 341, 344, 346], "detail": [1, 8, 213, 221, 256, 260, 264, 266, 267, 268, 269, 315, 317, 318, 320, 321, 341, 344], "thei": [1, 2, 3, 12, 71, 258, 286, 312, 321, 338, 339, 342, 344, 345, 346], "c": [1, 3, 118, 209, 215, 216, 217, 218, 219, 221, 222, 226, 229, 230, 343, 344, 346], "scalar": [1, 10, 12, 25, 36, 56, 59, 63, 65, 80, 81, 83, 104, 105, 106, 107, 108, 116, 117, 118, 120, 126, 127, 128, 129, 131, 133, 136, 138, 144, 
154, 157, 158, 165, 184, 196, 200, 203, 295, 340, 342, 344], "sum": [1, 2, 10, 110, 118, 130, 175, 191, 213, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 341, 343], "element": [1, 9, 10, 15, 16, 17, 18, 19, 20, 23, 51, 64, 72, 73, 76, 80, 81, 83, 84, 85, 87, 89, 103, 104, 107, 108, 111, 112, 113, 114, 116, 117, 122, 123, 124, 125, 126, 127, 128, 129, 133, 136, 138, 139, 145, 147, 148, 159, 160, 163, 171, 172, 173, 174, 178, 179, 184, 187, 189, 190, 196, 200, 220, 221, 222, 231, 252, 256, 278, 280, 281, 296, 297, 299, 302, 303, 304, 339, 340], "wise": [1, 9, 10, 15, 16, 17, 18, 19, 20, 64, 72, 73, 80, 81, 83, 84, 85, 87, 103, 104, 107, 108, 116, 117, 122, 123, 124, 125, 126, 127, 128, 129, 133, 136, 138, 139, 159, 163, 171, 172, 173, 174, 178, 179, 184, 189, 190, 221, 222, 231, 252, 278, 280, 281, 296, 297, 299, 302, 303, 304, 339], "numpi": [1, 3, 4, 5, 10, 12, 14, 63, 80, 81, 83, 107, 108, 116, 117, 126, 131, 133, 136, 138, 184, 342, 344, 345], "style": [1, 10, 12, 80, 81, 83, 107, 108, 116, 117, 126, 131, 133, 136, 138, 184], "broadcast": [1, 10, 12, 63, 65, 80, 81, 83, 105, 107, 108, 116, 117, 126, 131, 133, 136, 138, 149, 150, 157, 158, 184, 188, 200, 251], "between": [1, 5, 65, 102, 263, 283, 286, 287, 290, 330, 342, 346], "input": [1, 2, 3, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 77, 78, 80, 81, 83, 84, 85, 87, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 142, 143, 144, 145, 146, 147, 148, 156, 159, 160, 161, 162, 163, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 194, 195, 196, 197, 199, 200, 202, 215, 216, 217, 218, 219, 221, 222, 223, 225, 226, 227, 228, 229, 230, 251, 253, 254, 256, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 285, 286, 287, 288, 290, 291, 293, 295, 306, 309, 339, 340, 341, 344, 345], "upcast": 1, "const": [1, 285], "factor": [1, 119, 264, 284, 329, 332], "streamordevic": 1, "stream": [1, 5, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 197, 200, 201, 202, 346], "schedul": [1, 314, 328, 329, 330, 331, 332, 334, 346], "itself": [1, 323], "call": [1, 3, 4, 26, 104, 213, 223, 237, 248, 258, 312, 314, 323, 339, 340, 342], "other": [1, 3, 5, 118, 213, 238, 312, 321, 339, 341, 342, 344], "within": [1, 23], "simplest": [1, 213], "wai": [1, 3, 6, 213, 264, 339, 340, 341], "about": [1, 3, 4, 342, 346], "term": [1, 285, 315, 316, 317, 318, 319, 320, 326], "exist": [1, 3, 237, 248], "auto": [1, 6], "ax": [1, 11, 13, 21, 22, 57, 88, 91, 92, 94, 95, 97, 98, 100, 101, 102, 110, 118, 130, 132, 134, 135, 144, 146, 175, 180, 185, 186, 191, 
192, 197, 340], "multipli": [1, 147, 148, 220, 260, 264], "earlier": 1, "goal": 1, "themselv": [1, 339], "contain": [1, 3, 23, 24, 49, 66, 78, 96, 97, 98, 118, 127, 128, 129, 147, 177, 200, 213, 236, 238, 239, 244, 263, 291, 309, 312, 339, 340], "act": [1, 290], "data": [1, 4, 5, 8, 14, 89, 99, 100, 105, 109, 120, 141, 157, 193, 201, 222, 265, 266, 267, 268, 269, 270, 271, 272, 339, 341, 343], "nor": [1, 106, 196], "rather": [1, 340, 346], "easi": [1, 213], "interfac": 1, "block": [1, 3, 263], "A": [1, 3, 5, 6, 7, 49, 59, 66, 106, 115, 118, 119, 121, 130, 131, 147, 149, 150, 151, 153, 154, 157, 158, 177, 181, 183, 196, 198, 199, 203, 204, 205, 206, 207, 213, 217, 221, 225, 226, 227, 236, 240, 241, 249, 250, 254, 258, 260, 263, 266, 267, 269, 276, 295, 296, 312, 314, 318, 320, 322, 323, 325, 330, 339, 340, 342, 343], "It": [1, 3, 6, 106, 170, 196, 213, 250, 253, 322, 334, 343, 345], "creat": [1, 3, 6, 89, 109, 183, 213, 312, 314, 330, 339, 341, 343], "output": [1, 3, 6, 11, 12, 13, 14, 23, 63, 66, 89, 96, 99, 100, 101, 105, 106, 109, 118, 120, 130, 132, 134, 135, 141, 142, 145, 146, 149, 150, 151, 153, 154, 157, 158, 167, 168, 175, 180, 185, 188, 193, 196, 197, 198, 199, 200, 201, 202, 215, 216, 217, 218, 219, 226, 228, 229, 230, 251, 253, 262, 263, 264, 266, 267, 268, 269, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 309, 339, 340, 341, 342, 343, 344, 345, 346], "given": [1, 11, 13, 23, 63, 65, 67, 76, 78, 86, 88, 90, 91, 92, 93, 94, 95, 99, 100, 101, 105, 118, 130, 132, 134, 135, 140, 146, 154, 162, 170, 175, 177, 185, 193, 194, 195, 197, 207, 215, 216, 220, 229, 230, 236, 251, 283, 285, 291], "set": [1, 3, 4, 6, 66, 79, 82, 169, 170, 183, 224, 228, 235, 237, 244, 245, 248, 249, 253, 256, 262, 283, 295, 306, 312, 316, 323, 336, 340, 342], "further": [1, 6, 340], "class": [1, 3, 4, 7, 8, 25, 207, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 284, 312, 315, 316, 317, 318, 319, 320, 321, 326, 327, 334], "under": [1, 118], "These": [1, 66, 188, 284, 346], "word": 1, "bit": [1, 76, 147, 148, 209, 232, 253], "abstract": 1, "back": [1, 3, 343], "give": [1, 3, 4, 23, 339], "ourselv": 1, "concret": [1, 228, 342, 346], "imag": [1, 219, 221, 222, 264], "public": [1, 213], "explicit": [1, 323, 336, 343], "alpha_": 1, "beta_": 1, "must": [1, 6, 65, 105, 118, 149, 150, 154, 157, 158, 200, 264, 343], "know": [1, 3], "popul": 1, "To": [1, 2, 3, 4, 6, 213, 309, 339, 340, 344], "avoid": [1, 339], "unnecessari": [1, 3], "alloc": [1, 312], "respons": 1, "space": [1, 120, 293], "void": 1, "eval_cpu": 1, "std": [1, 271], "overrid": [1, 82], "eval_gpu": 1, "jacobian": [1, 115, 198, 344], "product": [1, 110, 115, 131, 143, 146, 191, 198, 251, 344], "primal": [1, 115, 198], "tangent": [1, 19, 20, 115, 189, 190, 307], "int": [1, 3, 4, 7, 11, 13, 14, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 47, 49, 52, 55, 56, 58, 63, 67, 68, 69, 70, 76, 77, 78, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 106, 109, 118, 120, 130, 132, 134, 135, 137, 141, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 175, 176, 177, 180, 181, 185, 186, 187, 188, 191, 192, 193, 194, 195, 196, 197, 199, 201, 207, 213, 215, 216, 217, 218, 219, 223, 225, 226, 227, 228, 229, 230, 251, 253, 254, 256, 260, 263, 277, 283, 284, 288, 293, 295, 312, 328, 330, 331, 332], "argnum": [1, 106, 196, 340], "cotan": 1, "across": [1, 225], "pair": [1, 
144, 239, 256], "repres": [1, 3, 291, 295, 343], "axi": [1, 3, 4, 11, 13, 21, 22, 23, 24, 28, 29, 30, 31, 39, 40, 41, 42, 44, 52, 55, 58, 67, 78, 88, 90, 93, 96, 97, 98, 99, 100, 101, 102, 118, 130, 132, 134, 135, 137, 144, 145, 146, 150, 160, 175, 176, 177, 180, 181, 185, 186, 187, 188, 192, 197, 199, 215, 216, 229, 230, 277, 281, 283, 284, 288, 293, 295, 303, 341], "correspond": [1, 11, 13, 56, 65, 76, 78, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 130, 132, 135, 146, 185, 191, 199, 205, 340], "dimens": [1, 3, 11, 13, 21, 22, 43, 49, 56, 60, 61, 62, 66, 69, 70, 78, 88, 97, 98, 100, 101, 102, 110, 118, 119, 130, 131, 132, 134, 135, 146, 147, 150, 156, 185, 188, 191, 192, 197, 217, 218, 219, 221, 222, 225, 226, 227, 251, 254, 256, 263, 264, 277, 284, 339, 340], "vmap": [1, 340, 342, 344], "print": [1, 2, 3, 4, 6, 204, 205, 206, 213, 336, 339, 340, 341, 342, 343, 344], "ostream": 1, "os": [1, 6], "equival": [1, 26, 46, 57, 81, 104, 187, 224, 250, 252, 253, 255, 257, 259, 261], "check": [1, 6, 59, 239, 340, 341], "bool": [1, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 56, 58, 59, 66, 70, 118, 121, 130, 132, 134, 135, 146, 148, 149, 154, 157, 158, 185, 197, 217, 218, 219, 225, 226, 227, 228, 232, 236, 237, 239, 245, 248, 251, 253, 256, 260, 263, 264, 282, 285, 316, 327], "is_equival": 1, "privat": 1, "fall": 1, "eval": [1, 2, 3, 4, 213, 312, 314, 339, 340, 342, 344], "deriv": [1, 340, 342], "base": [1, 118, 123, 125, 256, 263, 312, 314, 320, 334, 336, 339, 341], "abov": [1, 3, 6, 147, 194, 213, 264, 319, 340, 341, 342, 346], "demonstr": [1, 343], "treat": [1, 97, 98, 100, 101, 187, 264, 339], "paramet": [1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 232, 233, 236, 237, 239, 244, 245, 248, 249, 250, 251, 252, 253, 254, 256, 258, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 308, 309, 312, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 325, 326, 327, 328, 329, 330, 331, 332, 334, 339, 340, 342], "produc": [1, 66, 251, 309], "through": [1, 182, 263, 321, 339, 340, 343], "construct": [1, 4, 77, 105, 141, 201], "its": [1, 6, 131, 145, 156, 193, 203, 206, 213, 253, 318, 319, 320, 343, 346], "type": [1, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 49, 56, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 156, 157, 
158, 159, 160, 161, 162, 163, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 204, 213, 263, 265, 266, 267, 268, 269, 270, 271, 272, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 339, 341], "shape": [1, 3, 4, 46, 59, 63, 66, 68, 69, 70, 78, 90, 93, 96, 99, 100, 101, 105, 115, 131, 141, 142, 149, 150, 151, 153, 154, 157, 158, 161, 188, 198, 200, 201, 202, 213, 215, 216, 217, 218, 219, 221, 222, 226, 228, 229, 230, 239, 265, 266, 267, 268, 269, 270, 271, 272, 284, 295, 314, 339, 340, 341, 344, 346], "pass": [1, 3, 4, 46, 57, 143, 144, 196, 203, 204, 205, 213, 237, 248, 249, 250, 253, 258, 339, 342], "re": [1, 4, 6, 309], "now": [1, 3, 6, 253, 339, 343], "promot": 1, "dtype": [1, 3, 14, 25, 32, 56, 89, 102, 105, 109, 118, 119, 120, 141, 151, 153, 154, 157, 158, 193, 201, 209, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 328, 329, 330, 331, 332, 339, 340, 341, 343, 344, 345], "promoted_dtyp": 1, "promote_typ": 1, "float32": [1, 14, 89, 109, 118, 119, 120, 141, 151, 153, 157, 158, 193, 201, 209, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 328, 329, 330, 331, 332, 339, 340, 341, 342, 343, 344, 345], "non": [1, 6, 246, 296, 312], "point": [1, 2, 3, 6, 104, 148, 209], "out_dtyp": 1, "is_floating_point": 1, "cast": [1, 32, 99, 100, 101, 121, 232, 343], "up": [1, 3, 253, 339], "determin": [1, 78, 243, 345], "x_cast": 1, "astyp": [1, 3, 232, 343], "y_cast": 1, "broadcasted_input": 1, "broadcast_arrai": 1, "out_shap": 1, "0": [1, 2, 3, 4, 6, 7, 14, 47, 52, 58, 67, 68, 69, 70, 77, 78, 89, 102, 106, 118, 119, 144, 149, 153, 158, 160, 162, 177, 181, 193, 194, 195, 196, 197, 199, 204, 213, 215, 216, 217, 218, 219, 220, 221, 222, 224, 225, 226, 227, 229, 230, 252, 255, 256, 260, 261, 262, 263, 265, 266, 267, 268, 269, 270, 271, 272, 273, 275, 276, 278, 279, 282, 284, 286, 287, 291, 294, 295, 297, 298, 299, 300, 305, 306, 309, 312, 315, 316, 318, 319, 320, 321, 323, 326, 327, 328, 329, 330, 331, 332, 336, 339, 340, 341, 342, 343, 344, 345], "unique_ptr": 1, "make_uniqu": 1, "to_stream": 1, "handl": [1, 213, 339], "resolv": 1, "No": [1, 3], "happen": [1, 3, 263, 314, 339, 342], "alon": [1, 343], "effect": [1, 221, 339, 342], "onli": [1, 3, 5, 6, 59, 68, 69, 70, 118, 147, 209, 213, 236, 237, 239, 245, 248, 249, 250, 312, 339, 340, 345, 346], "execut": [1, 6, 60, 61, 62, 343, 346], "depend": [1, 2, 56, 118, 341, 345, 346], "devic": [1, 5, 6, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 197, 200, 201, 202, 207, 346, 347], "specifi": [1, 14, 32, 69, 70, 78, 97, 98, 105, 106, 118, 120, 137, 141, 150, 160, 186, 187, 188, 191, 192, 196, 199, 201, 217, 262, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 306, 340, 346], "memori": [1, 5, 263, 
312, 316, 339, 342, 343], "ha": [1, 3, 4, 5, 56, 66, 78, 96, 97, 99, 100, 101, 106, 150, 217, 228, 312, 314, 339, 341, 342, 344, 346], "been": [1, 3, 342], "try": [1, 6], "naiv": [1, 340], "gener": [1, 2, 14, 70, 89, 97, 98, 120, 149, 153, 154, 157, 158, 263, 336, 339, 341, 342, 347], "version": [1, 6, 76, 126, 130, 147, 175, 199, 336, 340, 341], "declar": 1, "member": [1, 213, 242, 246], "method": [1, 3, 7, 8, 25, 207, 213, 243, 263, 312, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 334], "each": [1, 49, 76, 86, 131, 144, 147, 148, 150, 160, 167, 168, 177, 192, 199, 200, 221, 222, 223, 225, 256, 263, 282, 284, 336, 339, 342], "find": [1, 2, 6], "pointwis": 1, "captur": [1, 66, 213, 339], "templat": 1, "axpby_impl": 1, "typenam": 1, "t": [1, 3, 84, 148, 196, 213, 215, 229, 315, 316, 317, 318, 319, 320, 321, 326, 327, 339, 340, 346], "readi": 1, "fill": [1, 105, 142, 193, 202, 265, 266, 267, 268, 269, 271, 272], "malloc_or_wait": 1, "synchron": [1, 339], "avail": [1, 2, 3, 4, 6, 8, 209, 346], "There": [1, 213, 264, 339], "wait": [1, 3], "here": [1, 3, 339, 340, 342, 345, 346], "request": 1, "pressur": 1, "condit": [1, 200, 346], "set_data": 1, "nbyte": 1, "collect": [1, 205, 338], "pointer": 1, "x_ptr": 1, "y_ptr": 1, "out_ptr": 1, "relev": 1, "static_cast": 1, "size_t": 1, "out_idx": 1, "size": [1, 3, 4, 49, 69, 76, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 105, 109, 118, 147, 148, 150, 161, 177, 180, 213, 215, 216, 218, 219, 223, 226, 229, 230, 253, 264, 316, 342, 343], "map": [1, 4, 121, 205, 223, 232], "linear": [1, 3, 4, 5, 205, 213, 224, 239, 253, 255, 257, 259, 264, 273, 274, 275, 276, 277, 279, 298, 299, 300, 302, 309, 312, 323, 331, 339], "indic": [1, 12, 21, 22, 23, 24, 106, 111, 112, 113, 114, 177, 187, 188, 196, 245, 247, 284, 291, 330, 341], "offset": [1, 3, 78], "x_offset": 1, "elem_to_loc": 1, "stride": [1, 68, 69, 70, 215, 216, 218, 219, 229, 230, 256, 341], "y_offset": 1, "contigu": 1, "regularli": 1, "default": [1, 6, 11, 12, 13, 14, 21, 22, 23, 24, 59, 66, 67, 68, 69, 70, 74, 75, 76, 77, 78, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 106, 109, 118, 119, 120, 121, 130, 132, 134, 135, 141, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 160, 161, 162, 169, 170, 176, 177, 180, 181, 183, 185, 191, 192, 193, 194, 195, 196, 197, 199, 201, 209, 215, 216, 217, 218, 219, 226, 228, 229, 230, 232, 237, 239, 245, 248, 251, 252, 253, 256, 260, 261, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 277, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 312, 315, 316, 317, 318, 319, 320, 321, 326, 327, 336, 338, 339, 340, 343, 345, 347], "row": [1, 89, 109, 147, 193], "major": 1, "henc": [1, 147, 339], "doesn": [1, 213], "addit": [1, 3, 10, 121, 217, 225, 227, 251, 254, 312, 340], "abl": [1, 147], "all": [1, 4, 6, 12, 23, 60, 61, 62, 66, 69, 70, 89, 92, 95, 98, 101, 131, 144, 145, 180, 213, 232, 233, 237, 240, 241, 242, 246, 248, 251, 253, 260, 263, 264, 309, 312, 334, 336, 339, 341, 342, 344, 347], "incom": 1, "accordingli": 1, "dispatch": 1, "float16": [1, 121, 209, 232, 342, 343], "bfloat16": [1, 343], "complex64": 1, "throw": [1, 66], "error": [1, 6, 84, 85, 177, 224, 253, 274, 275, 276, 290, 292, 340, 343], "encount": [1, 340], "unexpect": [1, 14], "regist": [1, 4], "op": [1, 143, 237, 342], "assert": 1, "2": [1, 2, 3, 4, 69, 77, 78, 84, 91, 94, 96, 97, 98, 99, 100, 101, 102, 118, 119, 125, 131, 147, 156, 191, 193, 194, 195, 209, 213, 215, 216, 219, 224, 229, 230, 254, 260, 264, 265, 266, 267, 268, 269, 270, 271, 272, 
275, 284, 285, 287, 294, 295, 309, 312, 315, 317, 318, 319, 323, 326, 339, 340, 341, 342, 343, 344, 345, 346], "1": [1, 3, 4, 14, 23, 24, 68, 69, 70, 77, 78, 90, 91, 93, 94, 96, 97, 98, 99, 100, 101, 102, 110, 118, 119, 131, 143, 145, 147, 150, 153, 158, 171, 176, 187, 196, 209, 213, 215, 216, 217, 218, 219, 220, 221, 222, 224, 225, 226, 227, 228, 229, 230, 252, 254, 256, 260, 262, 264, 266, 267, 268, 269, 270, 271, 272, 273, 275, 276, 277, 280, 281, 282, 283, 284, 285, 286, 287, 288, 290, 291, 293, 294, 295, 300, 301, 303, 304, 306, 309, 312, 314, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 328, 329, 330, 331, 332, 339, 340, 341, 343, 344, 345, 346], "correct": [1, 6, 318, 319, 320, 341, 342], "els": [1, 3, 213, 237, 342], "float16_t": 1, "bfloat16_t": 1, "complex64_t": 1, "runtime_error": 1, "support": [1, 3, 5, 6, 12, 68, 69, 70, 102, 119, 121, 131, 147, 340, 341, 343, 345], "have": [1, 3, 6, 12, 59, 60, 61, 62, 97, 98, 100, 101, 131, 150, 204, 251, 258, 321, 323, 338, 339, 341, 342, 346], "rememb": 1, "3": [1, 3, 6, 102, 118, 119, 264, 267, 269, 278, 316, 321, 336, 339, 341, 343, 344], "complic": 1, "keep": [1, 11, 13, 21, 22, 130, 132, 134, 135, 146, 185, 197, 213, 236, 340, 342], "mind": [1, 3], "half": [1, 14, 154, 158, 256, 342], "precis": [1, 3, 213, 224, 322, 339], "direct": [1, 3, 234, 321, 346], "fix": [1, 3, 6, 342], "possibl": [1, 3, 131, 177, 223, 339, 341, 346], "due": 1, "transpos": [1, 3, 26, 148], "aren": 1, "guarante": 1, "fit": [1, 147, 346], "requir": [1, 3, 213, 342, 343], "column": [1, 89, 109, 147], "inplac": 1, "expect": [1, 3, 218, 219, 220, 221, 222, 260, 263, 285, 339, 341], "answer": 1, "copi": [1, 3, 5, 145, 176, 343], "simpli": [1, 3, 6, 255, 273, 279, 298, 307, 312, 339, 340], "catlas_saxpbi": 1, "axpby_impl_acceler": 1, "first": [1, 2, 3, 4, 6, 78, 102, 106, 127, 129, 131, 145, 156, 186, 191, 196, 204, 213, 216, 225, 230, 264, 283, 291, 316, 318, 319, 320, 323, 339, 340, 343, 346], "mode": [1, 71, 235, 245, 247, 264, 268, 269], "i": [1, 3, 115, 118, 213, 218, 219, 221, 222, 237, 290, 319, 330, 339, 340], "e": [1, 4, 6, 84, 115, 171, 217, 218, 219, 221, 222, 225, 226, 227, 237, 254, 280, 281, 303, 308, 314, 317, 339, 342, 347], "match": [1, 6, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 239, 264, 284, 341, 343], "transposit": 1, "data_s": 1, "items": 1, "flag": [1, 339, 343], "copy_inplac": 1, "copytyp": 1, "n": [1, 3, 25, 68, 69, 70, 89, 90, 92, 93, 95, 96, 99, 101, 109, 193, 197, 215, 216, 217, 218, 219, 221, 222, 229, 230, 264, 290, 295], "incx": 1, "inci": 1, "great": 1, "But": [1, 346], "criteria": 1, "luckili": [1, 342], "alwai": [1, 204, 340], "With": 1, "final": [1, 2, 3, 4, 331], "singl": [1, 4, 86, 115, 121, 144, 198, 216, 230, 339, 341, 345], "row_contigu": 1, "col_contigu": 1, "common": [1, 314, 339, 342], "hit": 1, "mileston": 1, "enough": [1, 342], "run": [1, 3, 4, 5, 6, 7, 143, 207, 217, 232, 315, 316, 318, 319, 320, 339, 342, 346, 347], "If": [1, 3, 6, 11, 12, 13, 14, 21, 22, 23, 24, 56, 59, 65, 67, 71, 77, 78, 86, 99, 100, 101, 104, 105, 106, 118, 121, 130, 131, 132, 134, 135, 141, 144, 145, 146, 150, 160, 175, 176, 177, 185, 187, 188, 191, 196, 197, 199, 201, 205, 217, 218, 219, 225, 227, 228, 237, 239, 248, 253, 256, 258, 260, 264, 282, 284, 295, 316, 339, 340, 342, 345, 346, 347], "plan": [1, 339], "stop": [1, 3, 14, 120, 182, 340, 341], "enjoi": 1, "speed": 1, "appl": [1, 3, 5, 6, 346], "silicon": [1, 3, 5, 6, 346], "address": 1, "shade": 1, "languag": [1, 209], "kernel": [1, 68, 69, 70, 215, 229, 339, 341], "written": 1, 
"help": [1, 3, 339, 346], "resourc": 1, "walkthrough": 1, "pipelin": 1, "specif": [1, 6, 340], "cpp": 1, "algorithm": [1, 264, 321], "launch": [1, 341], "exactli": [1, 3, 239, 340], "mani": [1, 177, 218, 219, 223, 339, 342], "thread": 1, "pick": 1, "updat": [1, 2, 3, 4, 66, 205, 217, 232, 239, 244, 250, 314, 316, 319, 321, 322, 323, 327, 328, 329, 330, 331, 332, 339, 342], "assign": [1, 312], "axpby_gener": 1, "buffer": [1, 343], "constant": [1, 3, 6, 144, 213, 217, 225, 227, 254, 285, 295, 326, 328, 339, 343], "4": [1, 3, 76, 102, 118, 147, 148, 167, 209, 215, 216, 217, 226, 229, 230, 253, 263, 264, 266, 267, 268, 282, 339, 341, 344, 346], "5": [1, 2, 3, 6, 118, 149, 215, 217, 220, 221, 222, 226, 229, 261, 264, 265, 268, 269, 294, 305, 309, 326, 328, 329, 339, 340, 341], "x_stride": 1, "6": [1, 3, 118, 167, 263, 267, 275, 276, 278, 285, 295, 299, 326, 339, 341, 344], "y_stride": 1, "7": [1, 3, 118, 147, 341], "ndim": [1, 102, 118, 264], "8": [1, 3, 6, 118, 147, 209, 216, 226, 230, 263, 283, 315, 316, 317, 318, 319, 320, 326, 339, 341, 344, 346], "uint": 1, "index": [1, 5, 7, 23, 88, 89, 106, 145, 187, 188, 196, 207], "thread_position_in_grid": 1, "convert": [1, 56, 60, 61, 62, 102, 253, 342, 343, 344], "instanti": [1, 4, 342], "uniqu": [1, 336], "host": 1, "name": [1, 121, 147, 148, 165, 166, 167, 168, 213, 225, 236, 239, 241, 341, 345], "identifi": [1, 204, 338], "instantiate_axpbi": 1, "type_nam": 1, "host_nam": 1, "axpby_general_": 1, "compil": [1, 5, 6, 79, 82, 340, 342], "mlx_ext": 1, "metallib": [1, 6], "see": [1, 3, 4, 6, 8, 27, 28, 29, 30, 31, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 118, 165, 166, 213, 217, 221, 224, 235, 252, 253, 255, 256, 257, 259, 260, 261, 264, 266, 267, 268, 269, 274, 275, 276, 300, 339, 340, 341, 344, 346], "later": [1, 6], "co": [1, 260, 340], "locat": [1, 249, 250, 346], "share": [1, 5, 76, 147, 148], "register_librari": 1, "potenti": 1, "path": [1, 6, 167, 168, 239], "tri": 1, "load": [1, 4, 5, 239], "hasn": 1, "alreadi": [1, 3], "static": [1, 6], "object": [1, 8, 25, 36, 56, 149, 154, 157, 158, 199, 204, 205, 221, 263, 338], "why": [1, 3], "packag": [1, 2, 4, 309], "process": [1, 3, 70, 71, 205, 222, 223, 263, 338], "logic": [1, 127, 128, 129], "grid": 1, "shown": 1, "below": [1, 6, 118, 193, 195, 209, 264, 342], "prepar": [1, 3], "carri": 1, "should": [1, 2, 3, 4, 6, 78, 115, 147, 188, 196, 198, 204, 213, 218, 219, 221, 222, 245, 251, 258, 284, 286, 291, 312, 338, 339, 340, 342, 343, 347], "d": [1, 3, 77, 78, 110, 118, 131, 143, 187, 193, 194, 195, 206, 222, 315, 318, 320, 346], "ostringstream": 1, "kname": 1, "axpby_": 1, "general_": 1, "type_to_nam": 1, "make": [1, 3, 4, 6, 131, 140, 170, 213, 328, 329, 331, 332, 339, 342, 344, 346], "sure": [1, 3, 6, 213, 339], "look": [1, 3], "folder": 1, "get_colocated_mtllib_path": 1, "get_kernel": 1, "str": [1, 71, 106, 118, 121, 164, 165, 166, 167, 168, 196, 204, 206, 232, 233, 236, 237, 239, 241, 243, 248, 264, 268, 269, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295], "encod": [1, 256, 260, 263, 284], "compute_encod": 1, "get_command_encod": 1, "setcomputepipelinest": 1, "those": [1, 3, 213], "nelem": 1, "set_array_buff": 1, "setbyt": 1, "sizeof": 1, "threadgroup": 1, "higher": [1, 110, 291, 340], "than": [1, 3, 56, 71, 78, 81, 107, 108, 116, 117, 131, 205, 256, 262, 264, 291, 294, 306, 316, 321, 339, 340, 346], "max": [1, 118, 133, 229, 230, 252, 278, 283, 285, 286, 291, 295, 297, 299, 316, 320, 339, 340, 346], "allow": [1, 213, 250, 312, 334, 
341, 344], "tgp_size": 1, "min": [1, 118, 136, 252, 278, 297, 299], "maxtotalthreadsperthreadgroup": 1, "3d": [1, 217, 222, 264], "mtl": 1, "group_dim": 1, "grid_dim": 1, "divid": [1, 104, 147], "among": 1, "dispatchthread": 1, "few": [1, 3, 4, 5, 342, 344], "thing": [1, 3], "note": [1, 3, 6, 12, 66, 68, 69, 97, 98, 118, 147, 150, 213, 264, 343, 345], "befor": [1, 3, 6, 23, 145, 236, 263, 323, 341, 342], "move": [1, 137, 346], "track": [1, 213, 217], "activ": [1, 6, 221, 262, 263, 296, 305, 306, 308, 339], "command": [1, 6], "instead": [1, 6, 213, 250, 260, 340, 342], "end_encod": 1, "end": [1, 78, 147, 216, 230, 262, 287, 294, 300, 305, 306, 331], "until": [1, 342, 344], "limit": [1, 65, 341], "flush": 1, "enqueu": 1, "commit": 1, "associ": [1, 167, 168, 342], "suggest": 1, "deeper": 1, "dive": 1, "studi": 1, "come": [1, 3, 340], "far": [1, 314], "built": [1, 6, 263, 342], "includ": [1, 233, 244, 253, 285, 339, 340, 341, 344, 345, 347], "forward": [1, 196, 339, 342], "diff": 1, "push": 1, "along": [1, 21, 22, 66, 67, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 118, 160, 175, 177, 181, 187, 188, 191, 213, 277], "similarli": [1, 6, 131, 340, 342], "scale_arr": 1, "contribut": 1, "tangent_x": 1, "tangent_i": 1, "revers": [1, 192, 260], "arg": [1, 3, 8, 46, 57, 86, 167, 168], "push_back": 1, "fulli": [1, 5, 339, 343, 346], "overal": 1, "directori": [1, 3, 6], "extens": [1, 121, 209, 243, 345], "h": [1, 68, 69, 118, 216, 217, 219, 221, 222, 230, 340, 342], "mlx_sample_extens": 1, "__init__": [1, 3, 4, 7, 8, 25, 207, 213, 312], "py": [1, 3, 6], "cmakelist": 1, "txt": 1, "setup": [1, 2, 4, 6, 339], "hold": [1, 3, 8, 118, 339], "instal": 1, "pybind11": [1, 6], "sinc": [1, 3, 4, 312, 321, 330, 343, 346], "compon": [1, 3], "etc": [1, 147, 213, 264], "pybind11_modul": 1, "m": [1, 6, 89, 118, 193, 215, 216, 229, 230, 315], "doc": [1, 4], "sampl": [1, 2, 3, 120, 149, 150, 151, 154, 157, 158, 266, 267, 268, 269, 271, 272, 285, 291, 295, 336, 339], "_a": 1, "pos_onli": 1, "kw_onli": 1, "none": [1, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 166, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 199, 200, 201, 202, 204, 205, 207, 215, 216, 224, 229, 230, 232, 236, 237, 248, 251, 260, 263, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 316, 334, 341], "r": [1, 3, 119, 196, 221], "pbdoc": 1, "most": [1, 150, 213, 325, 339, 340, 341, 342], "complex": [1, 97, 98, 99, 100, 101, 149, 154, 157, 158, 204, 213, 250, 339, 340], "bell": 1, "whistl": 1, "liter": [1, 264, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295], "string": [1, 343, 345], "modul": [1, 3, 4, 203, 253, 258, 263, 309, 325, 338, 339, 342], "ensur": [1, 6, 290], "caster": 1, "find_packag": 1, "config": 1, "add_librari": 1, "sourc": [1, 137, 192], "target_sourc": 1, "cmake_current_list_dir": 1, "header": 1, 
"target_include_directori": 1, "target_link_librari": 1, "attach": 1, "conveni": [1, 4], "mlx_build_metallib": 1, "target": [1, 196, 282, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 339], "destin": [1, 137], "automat": [1, 5, 121, 344, 345, 346], "practic": [1, 339], "mlx_build_met": [1, 6], "mlx_ext_metallib": 1, "titl": 1, "include_dir": 1, "project_source_dir": 1, "mlx_include_dir": 1, "output_directori": 1, "cmake_library_output_directori": 1, "add_depend": 1, "endif": 1, "pybind11_add_modul": 1, "build_shared_lib": 1, "target_link_opt": 1, "wl": 1, "rpath": 1, "loader_path": 1, "onc": [1, 339], "describ": [1, 342], "util": [1, 3, 5, 6, 167, 213], "__name__": [1, 3], "__main__": [1, 3], "descript": [1, 3, 209], "ext_modul": 1, "cmakeextens": 1, "cmdclass": 1, "build_ext": 1, "cmakebuild": 1, "package_dir": 1, "package_data": 1, "dylib": 1, "zip_saf": 1, "fals": [1, 3, 11, 12, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 59, 66, 70, 118, 121, 130, 132, 134, 135, 146, 185, 197, 200, 204, 205, 209, 225, 226, 228, 237, 239, 248, 251, 253, 256, 260, 263, 264, 282, 285, 316, 327, 343], "python_requir": 1, "even": [1, 3, 66, 339, 342, 343], "though": [1, 3, 339, 342, 343], "j8": 1, "libmlx_ext": 1, "cpython": 1, "3x": 1, "darwin": 1, "pip": [1, 6], "after": [1, 3, 4, 23, 102, 104, 145, 147, 217, 225, 227, 251, 263, 294, 339, 346], "plai": [1, 3], "ones": [1, 3, 142, 167, 193, 249, 250, 253, 341], "b": [1, 3, 10, 12, 59, 80, 81, 83, 104, 107, 108, 110, 116, 117, 118, 126, 127, 129, 131, 133, 136, 138, 143, 147, 184, 191, 196, 228, 264, 277, 340, 341, 342, 343, 344, 345, 346], "f": [1, 2, 4, 118, 213, 319, 339, 343], "item": [1, 2, 3, 4, 205, 342, 343, 344], "true": [1, 2, 3, 12, 59, 66, 118, 121, 148, 175, 200, 204, 205, 209, 213, 217, 218, 219, 225, 226, 227, 228, 236, 237, 239, 245, 248, 253, 256, 260, 263, 264, 282, 290, 316], "quick": [1, 5], "benchmark": [1, 339], "compar": [1, 59, 339], "time": [1, 3, 6, 213, 215, 216, 229, 230, 339, 340, 342, 346], "set_default_devic": 1, "256": [1, 4], "512": [1, 3, 263, 346], "random": [1, 2, 3, 4, 5, 215, 216, 217, 226, 229, 230, 239, 245, 339, 340, 346, 347], "normal": [1, 2, 3, 157, 213, 215, 216, 217, 225, 226, 227, 229, 230, 254, 263, 266, 268, 343, 346], "bench": 1, "warm": [1, 339], "rang": [1, 2, 3, 4, 6, 14, 102, 120, 267, 269, 275, 276, 314, 328, 329, 330, 331, 332, 336, 339, 340, 342, 346], "100": [1, 2, 3, 331, 339, 340, 342, 346], "5000": 1, "simple_tim": 1, "custom_tim": 1, "3f": [1, 4, 339], "custom": [1, 263], "114": 1, "109": 1, "modest": 1, "improv": [1, 3, 315, 316, 317, 318, 319, 320, 326, 339], "awai": [1, 3], "good": [1, 6, 339, 346], "nn": [1, 3, 4, 167, 205, 213, 309, 312, 314, 323, 325, 339, 342], "grad": [1, 2, 4, 196, 314, 322, 339, 340, 341, 342, 344], "full": [1, 4, 46, 57, 71, 175, 249, 250, 285, 339, 342], "implement": [2, 4, 118, 223, 236, 251, 256, 258, 260, 262, 263, 264, 306, 315, 316, 317, 318, 320, 321, 322, 334, 339, 340, 343], "basic": [2, 162, 340], "model": [2, 4, 5, 167, 203, 205, 213, 232, 235, 237, 239, 243, 245, 247, 248, 249, 251, 263, 309, 312, 314, 322, 323, 325, 339, 342], "problem": [2, 4, 213], "metadata": [2, 121, 165, 166], "num_featur": [2, 217], "num_exampl": 2, "1_000": 2, "num_it": 2, "10_000": 2, "iter": [2, 4, 205, 336, 339, 342], "sgd": [2, 4, 314, 321, 323, 328, 329, 332, 339], "lr": [2, 321], "01": [2, 279, 319], "rate": [2, 315, 316, 317, 318, 319, 320, 321, 326, 327], "ll": [2, 4, 287, 339, 340], "synthet": 2, "dataset": [2, 342], "matrix": [2, 76, 77, 89, 109, 118, 
119, 131, 147, 148, 253, 270, 309], "ground": [2, 3, 284, 294], "truth": [2, 284, 294], "w_star": 2, "valu": [2, 3, 9, 12, 14, 21, 22, 36, 56, 59, 65, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 118, 120, 144, 149, 150, 151, 153, 154, 157, 158, 165, 187, 188, 196, 199, 203, 204, 205, 209, 216, 220, 221, 222, 226, 228, 230, 236, 251, 252, 261, 262, 263, 265, 282, 283, 284, 285, 286, 287, 289, 290, 291, 292, 293, 294, 306, 312, 316, 319, 328, 329, 331, 332, 340], "gaussian": [2, 224, 274, 275, 276, 285], "nois": 2, "exampl": [2, 3, 4, 14, 102, 118, 119, 183, 187, 213, 215, 216, 217, 226, 229, 230, 237, 239, 245, 248, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 284, 291, 309, 314, 323, 328, 329, 330, 331, 332, 336, 340, 341, 342, 343, 344, 345], "noisi": 2, "label": [2, 284, 291], "ep": [2, 217, 225, 226, 227, 254, 283, 285, 295, 315, 316, 317, 318, 319, 320, 326], "1e": [2, 4, 12, 217, 225, 226, 227, 254, 283, 285, 295, 315, 316, 317, 318, 319, 320, 323, 326, 328, 329, 330, 331, 332], "us": [2, 3, 4, 5, 6, 14, 76, 79, 81, 102, 118, 119, 131, 147, 148, 160, 161, 204, 213, 216, 221, 223, 224, 228, 230, 232, 236, 243, 249, 250, 251, 253, 256, 260, 263, 264, 268, 269, 275, 276, 283, 309, 312, 314, 315, 316, 318, 319, 320, 321, 322, 323, 334, 336, 338, 339, 340, 341, 344, 346], "weight": [2, 68, 69, 70, 205, 213, 239, 243, 253, 282, 284, 312, 316, 319, 321, 323, 327, 340, 342], "squar": [2, 3, 109, 163, 178, 196, 205, 213, 254, 292, 294, 315, 316, 318, 319, 320, 340, 343], "loss": [2, 4, 196, 213, 314, 339, 340, 342], "loss_fn": [2, 4, 314, 339, 340], "w": [2, 69, 76, 147, 148, 196, 216, 217, 219, 221, 222, 228, 230, 327, 340], "mean": [2, 3, 4, 153, 196, 213, 217, 225, 237, 254, 271, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 339, 340, 343], "grad_fn": [2, 339, 340], "initi": [2, 3, 213, 217, 225, 226, 227, 228, 252, 254, 265, 266, 267, 268, 269, 270, 271, 272, 312, 323, 328, 329, 331, 332, 339, 342], "randomli": [2, 3, 220, 221, 222], "Then": [2, 6], "repeatedli": 2, "_": [2, 3, 213, 328, 329, 330, 331, 332, 336, 339, 342, 346], "verifi": [2, 6], "close": [2, 5, 6, 12], "error_norm": 2, "5f": 2, "someth": [2, 3, 341], "00005": 2, "00364": 2, "complet": [2, 3, 6, 249, 250, 340, 346], "logist": [2, 171, 275, 276, 302], "github": [2, 4, 6, 339], "repo": [2, 4, 6, 339], "enabl": [3, 6, 66, 82, 327], "larg": [3, 213, 251, 290, 339, 342], "ish": 3, "transform": [3, 5, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 203, 213, 217, 225, 227, 228, 236, 237, 248, 253, 256, 341], "compromis": 3, "eas": 3, "llama": 3, "famili": 3, "less": [3, 23, 117, 145, 256, 294], "200": [3, 330], "line": [3, 342, 343], "python": [3, 36, 49, 56, 86, 204, 205, 206, 312, 322, 323, 325, 338, 340, 343], "neural": [3, 5, 223, 266, 267, 296, 309, 312, 326], "network": [3, 5, 217, 221, 223, 266, 267, 309, 312, 326], "build": [3, 5, 268, 312, 339], "concis": 3, "architectur": [3, 6, 213, 250, 346], "notabl": [3, 5], "rope": [3, 213], "posit": [3, 23, 78, 102, 106, 114, 137, 145, 196, 205, 213, 218, 219, 251, 256, 260, 285, 295], "option": [3, 11, 13, 14, 21, 22, 23, 24, 25, 30, 31, 60, 61, 62, 66, 67, 68, 69, 70, 71, 76, 77, 78, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 106, 109, 113, 114, 118, 119, 120, 121, 130, 132, 134, 135, 141, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 160, 161, 166, 175, 176, 177, 180, 181, 185, 187, 188, 191, 192, 193, 194, 195, 196, 197, 199, 201, 204, 205, 215, 216, 217, 218, 219, 228, 229, 230, 232, 
236, 237, 239, 248, 251, 253, 256, 260, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 336, 339, 345, 347], "kei": [3, 149, 150, 151, 153, 154, 156, 157, 158, 204, 205, 236, 237, 248, 251, 323, 336, 338, 340], "cach": [3, 339], "concaten": 3, "project": [3, 251], "llamaattent": 3, "self": [3, 4, 7, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 56, 57, 58, 207, 213, 296, 312], "dim": [3, 223, 225, 226, 227, 251, 254, 256, 260, 263], "num_head": [3, 251, 263], "super": [3, 4, 213, 312], "tradit": [3, 221, 222, 256], "query_proj": 3, "bia": [3, 76, 147, 148, 205, 213, 218, 219, 228, 237, 239, 248, 251, 253, 318, 319, 320, 323, 340], "key_proj": 3, "value_proj": 3, "out_proj": [3, 312], "__call__": [3, 4, 213, 312], "queri": [3, 251], "mask": [3, 245, 251, 341], "extract": [3, 77, 78, 213, 236, 312], "l": [3, 4, 213, 215, 217, 218, 229, 294], "reshap": [3, 118, 264, 341], "combin": 3, "key_cach": 3, "value_cach": 3, "sqrt": [3, 84, 217, 225, 226, 227, 228, 254, 260, 266, 267, 268, 269, 315, 317, 318, 319, 326, 339], "score": [3, 291], "softmax": [3, 213, 281, 284], "values_hat": 3, "rm": [3, 6, 316], "swiglu": 3, "rmsnorm": [3, 213], "llamaencoderlay": 3, "mlp_dim": [3, 263], "norm1": 3, "norm2": 3, "linear1": 3, "linear2": 3, "linear3": 3, "sigmoid": [3, 213, 259, 275, 276, 280, 302], "instanc": [3, 147, 206, 213, 226, 232, 233, 234, 237, 240, 241, 248, 250, 258, 312, 343], "embed": [3, 213, 256, 260, 283], "emb": [3, 223, 260], "token": [3, 223], "num_lay": [3, 4, 314], "vocab_s": 3, "norm": [3, 225, 295, 320, 321], "multiheadattent": [3, 213], "create_additive_causal_mask": 3, "list": [3, 8, 11, 13, 25, 28, 29, 39, 40, 41, 42, 44, 52, 55, 56, 58, 60, 61, 62, 63, 66, 67, 70, 86, 88, 91, 92, 94, 95, 97, 98, 100, 101, 105, 106, 115, 118, 130, 132, 134, 135, 141, 144, 146, 149, 150, 151, 153, 154, 157, 158, 161, 165, 175, 177, 180, 181, 185, 191, 192, 196, 197, 198, 201, 204, 206, 213, 237, 239, 240, 241, 242, 246, 248, 249, 250, 312, 318, 319, 320, 321, 330, 338, 339, 340, 342], "still": [3, 6, 118, 339, 342], "consid": [3, 12, 59, 204, 205, 225, 338], "train": [3, 4, 213, 217, 220, 221, 222, 235, 237, 248, 266, 267], "ignor": [3, 65, 66, 86, 316], "whatsoev": 3, "rest": [3, 205, 256], "subsect": 3, "prompt": 3, "autoregress": 3, "yield": [3, 4, 336], "temp": 3, "causal": 3, "save": [3, 5, 121, 147, 165, 166, 167, 168, 243, 342], "append": [3, 131, 339, 342], "store": 3, "per": [3, 4, 76, 147, 148, 217, 225, 226, 227, 254, 334, 339, 342], "care": [3, 342], "last": [3, 24, 56, 92, 95, 97, 98, 100, 101, 102, 110, 119, 131, 150, 176, 191, 218, 219, 221, 222, 225, 264, 343], "logit": [3, 150, 282, 284, 339], "next": [3, 4], "categor": 3, "lazili": [3, 213], "noth": [3, 213, 342], "yet": [3, 118, 213, 312, 323, 340, 341, 342, 344], "forc": [3, 4, 213, 344], "choos": [3, 256], "pars": 3, "feed": 3, "loop": [3, 4, 339, 340, 342], "unsqueez": 3, "sequenc": [3, 217, 218, 263, 336, 346], "length": [3, 180, 217, 218, 330], "len": [3, 92, 95, 98, 101, 330], "overwrit": 3, "discard": [3, 204], "old": 3, "moment": [3, 70, 316, 318, 319, 320], "anymor": 3, "everyth": 3, "small": [3, 217, 225, 227, 254, 285, 290, 295, 339, 346], "10": [3, 4, 123, 162, 167, 205, 213, 239, 309, 330, 332, 339, 341], "12": [3, 330], "8192": 3, "1024": 3, "actual": [3, 14, 239, 312, 342], "materi": [3, 5], "could": [3, 213], "20_000": 3, "machin": 
[3, 5, 6, 326], "8gb": 3, "ram": 3, "32": [3, 4, 147, 148, 209, 216, 230, 339], "44": 3, "doubl": 3, "bracket": 3, "becaus": [3, 213, 342], "batch": [3, 131, 217, 218, 219, 221, 222, 251, 264, 342], "zip": [3, 4], "haven": 3, "anyth": [3, 196, 342], "result": [3, 14, 56, 66, 76, 110, 118, 121, 131, 143, 148, 160, 162, 181, 191, 200, 205, 260, 339, 340, 343], "similar": [3, 205, 249, 250, 251, 283, 343, 345], "runtim": [3, 339], "section": [3, 6, 177, 295, 339, 340], "access": [3, 36, 213, 312, 323, 342, 346], "origin": [3, 78, 217, 244, 266, 267, 268, 269, 315, 316, 317, 318, 320, 321, 343], "sentencepiec": 3, "pytorch": [3, 5, 225, 340], "compat": [3, 150, 345], "npz": [3, 121, 167, 168, 239, 243, 345], "file": [3, 6, 121, 164, 165, 166, 167, 168, 239, 243, 340, 345], "directli": 3, "argpars": 3, "itertool": [3, 205], "starmap": [3, 205], "np": [3, 4, 343, 344], "torch": [3, 343], "map_torch_to_mlx": 3, "tok_embed": 3, "elif": 3, "replac": [3, 249, 250, 263, 294], "attention_norm": 3, "ffn_norm": 3, "wq": 3, "wk": 3, "wv": 3, "wo": 3, "w1": 3, "w2": 3, "w3": 3, "ffn": 3, "separ": [3, 46, 57, 225, 291], "submodul": [3, 4, 213, 237, 238, 248, 250], "feed_forward": 3, "parser": 3, "argumentpars": 3, "add_argu": 3, "torch_weight": 3, "output_fil": 3, "parse_arg": 3, "state": [3, 4, 213, 314, 323, 336, 339], "savez": [3, 243, 345], "k": [3, 77, 89, 193, 194, 195, 215, 228, 229, 237], "v": [3, 71, 213, 237, 343], "left": [3, 118, 147, 215, 216, 224, 229, 230, 256, 264, 275, 276, 285, 287, 295], "disk": 3, "text": [3, 215, 216, 229, 230, 231, 262, 266, 267, 268, 269, 278, 285, 286, 287, 290, 291, 294, 296, 297, 300, 301, 305, 306, 316, 321], "format": [3, 121, 164, 165, 166, 167, 168, 343], "oper": [3, 5, 7, 32, 60, 61, 62, 70, 175, 182, 188, 207, 213, 263, 321, 339, 340, 341, 342, 343, 344, 346, 347], "dictionari": [3, 66, 121, 165, 166, 204, 213, 236, 244, 249, 250, 324, 338, 345], "represent": [3, 147, 204, 206], "tree_unflatten": 3, "helper": [3, 339], "weight_fil": 3, "incur": 3, "sever": [3, 68, 69, 70, 167, 168, 339, 345], "futur": [3, 253, 341, 342], "pth": 3, "current": [3, 5, 6, 68, 69, 70, 147, 213, 316, 342], "around": 3, "m1": [3, 339, 340, 346], "ultra": 3, "7b": 3, "me": 3, "ishmael": 3, "year": 3, "ago": 3, "never": [3, 342], "long": 3, "info": [3, 6], "247": 3, "press": [3, 118], "enter": 3, "littl": 3, "monei": 3, "my": [3, 6], "purs": 3, "greater": [3, 23, 108, 145, 262, 306], "consequ": 3, "walk": 3, "down": 3, "gower": 3, "street": 3, "afternoon": 3, "heavi": 3, "rain": 3, "saw": [3, 340], "off": [3, 6, 342], "man": 3, "rag": 3, "who": 3, "sat": 3, "upon": [3, 205], "hi": 3, "bundl": 3, "hard": 3, "wet": 3, "he": [3, 268, 269], "were": [3, 346], "cry": 3, "watch": [3, 339], "him": 3, "observ": 3, "numer": [3, 118, 126, 130, 175, 217, 225, 226, 227, 254, 283, 285, 295, 315, 316, 317, 318, 319, 320, 326, 339, 342], "crowd": 3, "wa": [3, 342], "hurri": 3, "437": 3, "330": 3, "second": [3, 78, 127, 129, 131, 186, 196, 216, 230, 283, 291, 316, 318, 319, 320, 340, 346], "spent": 3, "amount": [3, 215, 229], "39": 3, "ms": [3, 339], "By": [3, 340, 343], "bigger": [3, 316], "remain": [3, 196, 220, 221, 222], "almost": 3, "nobodi": 3, "took": 3, "least": [3, 60, 61, 62, 65, 119, 147], "notic": [3, 340, 345], "distanc": [3, 295], "had": 3, "doubt": 3, "minut": 3, "straight": 3, "slowli": 3, "rais": [3, 118, 177, 239], "ey": 3, "speak": [3, 118], "resum": 3, "postur": 3, "stood": 3, "feel": 3, "pain": 3, "heart": 3, "smile": 3, "face": 3, "am": 3, "someon": 3, "three": [3, 62], 
"quarter": 3, "hour": 3, "made": 3, "immedi": [3, 232], "repli": 3, "again": [3, 6, 213, 339], "hand": [3, 340, 342], "did": 3, "accustom": 3, "thu": [3, 213], "question": [3, 342], "reason": [3, 341], "tell": [3, 339, 343], "understand": [3, 266, 267], "579": 3, "690": 3, "num": [3, 120, 156], "500": [3, 346], "628": 3, "went": 3, "nervou": 3, "trembl": 3, "told": 3, "And": 3, "perhap": 3, "surpris": 3, "matter": [3, 213], "shall": 3, "anyhow": 3, "friend": 3, "ye": 3, "slight": [3, 342], "kind": 3, "longer": [3, 71, 340], "soon": 3, "unless": [3, 12, 118, 312], "unlik": [3, 12, 221, 222, 244], "strang": 3, "amus": 3, "That": 3, "secret": 3, "disappoint": 3, "mine": 3, "cannot": [3, 65, 341, 343], "happi": 3, "ask": 3, "Is": [3, 260, 263], "shop": 3, "bui": 3, "food": 3, "633": 3, "21": [3, 332], "475": 3, "su": 3, "j": [3, 6, 118, 221, 317, 318, 320], "lu": 3, "pan": 3, "murtadha": 3, "wen": 3, "liu": 3, "2021": 3, "roform": [3, 256], "enhanc": [3, 256, 342], "rotari": [3, 256], "arxiv": [3, 225, 226, 227, 231, 254, 276, 296, 315, 321], "preprint": [3, 315, 321], "2104": 3, "09864": 3, "zhang": 3, "sennrich": 3, "2019": [3, 319], "root": [3, 163, 178, 254], "advanc": [3, 339], "inform": [3, 4, 6, 165, 166, 213, 217, 224, 251, 340, 346], "system": [3, 6], "shazeer": 3, "2020": 3, "glu": [3, 213], "variant": [3, 294, 320], "2002": 3, "05202": 3, "classifi": 4, "mnist": 4, "As": [4, 187, 213, 339], "mlp": [4, 213, 263, 314], "inherit": [4, 338], "standard": [4, 36, 56, 131, 151, 153, 263, 266, 268, 271, 344], "idiom": [4, 339], "input_dim": [4, 213, 228, 253], "hidden_dim": [4, 312, 314], "output_dim": [4, 213, 228, 253], "layer_s": 4, "idim": 4, "odim": 4, "maximum": [4, 21, 65, 213, 255, 260, 275, 276, 279, 298, 312, 342], "cross": [4, 70, 282, 284], "entropi": [4, 282, 284], "sub": [4, 78, 156], "commonli": [4, 249, 309, 339], "cross_entropi": [4, 213], "accuraci": 4, "valid": [4, 71, 102, 199, 204, 237, 248, 338], "eval_fn": 4, "argmax": 4, "loader": 4, "num_class": [4, 314], "batch_siz": [4, 314], "num_epoch": [4, 314], "learning_r": [4, 314, 315, 316, 317, 318, 319, 320, 321, 323, 326, 327, 328, 329, 330, 331, 332, 339], "train_imag": [4, 314], "train_label": [4, 314], "test_imag": 4, "test_label": 4, "shuffl": 4, "minibatch": 4, "batch_iter": [4, 314], "perm": 4, "permut": 4, "id": [4, 6], "put": [4, 339], "trainabl": [4, 203, 213, 312], "loss_and_grad_fn": [4, 314, 339, 340], "value_and_grad": [4, 213, 249, 312, 314, 325, 339, 340, 343, 344], "epoch": 4, "test": [4, 6], "confus": 4, "decent": 4, "95": 4, "brought": 5, "research": 5, "except": [5, 89, 96, 97, 99, 100, 101, 225, 239, 341, 343], "featur": [5, 68, 69, 70, 217, 225, 226, 227, 228, 253, 254, 256, 263, 264, 339, 342], "main": [5, 78, 89, 205, 213], "differ": [5, 184, 294, 340], "lazi": [5, 312, 344], "multi": [5, 218, 219, 341, 343], "cpu": [5, 119, 339, 346], "gpu": [5, 339, 341, 346], "inspir": 5, "jax": [5, 336], "arrayfir": 5, "unifi": 5, "live": [5, 346], "guid": 5, "convers": 5, "regress": [5, 290], "layer": [5, 213, 215, 216, 221, 222, 225, 227, 228, 229, 230, 245, 250, 253, 258, 263, 308, 312], "perceptron": 5, "llm": 5, "infer": [5, 105, 121], "fft": 5, "algebra": 5, "tree": [5, 66, 86, 106, 196, 199, 204, 205, 206, 322, 323, 325, 334, 340], "develop": [5, 6], "document": [5, 46, 57, 165, 166, 339, 340, 341], "pypi": 6, "meet": 6, "seri": 6, "chip": 6, "nativ": 6, "maco": 6, "13": 6, "recommend": [6, 321], "14": 6, "sonoma": 6, "conda": 6, "forg": 6, "distribut": [6, 149, 150, 151, 153, 157, 158, 228, 266, 267, 
268, 269, 271, 272, 285, 288, 293, 295, 309], "probabl": [6, 154, 220, 221, 222, 253, 282, 284, 288, 346], "platform": 6, "processor": 6, "arm": [6, 209], "i386": 6, "switch": 6, "17": 6, "g": [6, 118, 147, 308, 326, 327, 342, 347], "clang": 6, "cmake": 6, "24": 6, "xcode": 6, "15": [6, 118, 339], "environ": [6, 79, 82], "via": [6, 322, 325, 342, 343], "rosetta": 6, "unam": 6, "p": [6, 149, 213, 220, 221, 222, 295, 318, 320], "clone": 6, "git": 6, "com": 6, "ml": 6, "explor": 6, "cd": 6, "brew": 6, "global": [6, 79, 82, 155, 336, 339], "env": 6, "cmake_build_parallel_level": 6, "edit": [6, 250], "unittest": 6, "discov": 6, "stub": 6, "dev": 6, "generate_stub": 6, "mkdir": 6, "either": [6, 10, 46, 56, 57, 65, 80, 81, 83, 104, 107, 108, 116, 117, 118, 126, 131, 133, 136, 138, 184, 196, 216, 230, 258, 264, 268, 269], "libmlx": 6, "preprocessor": 6, "metal_path": 6, "mlx_build_test": 6, "ON": 6, "mlx_build_exampl": 6, "mlx_build_benchmark": 6, "mlx_build_python_bind": 6, "multipl": [6, 131, 138, 147, 148, 251, 260, 329, 330, 332, 339, 342, 345], "wish": 6, "variabl": [6, 66, 79, 82, 106, 115, 196, 198, 199], "export": 6, "developer_dir": 6, "app": 6, "content": [6, 236, 339], "sdk": 6, "xcrun": 6, "macosx": 6, "show": [6, 209, 339], "unabl": 6, "tool": 6, "select": [6, 200, 232, 236], "sudo": 6, "ouptut": 6, "finder": 6, "iterm": 6, "termin": 6, "click": 6, "uncheck": 6, "window": [6, 215, 216, 229, 230], "restart": 6, "grep": 6, "cmake_host_system_processor": 6, "arm64": 6, "x86_64": 6, "wipe": 6, "cahc": 6, "rf": 6, "devicetyp": 7, "attribut": [7, 8, 25, 207, 244, 312, 334], "kwarg": [8, 167, 168, 347], "union": [9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 31, 32, 33, 35, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 50, 52, 53, 54, 55, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 76, 77, 78, 80, 81, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 165, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 200, 201, 202, 215, 216, 219, 229, 230, 237, 239, 248, 264, 327], "absolut": [9, 12, 275, 276, 294], "semant": [10, 63, 80, 81, 83, 107, 108, 116, 117, 126, 131, 133, 136, 138, 184, 346], "keepdim": [11, 13, 21, 22, 28, 29, 30, 31, 39, 40, 41, 42, 44, 55, 58, 118, 130, 132, 134, 135, 146, 175, 185, 197], "reduct": [11, 13, 130, 132, 135, 146, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295], "reduc": [11, 13, 21, 22, 130, 132, 134, 135, 146, 185, 197, 217, 263, 290], "unspecifi": [11, 13, 14, 21, 22, 23, 24, 67, 105, 130, 132, 134, 135, 141, 145, 146, 160, 175, 176, 185, 187, 197, 201, 347], "entir": [11, 13, 21, 22, 130, 132, 134, 135, 146, 185, 197, 221, 222], "singleton": [11, 13, 21, 22, 130, 131, 132, 134, 135, 146, 185, 197], "rtol": 12, "05": [12, 217, 225, 226, 227, 254], "atol": 12, "08": [12, 283, 317, 318, 319, 320, 326], "equal_nan": [12, 59], "approxim": [12, 224, 274, 275, 276], "comparison": [12, 83, 107, 108, 116, 117], "infinit": 12, "equal": [12, 23, 59, 89, 108, 117, 145, 154, 177, 226, 228], "sign": [12, 209, 321], "nan": [12, 59, 112], "ab": [12, 118, 196, 225, 226, 227, 231, 254, 276, 296, 339], 
"array_equ": 12, "rel": [12, 316, 339], "toler": 12, "boolean": [12, 59, 111, 112, 113, 114, 127, 128, 129, 209, 247, 341], "interv": [14, 120, 154, 158], "increment": 14, "otherwis": [14, 70, 204, 205, 237, 239, 248, 262, 263, 264, 282, 287, 294, 305, 306, 342, 343], "int32": [14, 102, 118, 154, 209, 264, 341, 344], "convent": [14, 71, 264, 319], "lead": [14, 339], "fraction": 14, "integr": [14, 187, 342], "invers": [15, 16, 17, 18, 19, 20, 85, 93, 94, 95, 96, 97, 98], "cosin": [15, 16, 72, 73, 283, 328, 330, 340], "hyperbol": [16, 18, 20, 73, 174, 190, 307], "sine": [17, 18, 173, 174, 340], "uint32": [21, 22, 23, 24, 150, 209], "minimum": [22, 65, 260, 283], "kth": [23, 145], "partit": 23, "order": [23, 70, 118, 145, 147, 213, 225, 249, 258, 323, 339, 340], "undefin": [23, 145, 341], "sort": [23, 24, 145], "flatten": [23, 24, 118, 143, 145, 160, 176, 187, 188, 204], "dimension": [25, 90, 91, 92, 93, 94, 95, 99, 100, 101, 215, 216, 217, 218, 219, 223, 228, 229, 230, 253, 260, 341, 343], "val": [25, 105], "tupl": [25, 46, 49, 57, 67, 69, 70, 81, 86, 88, 115, 118, 119, 144, 147, 161, 180, 196, 198, 204, 205, 206, 215, 216, 219, 229, 230, 239, 241, 258, 264, 316, 318, 319, 320, 321, 338, 340], "ndarrai": [25, 341, 342, 344], "properti": [26, 34, 43, 49, 51, 244, 247, 324, 340], "argument": [26, 46, 57, 66, 86, 106, 196, 205, 213, 264, 336, 340, 345, 346, 347], "decim": [47, 162], "indices_or_sect": [52, 177], "nest": [56, 66, 213, 312, 338, 340], "ddof": [58, 197], "a_min": 65, "a_max": 65, "edg": [65, 144, 264, 339], "At": 65, "anoth": [65, 131, 184, 200, 213, 232, 339, 340, 341, 346], "fun": [66, 106, 115, 196, 198, 199, 339, 341, 342, 346], "dict": [66, 86, 121, 165, 166, 167, 242, 246, 249, 250, 312, 322, 323, 325, 338, 340, 345], "dure": [66, 220, 221, 222, 264, 343], "arbitrarili": [66, 213, 338, 340, 344], "leaf": [66, 204, 205, 236], "node": [66, 86, 199], "pad": [68, 69, 70, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 215, 216, 218, 219, 229, 230], "dilat": [68, 69, 70], "group": [68, 69, 70, 76, 147, 148, 225, 253], "1d": [68, 70, 71, 165, 188], "convolut": [68, 69, 70, 71, 218, 219, 221, 222], "channel": [68, 69, 70, 217, 218, 219, 221, 222], "c_in": [68, 69, 70], "c_out": [68, 69, 70], "convolv": [68, 69, 70], "2d": [69, 70, 78, 147, 217, 221], "spatial": [69, 70, 215, 225, 229, 264], "symmetr": 69, "discret": [71, 90, 91, 92, 93, 94, 95, 99, 100, 101, 223], "swap": [71, 186, 250, 253], "conv": 71, "filter": [71, 218, 219, 232, 236], "flip": [70, 71], "signal": [71, 264], "bias": [76, 147, 148, 237, 248, 251], "group_siz": [76, 147, 148, 253], "64": [76, 147, 148, 209, 253], "configur": 76, "formal": [76, 147], "notat": [76, 204, 241], "quantiz": [76, 121, 148, 253], "w_i": [76, 147], "hat": [76, 147], "occupi": [76, 147, 148], "diagon": [77, 89, 193, 194, 195], "th": [77, 89, 330], "axis1": [78, 186], "axis2": [78, 186], "subarrai": [78, 177], "remov": [78, 131, 150, 180, 284], "insert": [78, 88, 346], "neg": [78, 102, 113, 229, 230, 251, 285, 293, 295, 341], "taken": [78, 187], "disabl": [79, 339], "mlx_disable_compil": [79, 82, 339], "divis": [80, 104, 147], "quotient": [80, 81, 104], "remaind": 81, "fuction": 81, "faster": [81, 274, 339, 340], "mathrm": [84, 171, 226], "frac": [84, 147, 171, 215, 216, 217, 220, 221, 222, 225, 226, 227, 228, 229, 230, 254, 266, 267, 268, 269, 283, 285, 287, 290, 301, 303, 315, 317, 318, 319, 320, 326], "pi": [84, 260, 340], "int_0": 84, "dt": 84, "erf": [85, 339], "exponenti": [87, 257, 273, 300, 329], "ident": [89, 182, 213, 245], 
"zero": [89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 193, 194, 195, 202, 213, 215, 216, 220, 221, 222, 239, 265, 266, 267, 268, 269, 270, 271, 272, 309, 316, 341], "whose": [89, 203], "One": [90, 93, 99, 163, 339, 340], "fourier": [90, 91, 92, 93, 94, 95, 99, 100, 101], "truncat": [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 157], "dft": [90, 91, 92, 93, 94, 95, 99, 100, 101], "rfft": 96, "real": [96, 97, 98, 99, 100, 101], "rfft2": 97, "rfftn": 98, "silent": [99, 100, 101], "start_axi": 102, "end_axi": 102, "inclus": 102, "outsid": 102, "clamp": 102, "integ": [104, 118, 144, 147, 148, 149, 154, 177, 191, 199, 209, 223, 330, 341], "floor": 104, "argnam": [106, 196], "neither": [106, 196], "keyword": [106, 167, 168, 196, 205, 213, 336, 345, 347], "strict": [107, 116, 237, 239, 248], "ordinari": 110, "inifn": 111, "infin": [111, 113, 114, 229, 230, 320], "ord": 118, "tabl": [118, 209, 223], "frobeniu": 118, "matric": [118, 119], "strictli": 118, "mathemat": 118, "variou": 118, "purpos": 118, "calcul": [118, 285, 291, 316], "fro": 118, "inf": [118, 251], "largest": 118, "sing": 118, "smallest": 118, "singular": 118, "nuclear": 118, "_f": 118, "sum_": [118, 215, 216, 290], "a_": 118, "valueerror": [118, 239, 340], "refer": [118, 226, 231, 244, 266, 267, 268, 269, 276, 296, 341], "golub": 118, "van": 118, "loan": 118, "baltimor": 118, "md": 118, "john": 118, "hopkin": 118, "univers": 118, "1985": 118, "pg": 118, "la": 118, "arang": [118, 264, 341, 343], "9": [118, 284, 315, 318, 319, 320, 321, 323, 329, 332, 343], "74597": 118, "20": 118, "84804": 118, "41421": 118, "23607": [118, 119], "74166": 118, "24264": 118, "11": 118, "225": 118, "factorizatoin": 119, "q": 119, "894427": 119, "447214": 119, "57771": 119, "50": 120, "evenli": 120, "return_metadata": 121, "binari": [121, 164, 165, 166, 167, 168, 262, 282, 306, 339], "npy": [121, 164, 345], "safetensor": [121, 166, 239, 243, 342, 345], "gguf": [121, 165, 345], "matadata": 121, "unsupport": 121, "tensor": [121, 191, 215, 216, 229, 230, 295, 343], "natur": [122, 124, 342], "logarithm": [122, 123, 124, 125], "log": [124, 126, 130, 280, 281, 285, 288, 290, 293, 304], "plu": 124, "exp": [126, 130, 151, 175, 273, 288, 300, 301, 304, 339, 346], "stabl": [126, 130, 175, 290], "prepend": 131, "negat": 139, "beforehand": 143, "pad_with": 144, "constant_valu": 144, "pad_width": 144, "before_1": 144, "after_1": 144, "before_2": 144, "after_2": 144, "before_n": 144, "after_n": 144, "before_i": 144, "after_i": 144, "extend": 144, "side": [144, 215, 216, 229, 230, 339], "smaller": [145, 321, 339], "everi": [147, 205, 332, 340], "particular": [147, 225], "consecut": [147, 256], "w_1": 147, "w_g": 147, "begin": [147, 216, 230, 262, 287, 294, 300, 305, 306], "align": [147, 216, 230], "max_i": 147, "min_i": 147, "textrm": [147, 224, 274, 277], "round": 147, "pack": [147, 148], "unsign": [147, 148, 209], "lower": [147, 154, 157, 158, 193, 272], "upper": [147, 154, 157, 158, 272], "1st": 147, "signific": 147, "2nd": 147, "dequant": 147, "w_q": 147, "whether": [148, 236, 251, 282, 285, 291], "prng": [149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 336], "num_sampl": 150, "unnorm": [150, 282, 284], "draw": 150, "cdf": [151, 224, 274], "accord": [151, 200, 251, 266, 267, 268, 269], "seed": 152, "loc": 153, "deviat": [153, 266, 268, 271], "low": [154, 158, 272, 309], "high": [154, 158, 213, 223, 272, 309], "bound": [154, 157, 158, 224, 272, 339, 341, 346], "roadcast": 154, "domain": 157, "uniformli": 158, "repetit": 160, "preserv": [161, 340], 
"reciproc": 163, "arr": [164, 341], "obj": 165, "uncompress": 167, "my_path": 167, "tree_flatten": [167, 205, 206, 213], "transformerencod": 167, "128": [167, 213], "flat_param": 167, "compress": 168, "being": [182, 213], "prevent": [182, 295, 343], "flow": [182, 342], "unchang": [182, 256], "prior": [187, 188], "exclud": 188, "dot": [191, 204, 241, 251], "elsewher": [193, 341], "col": 193, "triangl": 193, "mse": 196, "param": [196, 213, 309, 340], "lvalu": 196, "dlvalu": 196, "dparam": 196, "lasso": 196, "l1": [196, 287, 289, 290, 294], "varianc": [197, 217, 225, 285], "divisor": 197, "cotang": 198, "in_ax": [199, 340], "out_ax": [199, 340], "prefix": [199, 204], "fn": [203, 205, 344], "callabl": [203, 204, 205, 232, 233, 236, 258, 263, 265, 266, 267, 268, 269, 270, 271, 272, 327, 328, 329, 330, 331, 332], "wrt": 203, "rho": 315, "06": [285, 295, 315], "paper": [217, 260, 315, 316, 317, 318, 320, 321], "zeiler": 315, "2012": [315, 326], "adapt": [315, 316, 317], "1212": 315, "5701": 315, "v_": [315, 317, 318, 319, 320, 326, 327], "v_t": [315, 317, 318, 319, 320, 326, 327], "g_t": [315, 317, 318, 319, 320, 321, 326, 327], "delta": [287, 315], "w_": [216, 230, 315, 316, 317, 318, 319, 320, 321, 326, 327], "u_t": 315, "epsilon": [217, 225, 226, 227, 254, 283, 285, 315, 317, 318, 319, 320, 326], "u_": 315, "w_t": [315, 317, 318, 319, 320, 321, 326, 327], "lambda": [205, 213, 232, 237, 261, 300, 305, 315, 316, 317, 318, 319, 320, 321, 326, 327, 339, 340], "averag": [215, 216, 315, 316, 318, 319, 320], "denomin": [226, 283, 315, 317, 318, 319, 320, 326], "stabil": [217, 225, 226, 227, 254, 283, 285, 315, 316, 317, 318, 319, 320, 326], "30": 316, "001": 316, "clip_threshold": 316, "decay_r": [316, 329, 332], "beta_1": [316, 318, 319, 320, 321], "weight_decai": [316, 319, 321, 327], "scale_paramet": 316, "relative_step": 316, "warmup_init": 316, "sublinear": 316, "cost": [316, 342], "epsilon_1": 316, "epsilon_2": 316, "parameter_scal": 316, "clip": 316, "unscal": 316, "decai": [316, 319, 321, 327, 328, 329, 332], "duchi": 317, "hazan": 317, "singer": 317, "2011": 317, "subgradi": 317, "onlin": 317, "stochast": [317, 318, 320, 327, 342], "jmlr": 317, "999": [318, 319, 320], "omit": [318, 320], "estim": [318, 320], "kingma": [318, 320], "ba": [318, 320], "2015": [221, 318, 320], "iclr": [318, 319, 320], "m_": [318, 319, 320, 321], "m_t": [318, 319, 320, 321], "beta_2": [318, 319, 320, 321], "contrast": 319, "loshchilov": 319, "hutter": 319, "decoupl": 319, "regular": [221, 296, 319, 339, 341], "adam": [314, 320, 321, 330, 331], "99": [321, 326], "tend": 321, "larger": [256, 321], "10x": 321, "adamw": [314, 321], "maintain": [221, 222, 321], "strength": [321, 327], "wd": 321, "chen": 321, "symbol": 321, "discoveri": 321, "2302": 321, "06675": 321, "c_": 321, "eta": 321, "c_t": 321, "momentum": [217, 321, 323, 327, 339], "appli": [205, 213, 215, 216, 217, 218, 219, 221, 222, 224, 225, 226, 227, 228, 229, 230, 231, 233, 245, 252, 253, 254, 255, 257, 259, 261, 262, 264, 273, 274, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 309, 322, 325, 331, 334, 339], "opt": 322, "superset": [205, 322], "trainable_paramet": [213, 236, 323], "tieleman": 326, "hinton": 326, "lectur": 326, "coursera": 326, "smooth": [284, 294, 326], "dampen": 327, "nesterov": 327, "descent": [327, 339, 342], "mu": 327, "tau": 327, "l2": [287, 290, 327], "penalti": 327, "is_leaf": [204, 205], "arbitrari": [204, 312], "depth": 
[204, 222, 340], "hello": [204, 206], "charact": 204, "flat": [204, 206], "extra": 205, "closer": 205, "constitut": 205, "dict_kei": [205, 323], "recreat": 206, "world": 206, "42": 206, "byte": 209, "bool_": 209, "uint8": 209, "uint16": 209, "16": [209, 215, 226, 229, 232, 312], "uint64": 209, "int8": 209, "int16": 209, "int64": 209, "done": [213, 220, 339, 342, 343], "manual": 213, "explicitli": [213, 336], "solv": 213, "intuit": 213, "freez": [213, 248, 312], "finetun": 213, "in_dim": [213, 312], "out_dim": [213, 312], "enumer": 213, "caus": [213, 339, 342], "local": [213, 221], "scope": 213, "l2_loss": 213, "y_hat": 213, "loss_and_grad": 213, "workhors": 213, "Its": 213, "recurs": [213, 236, 237, 242, 246, 248, 312], "frozen": [213, 237, 246, 248, 253, 312], "individu": [213, 221, 222], "subset": [213, 236], "action": 213, "displai": 213, "tree_map": 213, "count": [213, 330], "num_param": 213, "preclud": 213, "pure": [213, 314], "pattern": [213, 342], "achiev": 213, "other_input": 213, "necessari": 213, "wrap": 213, "apply_to_modul": [213, 237], "children": 213, "filter_and_map": 213, "leaf_modul": 213, "load_weight": [213, 342], "named_modul": 213, "save_weight": 213, "unfreez": [213, 237], "update_modul": 213, "alibi": 213, "batchnorm": 213, "conv1d": 213, "conv2d": 213, "dropout": [213, 221, 222, 245, 263, 339], "dropout2d": 213, "dropout3d": 213, "gelu": [213, 275, 276, 339], "groupnorm": 213, "instancenorm": 213, "layernorm": 213, "mish": 213, "prelu": 213, "quantizedlinear": 213, "relu": [213, 252, 263, 297, 309], "selu": 213, "sequenti": [213, 309], "silu": 213, "sinusoidalpositionalencod": 213, "softshrink": 213, "gelu_approx": [213, 224, 274], "gelu_fast_approx": [213, 224, 274], "binary_cross_entropi": [213, 339], "cosine_similarity_loss": 213, "gaussian_nll_loss": 213, "hinge_loss": 213, "huber_loss": 213, "kl_div_loss": 213, "l1_loss": 213, "log_cosh_loss": 213, "margin_ranking_loss": 213, "mse_loss": 213, "nll_loss": 213, "smooth_l1_loss": 213, "triplet_loss": 213, "init": [213, 252, 309, 314, 328, 329, 331, 332], "uniform": [213, 228, 239, 267, 269, 309, 336, 339, 340, 346], "glorot_norm": 213, "glorot_uniform": 213, "he_norm": 213, "he_uniform": 213, "affin": [217, 225, 226, 227, 228, 253], "track_running_stat": 217, "var": [217, 225, 226, 227, 285], "gamma": [217, 225, 226, 227, 254, 266, 267, 268, 269], "nc": 217, "nlc": [217, 218], "four": 217, "nhwc": [217, 219], "height": [216, 217, 219, 221, 222, 230], "width": [216, 217, 219, 221, 222, 230, 253], "deep": [217, 266, 267, 268, 269], "intern": 217, "covari": 217, "shift": 217, "bn": 217, "in_channel": [218, 219], "out_channel": [218, 219], "kernel_s": [215, 216, 218, 219, 229, 230], "learnabl": [218, 219, 258], "portion": 220, "independ": [221, 222], "nwhc": 221, "whc": 221, "entri": [221, 222], "benefici": [221, 222, 342], "earli": 221, "adjac": 221, "pixel": 221, "correl": [70, 221], "thompson": 221, "goroshin": 221, "jain": 221, "lecun": 221, "bregler": 221, "cvpr": 221, "ndhwc": 222, "dhwc": 222, "medic": 222, "video": 222, "num_embed": 223, "lookup": 223, "typic": [223, 314, 339, 342], "usual": [223, 338, 342], "vocabulari": 223, "approx": 224, "unit": [224, 255, 257, 259, 266, 267, 268, 269, 273, 274, 275, 276, 277, 279, 298, 299, 300, 302], "phi": [224, 274], "geluapprox": 224, "sigma": [224, 266, 267, 268, 269, 275, 276, 277, 280, 301, 302], "60033": [224, 275], "0433603": [224, 275], "gelufast": 224, "773": 224, "regard": 224, "num_group": 225, "pytorch_compat": 225, "split": [225, 277], "preced": 225, 
"http": [225, 226, 227, 231, 254, 276, 296], "org": [225, 226, 227, 231, 254, 276, 296], "1803": 225, "08494": 225, "inorm": 226, "1607": [226, 227], "08022": 226, "06450": 227, "mathcal": 228, "u": 228, "d_i": 228, "monoton": 296, "1908": [231, 296], "08681": [231, 296], "tanh": [213, 231, 296], "softplu": [213, 231, 296], "map_fn": [232, 236], "filter_fn": [232, 236], "valid_parameter_filt": 232, "apply_fn": 233, "descend": 234, "is_leaf_fn": 236, "found": 236, "drop": 236, "idempot": [237, 248], "attent": [237, 251, 260, 263], "endswith": 237, "file_or_weight": 239, "miss": [239, 345], "ok": [239, 340], "save_safetensor": [243, 345], "reflect": [244, 339, 341, 343], "certain": [245, 339], "ie": 248, "noop": 248, "unfrozen": 248, "chang": [66, 170, 249, 253, 264, 287, 294, 339, 343], "tracer": 249, "partial": [249, 250, 339, 342], "child": 250, "programmat": 250, "query_input_dim": 251, "key_input_dim": 251, "value_input_dim": 251, "value_dim": 251, "value_output_dim": 251, "head": [251, 263], "aggreg": 251, "linearli": 251, "attend": 251, "num_paramet": 252, "25": [252, 264], "parametr": [252, 297], "classmethod": 253, "from_linear": 253, "quantize_modul": 253, "1910": 254, "07467": 254, "rectifi": [255, 268, 269, 279, 298, 299], "10000": 256, "rotat": 256, "slightli": [256, 346], "angular": 256, "frequenc": [256, 260], "_cos_sin_theta_kei": [], "precomput": [], "_cos_sin_theta_valu": [], "leq": [287, 300], "0507": 300, "67326": 300, "elu": [213, 300], "plain": 258, "known": [259, 302], "swish": [259, 302], "cdot": [275, 276, 283, 286, 302], "min_freq": 260, "0001": 260, "max_freq": 260, "cos_first": 260, "full_turn": 260, "sinusoid": 260, "sin": [260, 340, 344], "lambd": [261, 305], "threshold": [262, 287, 294, 306], "geq": [262, 306], "num_encoder_lay": 263, "num_decoder_lay": 263, "custom_encod": 263, "custom_decod": 263, "norm_first": 263, "checkpoint": 263, "decod": 263, "interact": 263, "mechan": 263, "hidden": 263, "chekpoint": 263, "usag": [263, 339], "expens": 263, "init_fn": [265, 266, 267, 268, 269, 270, 271, 272, 309], "glorot": [266, 267], "fan_in": [266, 267, 268, 269], "fan_out": [266, 267, 268, 269], "difficulti": [266, 267], "feedforward": [266, 267], "191107": 266, "61278": 266, "150594": 266, "363207": 266, "gain": [266, 267, 268, 269], "89613": 266, "53947": 266, "48095": 266, "995016": 266, "223404": 267, "890597": 267, "379159": 267, "776856": 267, "90041": 267, "02264": 267, "912766": 267, "12451": 267, "fan": [268, 269], "delv": [268, 269], "surpass": [268, 269], "human": [268, 269], "level": [268, 269], "imagenet": [268, 269], "classif": [268, 269], "25211": 268, "458835": 268, "177208": 268, "0137595": 268, "6967": 268, "02765": 268, "15268": 268, "75787": 268, "kaim": 269, "0300242": 269, "0184009": 269, "793615": 269, "666329": 269, "64331": 269, "16506": 269, "08619": 269, "79854": 269, "982273": 271, "534422": 271, "380709": 271, "0645099": 271, "883935": 272, "863726": 272, "617261": 272, "417497": 272, "exact": [275, 276], "0003": 275, "015": 276, "with_logit": 282, "predict": [282, 285, 286, 287, 288, 289, 290, 292, 293, 294], "105361": 282, "223144": 282, "20397": 282, "916291": 282, "539245": 282, "prob": 282, "510826": 282, "x1": 283, "x2": 283, "x_1": [283, 291], "x_2": [283, 291], "label_smooth": 284, "hot": 284, "0485873": 284, "348587": 284, "likelihood": [285, 293], "nll": [285, 293], "hing": 286, "y_": [286, 290], "pred": [286, 290], "huber": 287, "l_": [215, 229, 287], "kullback": 288, "leibler": 288, "diverg": 288, "cosh": 290, "logcosh": 290, 
"sensit": 290, "outlier": 290, "dual": 290, "behavior": [290, 341, 342], "offer": 290, "balanc": 290, "robust": 290, "approach": [290, 340], "task": 290, "inputs1": 291, "inputs2": 291, "margin": [291, 295], "rank": 291, "573409": 291, "765166": 291, "0638": 291, "75596": 291, "225763": 291, "256995": 291, "773433": 291, "formula": 294, "anchor": 295, "triplet": 295, "_p": 295, "degre": 295, "pairwis": 295, "instabl": 295, "subclass": 312, "concept": 312, "mymlp": 312, "in_proj": 312, "basi": 334, "subsequ": 314, "apply_gradi": 314, "implicit": [336, 339, 340], "fine": [336, 342], "grain": 336, "control": [336, 342], "manag": [183, 336, 346], "pseudo": 336, "altern": 336, "splittabl": 336, "threefri": 336, "counter": 336, "cycl": 338, "merg": 339, "fuse": 339, "big": 339, "awar": [339, 342], "36788": 339, "compiled_fun": 339, "code": [339, 342], "slow": 339, "Not": [66, 339], "recompil": [66, 339], "stack": 339, "rerun": [339, 342], "too": [339, 342], "frequent": [339, 342], "destroi": 339, "anonym": 339, "don": [339, 346], "nonlinear": 339, "unari": 339, "overhead": [339, 342, 346], "bandwidth": 339, "fusibl": 339, "consider": 339, "versu": 339, "timeit": [339, 340], "tic": 339, "perf_count": 339, "toc": 339, "tpi": 339, "1e3": 339, "1000": [328, 339], "4096": [339, 340, 346], "On": [339, 340, 342], "millisecond": [339, 346], "five": 339, "latest": 339, "won": 339, "trace": 339, "placehold": 339, "insid": 339, "crash": 339, "inspect": [339, 344], "disable_compil": 339, "okai": [339, 342], "intend": 339, "deal": 339, "pretti": [339, 342], "inconveni": 339, "functool": 339, "particularli": 339, "backward": [339, 340], "squeez": [264, 339], "checkout": 339, "compiled_grad_fn": 339, "71828": 339, "outer": [339, 342], "opportun": 339, "idea": [340, 342], "behind": 340, "dfdx": [340, 341], "d2fdx2": 340, "differentiaion": 340, "zero_grad": 340, "detach": 340, "requires_grad": 340, "dloss_dw": 340, "dloss_dx": 340, "lot": 340, "redund": 340, "suppos": [340, 346], "nice": [340, 342], "propag": [340, 341], "stop_gradi": 340, "autom": 340, "contriv": [340, 346], "sake": 340, "clariti": 340, "quit": [340, 343], "power": [340, 343], "difficult": 340, "primit": 340, "issu": [340, 343], "priorit": 340, "xs": 340, "ys": 340, "naive_add": 340, "vmap_add": 340, "total": 340, "390": 340, "wherea": 340, "025": 340, "ten": [340, 342], "Of": 340, "better": [340, 346], "handi": 340, "slice": 341, "ellipsi": 341, "syntax": 341, "idx": 341, "mix": 341, "take_along_axi": 341, "lack": 341, "extrem": [341, 342], "ineffici": [341, 342], "nonzero": 341, "record": 342, "dynam": 342, "easier": 342, "worri": 342, "fun1": 342, "expensive_fun": 342, "consum": 342, "eager": 342, "thank": 342, "weights_fp16": 342, "trade": 342, "bad": 342, "grow": 342, "computation": 342, "costli": 342, "wide": 342, "thousand": 342, "value_and_grad_fn": 342, "implicitli": 342, "anytim": 342, "memoryview": [342, 343], "perfectli": 342, "first_lay": 342, "second_layer_a": 342, "second_layer_b": 342, "protocol": 343, "receiv": [330, 343], "pep": 343, "3118": 343, "view": 343, "a_view": 343, "owndata": 343, "extern": 343, "x_view": 343, "modifi": 343, "df": 343, "x\u00b2": 343, "2x": 343, "indirectli": 343, "modif": 343, "seen": 343, "occur": 343, "incorpor": 343, "incorrect": 343, "experiment": 343, "break": 343, "advis": 343, "intermedi": 343, "jnp": 343, "tf": 343, "page": 344, "composit": 344, "archiv": 345, "savez_compress": 345, "save_gguf": 345, "arr_0": 345, "pool": [215, 216, 229, 230, 346], "advantag": 346, "parallel": 346, "race": 
346, "interest": 346, "albeit": 346, "d1": 346, "d2": 346, "matmul": 346, "dens": 346, "twice": 346, "measur": 346, "default_stream": 347, "default_devic": 347, "my_devic": 347, "streamcontext": 183, "context": 183, "avgpool1d": 213, "avgpool2d": 213, "maxpool1d": 213, "maxpool2d": [213, 216], "n_i": [215, 216, 229, 230], "c_j": [215, 216, 229, 230], "ldot": [215, 216, 229, 230], "lfloor": [215, 216, 229, 230], "rfloor": [215, 216, 229, 230], "k_h": [216, 230], "k_w": [216, 230], "h_": [216, 230], "max_": [229, 230], "rmsprop": 314, "adagrad": 314, "adafactor": 314, "adadelta": 314, "adamax": 314, "lion": 314, "step_decai": 314, "exponential_decai": 314, "cosine_decai": [314, 330], "decay_step": 328, "beyond": [328, 331], "lr_schedul": [328, 329, 330, 332], "0999961": 328, "06561": 329, "step_siz": 332, "081": 332, "ari": [60, 61, 62], "shapeless": 66, "attempt": 66, "kernel_dil": 70, "input_dil": 70, "upsampl": 213, "hardswish": 213, "leaky_relu": 213, "log_sigmoid": 213, "log_softmax": 213, "relu6": 213, "pycapsul": 263, "scale_factor": 264, "nearest": 264, "align_corn": 264, "audio": 264, "4d": 264, "forth": 264, "neighbor": 264, "interpol": 264, "bilinear": 264, "trilinear": 264, "corner": 264, "bottom": 264, "75": 264, "33333": 264, "66667": 264, "702": 276, "hendryck": 276, "1606": 276, "08415": 276, "gate": 277, "halv": 277, "negative_slop": 279, "leaki": 279, "sum_i": 281, "x_i": [281, 303], "sum_j": 303, "x_j": 303, "join_schedul": 314, "linear_schedul": [314, 330], "boundari": 330, "join": 330, "transit": 330, "warmup": [330, 331], "0999938": 330, "101": 331}, "objects": {"mlx.core": [[7, 0, 1, "", "Device"], [8, 0, 1, "", "Dtype"], [207, 0, 1, "", "Stream"], [9, 2, 1, "", "abs"], [10, 2, 1, "", "add"], [11, 2, 1, "", "all"], [12, 2, 1, "", "allclose"], [13, 2, 1, "", "any"], [14, 2, 1, "", "arange"], [15, 2, 1, "", "arccos"], [16, 2, 1, "", "arccosh"], [17, 2, 1, "", "arcsin"], [18, 2, 1, "", "arcsinh"], [19, 2, 1, "", "arctan"], [20, 2, 1, "", "arctanh"], [21, 2, 1, "", "argmax"], [22, 2, 1, "", "argmin"], [23, 2, 1, "", "argpartition"], [24, 2, 1, "", "argsort"], [25, 0, 1, "", "array"], [59, 2, 1, "", "array_equal"], [60, 2, 1, "", "atleast_1d"], [61, 2, 1, "", "atleast_2d"], [62, 2, 1, "", "atleast_3d"], [63, 2, 1, "", "broadcast_to"], [64, 2, 1, "", "ceil"], [65, 2, 1, "", "clip"], [66, 2, 1, "", "compile"], [67, 2, 1, "", "concatenate"], [68, 2, 1, "", "conv1d"], [69, 2, 1, "", "conv2d"], [70, 2, 1, "", "conv_general"], [71, 2, 1, "", "convolve"], [72, 2, 1, "", "cos"], [73, 2, 1, "", "cosh"], [74, 2, 1, "", "default_device"], [75, 2, 1, "", "default_stream"], [76, 2, 1, "", "dequantize"], [77, 2, 1, "", "diag"], [78, 2, 1, "", "diagonal"], [79, 2, 1, "", "disable_compile"], [80, 2, 1, "", "divide"], [81, 2, 1, "", "divmod"], [82, 2, 1, "", "enable_compile"], [83, 2, 1, "", "equal"], [84, 2, 1, "", "erf"], [85, 2, 1, "", "erfinv"], [86, 2, 1, "", "eval"], [87, 2, 1, "", "exp"], [88, 2, 1, "", "expand_dims"], [89, 2, 1, "", "eye"], [102, 2, 1, "", "flatten"], [103, 2, 1, "", "floor"], [104, 2, 1, "", "floor_divide"], [105, 2, 1, "", "full"], [106, 2, 1, "", "grad"], [107, 2, 1, "", "greater"], [108, 2, 1, "", "greater_equal"], [109, 2, 1, "", "identity"], [110, 2, 1, "", "inner"], [111, 2, 1, "", "isinf"], [112, 2, 1, "", "isnan"], [113, 2, 1, "", "isneginf"], [114, 2, 1, "", "isposinf"], [115, 2, 1, "", "jvp"], [116, 2, 1, "", "less"], [117, 2, 1, "", "less_equal"], [120, 2, 1, "", "linspace"], [121, 2, 1, "", "load"], [122, 2, 1, "", "log"], [123, 2, 1, "", "log10"], 
[124, 2, 1, "", "log1p"], [125, 2, 1, "", "log2"], [126, 2, 1, "", "logaddexp"], [127, 2, 1, "", "logical_and"], [128, 2, 1, "", "logical_not"], [129, 2, 1, "", "logical_or"], [130, 2, 1, "", "logsumexp"], [131, 2, 1, "", "matmul"], [132, 2, 1, "", "max"], [133, 2, 1, "", "maximum"], [134, 2, 1, "", "mean"], [135, 2, 1, "", "min"], [136, 2, 1, "", "minimum"], [137, 2, 1, "", "moveaxis"], [138, 2, 1, "", "multiply"], [139, 2, 1, "", "negative"], [140, 2, 1, "", "new_stream"], [141, 2, 1, "", "ones"], [142, 2, 1, "", "ones_like"], [143, 2, 1, "", "outer"], [144, 2, 1, "", "pad"], [145, 2, 1, "", "partition"], [146, 2, 1, "", "prod"], [147, 2, 1, "", "quantize"], [148, 2, 1, "", "quantized_matmul"], [159, 2, 1, "", "reciprocal"], [160, 2, 1, "", "repeat"], [161, 2, 1, "", "reshape"], [162, 2, 1, "", "round"], [163, 2, 1, "", "rsqrt"], [164, 2, 1, "", "save"], [165, 2, 1, "", "save_gguf"], [166, 2, 1, "", "save_safetensors"], [167, 2, 1, "", "savez"], [168, 2, 1, "", "savez_compressed"], [169, 2, 1, "", "set_default_device"], [170, 2, 1, "", "set_default_stream"], [171, 2, 1, "", "sigmoid"], [172, 2, 1, "", "sign"], [173, 2, 1, "", "sin"], [174, 2, 1, "", "sinh"], [175, 2, 1, "", "softmax"], [176, 2, 1, "", "sort"], [177, 2, 1, "", "split"], [178, 2, 1, "", "sqrt"], [179, 2, 1, "", "square"], [180, 2, 1, "", "squeeze"], [181, 2, 1, "", "stack"], [182, 2, 1, "", "stop_gradient"], [183, 2, 1, "", "stream"], [184, 2, 1, "", "subtract"], [185, 2, 1, "", "sum"], [186, 2, 1, "", "swapaxes"], [187, 2, 1, "", "take"], [188, 2, 1, "", "take_along_axis"], [189, 2, 1, "", "tan"], [190, 2, 1, "", "tanh"], [191, 2, 1, "", "tensordot"], [192, 2, 1, "", "transpose"], [193, 2, 1, "", "tri"], [194, 2, 1, "", "tril"], [195, 2, 1, "", "triu"], [196, 2, 1, "", "value_and_grad"], [197, 2, 1, "", "var"], [198, 2, 1, "", "vjp"], [199, 2, 1, "", "vmap"], [200, 2, 1, "", "where"], [201, 2, 1, "", "zeros"], [202, 2, 1, "", "zeros_like"]], "mlx.core.Device": [[7, 1, 1, "", "__init__"]], "mlx.core.Dtype": [[8, 1, 1, "", "__init__"]], "mlx.core.Stream": [[207, 1, 1, "", "__init__"]], "mlx.core.array": [[26, 3, 1, "", "T"], [25, 1, 1, "", "__init__"], [27, 1, 1, "", "abs"], [28, 1, 1, "", "all"], [29, 1, 1, "", "any"], [30, 1, 1, "", "argmax"], [31, 1, 1, "", "argmin"], [32, 1, 1, "", "astype"], [33, 1, 1, "", "cos"], [34, 3, 1, "", "dtype"], [35, 1, 1, "", "exp"], [36, 1, 1, "", "item"], [37, 1, 1, "", "log"], [38, 1, 1, "", "log1p"], [39, 1, 1, "", "logsumexp"], [40, 1, 1, "", "max"], [41, 1, 1, "", "mean"], [42, 1, 1, "", "min"], [43, 3, 1, "", "ndim"], [44, 1, 1, "", "prod"], [45, 1, 1, "", "reciprocal"], [46, 1, 1, "", "reshape"], [47, 1, 1, "", "round"], [48, 1, 1, "", "rsqrt"], [49, 3, 1, "", "shape"], [50, 1, 1, "", "sin"], [51, 3, 1, "", "size"], [52, 1, 1, "", "split"], [53, 1, 1, "", "sqrt"], [54, 1, 1, "", "square"], [55, 1, 1, "", "sum"], [56, 1, 1, "", "tolist"], [57, 1, 1, "", "transpose"], [58, 1, 1, "", "var"]], "mlx.core.fft": [[90, 2, 1, "", "fft"], [91, 2, 1, "", "fft2"], [92, 2, 1, "", "fftn"], [93, 2, 1, "", "ifft"], [94, 2, 1, "", "ifft2"], [95, 2, 1, "", "ifftn"], [96, 2, 1, "", "irfft"], [97, 2, 1, "", "irfft2"], [98, 2, 1, "", "irfftn"], [99, 2, 1, "", "rfft"], [100, 2, 1, "", "rfft2"], [101, 2, 1, "", "rfftn"]], "mlx.core.linalg": [[118, 2, 1, "", "norm"], [119, 2, 1, "", "qr"]], "mlx.core.random": [[149, 2, 1, "", "bernoulli"], [150, 2, 1, "", "categorical"], [151, 2, 1, "", "gumbel"], [152, 2, 1, "", "key"], [153, 2, 1, "", "normal"], [154, 2, 1, "", "randint"], [155, 2, 1, "", "seed"], [156, 
2, 1, "", "split"], [157, 2, 1, "", "truncated_normal"], [158, 2, 1, "", "uniform"]], "mlx.nn": [[214, 0, 1, "", "ALiBi"], [215, 0, 1, "", "AvgPool1d"], [216, 0, 1, "", "AvgPool2d"], [217, 0, 1, "", "BatchNorm"], [218, 0, 1, "", "Conv1d"], [219, 0, 1, "", "Conv2d"], [220, 0, 1, "", "Dropout"], [221, 0, 1, "", "Dropout2d"], [222, 0, 1, "", "Dropout3d"], [223, 0, 1, "", "Embedding"], [224, 0, 1, "", "GELU"], [225, 0, 1, "", "GroupNorm"], [226, 0, 1, "", "InstanceNorm"], [227, 0, 1, "", "LayerNorm"], [228, 0, 1, "", "Linear"], [229, 0, 1, "", "MaxPool1d"], [230, 0, 1, "", "MaxPool2d"], [231, 0, 1, "", "Mish"], [312, 0, 1, "", "Module"], [251, 0, 1, "", "MultiHeadAttention"], [252, 0, 1, "", "PReLU"], [253, 0, 1, "", "QuantizedLinear"], [254, 0, 1, "", "RMSNorm"], [255, 0, 1, "", "ReLU"], [256, 0, 1, "", "RoPE"], [257, 0, 1, "", "SELU"], [258, 0, 1, "", "Sequential"], [259, 0, 1, "", "SiLU"], [260, 0, 1, "", "SinusoidalPositionalEncoding"], [261, 0, 1, "", "Softshrink"], [262, 0, 1, "", "Step"], [263, 0, 1, "", "Transformer"], [264, 0, 1, "", "Upsample"], [273, 2, 1, "", "elu"], [274, 2, 1, "", "gelu"], [275, 2, 1, "", "gelu_approx"], [276, 2, 1, "", "gelu_fast_approx"], [277, 2, 1, "", "glu"], [278, 2, 1, "", "hardswish"], [279, 2, 1, "", "leaky_relu"], [280, 2, 1, "", "log_sigmoid"], [281, 2, 1, "", "log_softmax"], [296, 2, 1, "", "mish"], [297, 2, 1, "", "prelu"], [298, 2, 1, "", "relu"], [299, 2, 1, "", "relu6"], [300, 2, 1, "", "selu"], [301, 2, 1, "", "sigmoid"], [302, 2, 1, "", "silu"], [303, 2, 1, "", "softmax"], [304, 2, 1, "", "softplus"], [305, 2, 1, "", "softshrink"], [306, 2, 1, "", "step"], [307, 2, 1, "", "tanh"], [203, 2, 1, "", "value_and_grad"]], "mlx.nn.Module": [[232, 1, 1, "", "apply"], [233, 1, 1, "", "apply_to_modules"], [234, 1, 1, "", "children"], [235, 1, 1, "", "eval"], [236, 1, 1, "", "filter_and_map"], [237, 1, 1, "", "freeze"], [238, 1, 1, "", "leaf_modules"], [239, 1, 1, "", "load_weights"], [240, 1, 1, "", "modules"], [241, 1, 1, "", "named_modules"], [242, 1, 1, "", "parameters"], [243, 1, 1, "", "save_weights"], [244, 3, 1, "", "state"], [245, 1, 1, "", "train"], [246, 1, 1, "", "trainable_parameters"], [247, 3, 1, "", "training"], [248, 1, 1, "", "unfreeze"], [249, 1, 1, "", "update"], [250, 1, 1, "", "update_modules"]], "mlx.nn.init": [[265, 2, 1, "", "constant"], [266, 2, 1, "", "glorot_normal"], [267, 2, 1, "", "glorot_uniform"], [268, 2, 1, "", "he_normal"], [269, 2, 1, "", "he_uniform"], [270, 2, 1, "", "identity"], [271, 2, 1, "", "normal"], [272, 2, 1, "", "uniform"]], "mlx.nn.losses": [[282, 2, 1, "", "binary_cross_entropy"], [283, 2, 1, "", "cosine_similarity_loss"], [284, 2, 1, "", "cross_entropy"], [285, 2, 1, "", "gaussian_nll_loss"], [286, 2, 1, "", "hinge_loss"], [287, 2, 1, "", "huber_loss"], [288, 2, 1, "", "kl_div_loss"], [289, 2, 1, "", "l1_loss"], [290, 2, 1, "", "log_cosh_loss"], [291, 2, 1, "", "margin_ranking_loss"], [292, 2, 1, "", "mse_loss"], [293, 2, 1, "", "nll_loss"], [294, 2, 1, "", "smooth_l1_loss"], [295, 2, 1, "", "triplet_loss"]], "mlx.optimizers": [[315, 0, 1, "", "AdaDelta"], [316, 0, 1, "", "Adafactor"], [317, 0, 1, "", "Adagrad"], [318, 0, 1, "", "Adam"], [319, 0, 1, "", "AdamW"], [320, 0, 1, "", "Adamax"], [321, 0, 1, "", "Lion"], [334, 0, 1, "", "Optimizer"], [326, 0, 1, "", "RMSprop"], [327, 0, 1, "", "SGD"], [328, 2, 1, "", "cosine_decay"], [329, 2, 1, "", "exponential_decay"], [330, 2, 1, "", "join_schedules"], [331, 2, 1, "", "linear_schedule"], [332, 2, 1, "", "step_decay"]], "mlx.optimizers.Optimizer": [[322, 1, 1, 
"", "apply_gradients"], [323, 1, 1, "", "init"], [324, 3, 1, "", "state"], [325, 1, 1, "", "update"]], "mlx.utils": [[204, 2, 1, "", "tree_flatten"], [205, 2, 1, "", "tree_map"], [206, 2, 1, "", "tree_unflatten"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:function", "3": "py:property"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "function", "Python function"], "3": ["py", "property", "Python property"]}, "titleterms": {"oper": [0, 1, 313], "develop": 1, "document": 1, "introduc": 1, "exampl": [1, 5, 339, 346], "primit": 1, "us": [1, 342, 347], "implement": [1, 3], "cpu": 1, "backend": 1, "gpu": 1, "transform": [1, 263, 337, 339, 340, 342, 344], "build": [1, 6], "bind": 1, "python": [1, 5, 6], "cmake": 1, "setuptool": 1, "usag": [1, 5], "result": 1, "script": [1, 3], "download": [1, 3], "code": [1, 3], "linear": [2, 212, 228], "regress": 2, "llm": 3, "infer": 3, "model": 3, "attent": 3, "layer": [3, 4, 310], "encod": 3, "full": [3, 105], "gener": 3, "put": 3, "all": [3, 11, 28], "togeth": 3, "convert": 3, "weight": 3, "load": [3, 121, 345], "benchmark": 3, "multi": 4, "perceptron": 4, "mlx": [5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332], "instal": [5, 6], "api": [5, 6], "refer": 5, "c": [5, 6], "further": 5, "read": 5, "troubleshoot": 6, "from": [6, 341], "sourc": 6, "requir": 6, "option": 6, "metal": 6, "found": 6, "x86": 6, "shell": 6, "core": [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 
162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 207], "devic": [7, 210], "dtype": [8, 34], "stream": [183, 207, 210, 347], "ab": [9, 27], "add": 10, "allclos": 12, "ani": [13, 29], "arang": 14, "arcco": 15, "arccosh": 16, "arcsin": 17, "arcsinh": 18, "arctan": 19, "arctanh": 20, "argmax": [21, 30], "argmin": [22, 31], "argpartit": 23, "argsort": 24, "arrai": [25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 208, 341, 345], "t": 26, "astyp": 32, "co": [33, 72], "exp": [35, 87], "item": 36, "log": [37, 122], "log1p": [38, 124], "logsumexp": [39, 130], "max": [40, 132], "mean": [41, 134], "min": [42, 135], "ndim": 43, "prod": [44, 146], "reciproc": [45, 159], "reshap": [46, 161], "round": [47, 162], "rsqrt": [48, 163], "shape": 49, "sin": [50, 173], "size": 51, "split": [52, 156, 177], "sqrt": [53, 178], "squar": [54, 179], "sum": [55, 185], "tolist": 56, "transpos": [57, 192], "var": [58, 197], "array_equ": 59, "broadcast_to": 63, "ceil": 64, "clip": 65, "compil": [66, 339], "concaten": 67, "conv1d": [68, 218], "conv2d": [69, 219], "convolv": 71, "cosh": 73, "default_devic": 74, "default_stream": 75, "dequant": 76, "diag": 77, "diagon": 78, "disable_compil": 79, "divid": 80, "divmod": 81, "enable_compil": 82, "equal": 83, "erf": 84, "erfinv": 85, "eval": [86, 235], "expand_dim": 88, "ey": 89, "fft": [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 211], "fft2": 91, "fftn": 92, "ifft": 93, "ifft2": 94, "ifftn": 95, "irfft": 96, "irfft2": 97, "irfftn": 98, "rfft": 99, "rfft2": 100, "rfftn": 101, "flatten": 102, "floor": 103, "floor_divid": 104, "grad": [106, 213], "greater": 107, "greater_equ": 108, "ident": [109, 270], "inner": 110, "isinf": 111, "isnan": 112, "isneginf": 113, "isposinf": 114, "jvp": 115, "less": 116, "less_equ": 117, "linalg": [118, 119], "norm": 118, "qr": 119, "linspac": 120, "log10": 123, "log2": 125, "logaddexp": 126, "logical_and": 127, "logical_not": 128, "logical_or": 129, "matmul": 131, "maximum": 133, "minimum": 136, "moveaxi": 137, "multipli": 138, "neg": 139, "new_stream": 140, "ones": 141, "ones_lik": 142, "outer": 143, "pad": 144, "partit": 145, "quantiz": 147, "quantized_matmul": 148, "random": [149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 336], "bernoulli": 149, "categor": 150, "gumbel": 151, "kei": 152, "normal": [153, 271], "randint": 154, "seed": 155, "truncated_norm": 157, "uniform": [158, 272], "repeat": 160, "save": [164, 345], "save_gguf": 165, "save_safetensor": 166, "savez": 167, "savez_compress": 168, "set_default_devic": 169, "set_default_stream": 170, "sigmoid": [171, 301], "sign": 172, "sinh": 174, "softmax": [175, 303], "sort": 176, "squeez": 180, "stack": 181, "stop_gradi": 182, "subtract": 184, "swapax": 186, "take": 187, "take_along_axi": 188, "tan": 189, "tanh": [190, 307], "tensordot": 191, "tri": 193, "tril": 194, "triu": 195, "value_and_grad": [196, 203], "vjp": 198, "vmap": 199, "where": 200, "zero": 201, "zeros_lik": 202, "nn": [203, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 
287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307], "optim": [314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334], "adadelta": 315, "adafactor": 316, "adagrad": 317, "adam": 318, "adamw": 319, "adamax": 320, "lion": 321, "apply_gradi": 322, "init": [265, 266, 267, 268, 269, 270, 271, 272, 323], "state": [244, 324], "updat": [213, 249, 325, 341], "rmsprop": 326, "sgd": 327, "util": [204, 205, 206, 338], "tree_flatten": 204, "tree_map": 205, "tree_unflatten": 206, "data": 209, "type": 209, "support": 209, "algebra": 212, "neural": 213, "network": 213, "quick": [213, 344], "start": [213, 344], "The": 213, "modul": [213, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 312], "class": 213, "paramet": [213, 242], "inspect": 213, "valu": 213, "alibi": 214, "batchnorm": 217, "dropout": 220, "dropout2d": 221, "dropout3d": 222, "embed": 223, "gelu": [224, 274], "groupnorm": 225, "instancenorm": 226, "layernorm": 227, "mish": [231, 296], "appli": 232, "apply_to_modul": 233, "children": 234, "filter_and_map": 236, "freez": 237, "leaf_modul": 238, "load_weight": 239, "named_modul": 241, "save_weight": 243, "train": [245, 247, 339], "trainable_paramet": 246, "unfreez": 248, "update_modul": 250, "multiheadattent": 251, "prelu": [252, 297], "quantizedlinear": 253, "rmsnorm": 254, "relu": [255, 298], "rope": 256, "selu": [257, 300], "sequenti": 258, "silu": [259, 302], "sinusoidalpositionalencod": 260, "softshrink": [261, 305], "step": [262, 306], "constant": 265, "glorot_norm": 266, "glorot_uniform": 267, "he_norm": 268, "he_uniform": 269, "gelu_approx": 275, "gelu_fast_approx": 276, "loss": [282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 311], "binary_cross_entropi": 282, "cosine_similarity_loss": 283, "cross_entropi": 284, "gaussian_nll_loss": 285, "hinge_loss": 286, "huber_loss": 287, "kl_div_loss": 288, "l1_loss": 289, "log_cosh_loss": 290, "margin_ranking_loss": 291, "mse_loss": 292, "nll_loss": 293, "smooth_l1_loss": 294, "triplet_loss": 295, "function": [308, 311, 339, 340, 344], "initi": 309, "tree": 338, "basic": [339, 344], "speedup": 339, "debug": 339, "pure": 339, "graph": [339, 342, 344], "automat": 340, "differenti": 340, "vector": 340, "index": 341, "differ": 341, "numpi": [341, 343], "In": 341, "place": 341, "lazi": 342, "evalu": 342, "why": 342, "comput": 342, "onli": 342, "what": 342, "you": 342, "when": 342, "convers": 343, "other": 343, "framework": 343, "pytorch": 343, "jax": 343, "tensorflow": 343, "guid": 344, "serial": 345, "format": 345, "unifi": 346, "memori": 346, "A": 346, "simpl": 346, "specifi": 347, "avgpool1d": 215, "avgpool2d": 216, "maxpool1d": 229, "maxpool2d": 230, "cosine_decai": 328, "exponential_decai": 329, "step_decai": 332, "common": 333, "schedul": 335, "atleast_1d": 60, "atleast_2d": 61, "atleast_3d": 62, "conv_gener": 70, "upsampl": 264, "elu": 273, "glu": 277, "hardswish": 278, "leaky_relu": 279, "log_sigmoid": 280, "log_softmax": 281, "relu6": 299, "softplu": 304, "join_schedul": 330, "linear_schedul": 331}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) |