From ec3f487eedae851adc084b45f119fde8e55b6246 Mon Sep 17 00:00:00 2001 From: CircleCI Docs Date: Fri, 29 Aug 2025 17:13:50 +0000 Subject: [PATCH] rebase --- docs/build/html/.buildinfo | 2 +- .../_sources/dev/custom_metal_kernels.rst | 4 +- docs/build/html/_sources/index.rst | 1 + docs/build/html/_sources/install.rst | 2 +- .../mlx.core.cuda.is_available.rst | 6 + .../mlx.core.fast.cuda_kernel.rst | 6 + docs/build/html/_sources/python/cuda.rst | 9 + docs/build/html/_sources/python/fast.rst | 1 + docs/build/html/_sources/usage/compile.rst | 2 +- docs/build/html/_static/check-solid.svg | 4 + docs/build/html/_static/clipboard.min.js | 7 + docs/build/html/_static/copy-button.svg | 5 + docs/build/html/_static/copybutton.css | 94 ++ docs/build/html/_static/copybutton.js | 248 ++++ docs/build/html/_static/copybutton_funcs.js | 73 ++ .../html/_static/documentation_options.js | 2 +- docs/build/html/cpp/ops.html | 50 +- docs/build/html/dev/custom_metal_kernels.html | 20 +- docs/build/html/dev/extensions.html | 16 +- docs/build/html/dev/metal_debugger.html | 16 +- docs/build/html/dev/mlx_in_cpp.html | 16 +- .../html/examples/linear_regression.html | 16 +- docs/build/html/examples/llama-inference.html | 16 +- docs/build/html/examples/mlp.html | 16 +- docs/build/html/genindex.html | 34 +- docs/build/html/index.html | 17 +- docs/build/html/install.html | 18 +- docs/build/html/objects.inv | Bin 28353 -> 28451 bytes .../python/_autosummary/mlx.core.Device.html | 16 +- .../python/_autosummary/mlx.core.Dtype.html | 16 +- .../_autosummary/mlx.core.DtypeCategory.html | 16 +- .../python/_autosummary/mlx.core.abs.html | 16 +- .../python/_autosummary/mlx.core.add.html | 16 +- .../python/_autosummary/mlx.core.addmm.html | 16 +- .../python/_autosummary/mlx.core.all.html | 16 +- .../_autosummary/mlx.core.allclose.html | 16 +- .../python/_autosummary/mlx.core.any.html | 16 +- .../python/_autosummary/mlx.core.arange.html | 16 +- .../python/_autosummary/mlx.core.arccos.html | 16 +- .../python/_autosummary/mlx.core.arccosh.html | 16 +- .../python/_autosummary/mlx.core.arcsin.html | 16 +- .../python/_autosummary/mlx.core.arcsinh.html | 16 +- .../python/_autosummary/mlx.core.arctan.html | 16 +- .../python/_autosummary/mlx.core.arctan2.html | 16 +- .../python/_autosummary/mlx.core.arctanh.html | 16 +- .../python/_autosummary/mlx.core.argmax.html | 16 +- .../python/_autosummary/mlx.core.argmin.html | 16 +- .../_autosummary/mlx.core.argpartition.html | 16 +- .../python/_autosummary/mlx.core.argsort.html | 16 +- .../python/_autosummary/mlx.core.array.T.html | 16 +- .../_autosummary/mlx.core.array.abs.html | 16 +- .../_autosummary/mlx.core.array.all.html | 16 +- .../_autosummary/mlx.core.array.any.html | 16 +- .../_autosummary/mlx.core.array.argmax.html | 16 +- .../_autosummary/mlx.core.array.argmin.html | 16 +- .../_autosummary/mlx.core.array.astype.html | 16 +- .../_autosummary/mlx.core.array.at.html | 16 +- .../_autosummary/mlx.core.array.conj.html | 16 +- .../_autosummary/mlx.core.array.cos.html | 16 +- .../_autosummary/mlx.core.array.cummax.html | 16 +- .../_autosummary/mlx.core.array.cummin.html | 16 +- .../_autosummary/mlx.core.array.cumprod.html | 16 +- .../_autosummary/mlx.core.array.cumsum.html | 16 +- .../_autosummary/mlx.core.array.diag.html | 16 +- .../_autosummary/mlx.core.array.diagonal.html | 16 +- .../_autosummary/mlx.core.array.dtype.html | 16 +- .../_autosummary/mlx.core.array.exp.html | 16 +- .../_autosummary/mlx.core.array.flatten.html | 16 +- .../python/_autosummary/mlx.core.array.html | 16 +- 
.../_autosummary/mlx.core.array.imag.html | 16 +- .../_autosummary/mlx.core.array.item.html | 16 +- .../_autosummary/mlx.core.array.itemsize.html | 16 +- .../_autosummary/mlx.core.array.log.html | 16 +- .../_autosummary/mlx.core.array.log10.html | 16 +- .../_autosummary/mlx.core.array.log1p.html | 16 +- .../_autosummary/mlx.core.array.log2.html | 16 +- .../mlx.core.array.logcumsumexp.html | 16 +- .../mlx.core.array.logsumexp.html | 16 +- .../_autosummary/mlx.core.array.max.html | 16 +- .../_autosummary/mlx.core.array.mean.html | 16 +- .../_autosummary/mlx.core.array.min.html | 16 +- .../_autosummary/mlx.core.array.moveaxis.html | 16 +- .../_autosummary/mlx.core.array.nbytes.html | 16 +- .../_autosummary/mlx.core.array.ndim.html | 16 +- .../_autosummary/mlx.core.array.prod.html | 16 +- .../_autosummary/mlx.core.array.real.html | 16 +- .../mlx.core.array.reciprocal.html | 16 +- .../_autosummary/mlx.core.array.reshape.html | 16 +- .../_autosummary/mlx.core.array.round.html | 16 +- .../_autosummary/mlx.core.array.rsqrt.html | 16 +- .../_autosummary/mlx.core.array.shape.html | 16 +- .../_autosummary/mlx.core.array.sin.html | 16 +- .../_autosummary/mlx.core.array.size.html | 16 +- .../_autosummary/mlx.core.array.split.html | 16 +- .../_autosummary/mlx.core.array.sqrt.html | 16 +- .../_autosummary/mlx.core.array.square.html | 16 +- .../_autosummary/mlx.core.array.squeeze.html | 16 +- .../_autosummary/mlx.core.array.std.html | 16 +- .../_autosummary/mlx.core.array.sum.html | 16 +- .../_autosummary/mlx.core.array.swapaxes.html | 16 +- .../_autosummary/mlx.core.array.tolist.html | 16 +- .../mlx.core.array.transpose.html | 16 +- .../_autosummary/mlx.core.array.var.html | 16 +- .../_autosummary/mlx.core.array.view.html | 16 +- .../_autosummary/mlx.core.array_equal.html | 16 +- .../_autosummary/mlx.core.as_strided.html | 16 +- .../_autosummary/mlx.core.async_eval.html | 16 +- .../_autosummary/mlx.core.atleast_1d.html | 16 +- .../_autosummary/mlx.core.atleast_2d.html | 16 +- .../_autosummary/mlx.core.atleast_3d.html | 16 +- .../_autosummary/mlx.core.bitwise_and.html | 16 +- .../_autosummary/mlx.core.bitwise_invert.html | 16 +- .../_autosummary/mlx.core.bitwise_or.html | 16 +- .../_autosummary/mlx.core.bitwise_xor.html | 16 +- .../mlx.core.block_masked_mm.html | 16 +- .../mlx.core.broadcast_arrays.html | 16 +- .../_autosummary/mlx.core.broadcast_to.html | 16 +- .../python/_autosummary/mlx.core.ceil.html | 16 +- .../_autosummary/mlx.core.clear_cache.html | 16 +- .../python/_autosummary/mlx.core.clip.html | 16 +- .../python/_autosummary/mlx.core.compile.html | 16 +- .../_autosummary/mlx.core.concatenate.html | 16 +- .../python/_autosummary/mlx.core.conj.html | 16 +- .../_autosummary/mlx.core.conjugate.html | 16 +- .../_autosummary/mlx.core.contiguous.html | 16 +- .../python/_autosummary/mlx.core.conv1d.html | 16 +- .../python/_autosummary/mlx.core.conv2d.html | 16 +- .../python/_autosummary/mlx.core.conv3d.html | 16 +- .../_autosummary/mlx.core.conv_general.html | 16 +- .../mlx.core.conv_transpose1d.html | 16 +- .../mlx.core.conv_transpose2d.html | 16 +- .../mlx.core.conv_transpose3d.html | 16 +- .../_autosummary/mlx.core.convolve.html | 16 +- .../python/_autosummary/mlx.core.cos.html | 16 +- .../python/_autosummary/mlx.core.cosh.html | 16 +- .../mlx.core.cuda.is_available.html | 1039 ++++++++++++++++ .../python/_autosummary/mlx.core.cummax.html | 16 +- .../python/_autosummary/mlx.core.cummin.html | 16 +- .../python/_autosummary/mlx.core.cumprod.html | 16 +- .../python/_autosummary/mlx.core.cumsum.html | 
16 +- .../mlx.core.custom_function.html | 16 +- .../_autosummary/mlx.core.default_device.html | 16 +- .../_autosummary/mlx.core.default_stream.html | 16 +- .../python/_autosummary/mlx.core.degrees.html | 16 +- .../_autosummary/mlx.core.dequantize.html | 41 +- .../python/_autosummary/mlx.core.diag.html | 16 +- .../_autosummary/mlx.core.diagonal.html | 16 +- .../mlx.core.disable_compile.html | 16 +- .../mlx.core.distributed.Group.html | 16 +- .../mlx.core.distributed.all_gather.html | 16 +- .../mlx.core.distributed.all_sum.html | 16 +- .../mlx.core.distributed.init.html | 18 +- .../mlx.core.distributed.is_available.html | 16 +- .../mlx.core.distributed.recv.html | 16 +- .../mlx.core.distributed.recv_like.html | 16 +- .../mlx.core.distributed.send.html | 16 +- .../python/_autosummary/mlx.core.divide.html | 16 +- .../python/_autosummary/mlx.core.divmod.html | 16 +- .../python/_autosummary/mlx.core.einsum.html | 16 +- .../_autosummary/mlx.core.einsum_path.html | 16 +- .../_autosummary/mlx.core.enable_compile.html | 16 +- .../python/_autosummary/mlx.core.equal.html | 16 +- .../python/_autosummary/mlx.core.erf.html | 16 +- .../python/_autosummary/mlx.core.erfinv.html | 16 +- .../python/_autosummary/mlx.core.eval.html | 16 +- .../python/_autosummary/mlx.core.exp.html | 16 +- .../_autosummary/mlx.core.expand_dims.html | 16 +- .../python/_autosummary/mlx.core.expm1.html | 16 +- .../mlx.core.export_function.html | 16 +- .../_autosummary/mlx.core.export_to_dot.html | 16 +- .../_autosummary/mlx.core.exporter.html | 16 +- .../python/_autosummary/mlx.core.eye.html | 16 +- .../mlx.core.fast.cuda_kernel.html | 1093 +++++++++++++++++ .../mlx.core.fast.layer_norm.html | 16 +- .../mlx.core.fast.metal_kernel.html | 22 +- .../_autosummary/mlx.core.fast.rms_norm.html | 16 +- .../_autosummary/mlx.core.fast.rope.html | 16 +- ...ore.fast.scaled_dot_product_attention.html | 16 +- .../python/_autosummary/mlx.core.fft.fft.html | 16 +- .../_autosummary/mlx.core.fft.fft2.html | 16 +- .../_autosummary/mlx.core.fft.fftn.html | 16 +- .../_autosummary/mlx.core.fft.fftshift.html | 16 +- .../_autosummary/mlx.core.fft.ifft.html | 16 +- .../_autosummary/mlx.core.fft.ifft2.html | 16 +- .../_autosummary/mlx.core.fft.ifftn.html | 16 +- .../_autosummary/mlx.core.fft.ifftshift.html | 16 +- .../_autosummary/mlx.core.fft.irfft.html | 16 +- .../_autosummary/mlx.core.fft.irfft2.html | 16 +- .../_autosummary/mlx.core.fft.irfftn.html | 16 +- .../_autosummary/mlx.core.fft.rfft.html | 16 +- .../_autosummary/mlx.core.fft.rfft2.html | 16 +- .../_autosummary/mlx.core.fft.rfftn.html | 16 +- .../python/_autosummary/mlx.core.finfo.html | 16 +- .../python/_autosummary/mlx.core.flatten.html | 16 +- .../python/_autosummary/mlx.core.floor.html | 16 +- .../_autosummary/mlx.core.floor_divide.html | 16 +- .../python/_autosummary/mlx.core.full.html | 16 +- .../_autosummary/mlx.core.gather_mm.html | 16 +- .../_autosummary/mlx.core.gather_qmm.html | 22 +- .../mlx.core.get_active_memory.html | 16 +- .../mlx.core.get_cache_memory.html | 16 +- .../mlx.core.get_peak_memory.html | 16 +- .../python/_autosummary/mlx.core.grad.html | 16 +- .../python/_autosummary/mlx.core.greater.html | 16 +- .../_autosummary/mlx.core.greater_equal.html | 16 +- .../mlx.core.hadamard_transform.html | 16 +- .../_autosummary/mlx.core.identity.html | 16 +- .../python/_autosummary/mlx.core.imag.html | 16 +- .../mlx.core.import_function.html | 16 +- .../python/_autosummary/mlx.core.inner.html | 16 +- .../python/_autosummary/mlx.core.isclose.html | 16 +- 
.../_autosummary/mlx.core.isfinite.html | 16 +- .../python/_autosummary/mlx.core.isinf.html | 16 +- .../python/_autosummary/mlx.core.isnan.html | 16 +- .../_autosummary/mlx.core.isneginf.html | 16 +- .../_autosummary/mlx.core.isposinf.html | 16 +- .../_autosummary/mlx.core.issubdtype.html | 16 +- .../python/_autosummary/mlx.core.jvp.html | 16 +- .../python/_autosummary/mlx.core.kron.html | 16 +- .../_autosummary/mlx.core.left_shift.html | 16 +- .../python/_autosummary/mlx.core.less.html | 16 +- .../_autosummary/mlx.core.less_equal.html | 16 +- .../mlx.core.linalg.cholesky.html | 16 +- .../mlx.core.linalg.cholesky_inv.html | 16 +- .../_autosummary/mlx.core.linalg.cross.html | 16 +- .../_autosummary/mlx.core.linalg.eig.html | 16 +- .../_autosummary/mlx.core.linalg.eigh.html | 16 +- .../_autosummary/mlx.core.linalg.eigvals.html | 16 +- .../mlx.core.linalg.eigvalsh.html | 16 +- .../_autosummary/mlx.core.linalg.inv.html | 16 +- .../_autosummary/mlx.core.linalg.lu.html | 16 +- .../mlx.core.linalg.lu_factor.html | 16 +- .../_autosummary/mlx.core.linalg.norm.html | 16 +- .../_autosummary/mlx.core.linalg.pinv.html | 16 +- .../_autosummary/mlx.core.linalg.qr.html | 16 +- .../_autosummary/mlx.core.linalg.solve.html | 16 +- .../mlx.core.linalg.solve_triangular.html | 16 +- .../_autosummary/mlx.core.linalg.svd.html | 16 +- .../_autosummary/mlx.core.linalg.tri_inv.html | 16 +- .../_autosummary/mlx.core.linspace.html | 16 +- .../python/_autosummary/mlx.core.load.html | 20 +- .../python/_autosummary/mlx.core.log.html | 16 +- .../python/_autosummary/mlx.core.log10.html | 16 +- .../python/_autosummary/mlx.core.log1p.html | 16 +- .../python/_autosummary/mlx.core.log2.html | 16 +- .../_autosummary/mlx.core.logaddexp.html | 16 +- .../_autosummary/mlx.core.logcumsumexp.html | 16 +- .../_autosummary/mlx.core.logical_and.html | 16 +- .../_autosummary/mlx.core.logical_not.html | 16 +- .../_autosummary/mlx.core.logical_or.html | 16 +- .../_autosummary/mlx.core.logsumexp.html | 16 +- .../python/_autosummary/mlx.core.matmul.html | 16 +- .../python/_autosummary/mlx.core.max.html | 16 +- .../python/_autosummary/mlx.core.maximum.html | 16 +- .../python/_autosummary/mlx.core.mean.html | 16 +- .../_autosummary/mlx.core.meshgrid.html | 16 +- .../mlx.core.metal.device_info.html | 16 +- .../mlx.core.metal.is_available.html | 16 +- .../mlx.core.metal.start_capture.html | 16 +- .../mlx.core.metal.stop_capture.html | 22 +- .../python/_autosummary/mlx.core.min.html | 16 +- .../python/_autosummary/mlx.core.minimum.html | 16 +- .../_autosummary/mlx.core.moveaxis.html | 16 +- .../_autosummary/mlx.core.multiply.html | 16 +- .../_autosummary/mlx.core.nan_to_num.html | 16 +- .../_autosummary/mlx.core.negative.html | 16 +- .../_autosummary/mlx.core.new_stream.html | 16 +- .../_autosummary/mlx.core.not_equal.html | 16 +- .../python/_autosummary/mlx.core.ones.html | 16 +- .../_autosummary/mlx.core.ones_like.html | 16 +- .../python/_autosummary/mlx.core.outer.html | 16 +- .../python/_autosummary/mlx.core.pad.html | 16 +- .../_autosummary/mlx.core.partition.html | 16 +- .../python/_autosummary/mlx.core.power.html | 16 +- .../python/_autosummary/mlx.core.prod.html | 16 +- .../_autosummary/mlx.core.put_along_axis.html | 16 +- .../_autosummary/mlx.core.quantize.html | 87 +- .../mlx.core.quantized_matmul.html | 22 +- .../python/_autosummary/mlx.core.radians.html | 16 +- .../mlx.core.random.bernoulli.html | 16 +- .../mlx.core.random.categorical.html | 16 +- .../_autosummary/mlx.core.random.gumbel.html | 16 +- 
.../_autosummary/mlx.core.random.key.html | 16 +- .../_autosummary/mlx.core.random.laplace.html | 16 +- .../mlx.core.random.multivariate_normal.html | 16 +- .../_autosummary/mlx.core.random.normal.html | 16 +- .../mlx.core.random.permutation.html | 16 +- .../_autosummary/mlx.core.random.randint.html | 16 +- .../_autosummary/mlx.core.random.seed.html | 16 +- .../_autosummary/mlx.core.random.split.html | 16 +- .../mlx.core.random.truncated_normal.html | 16 +- .../_autosummary/mlx.core.random.uniform.html | 16 +- .../python/_autosummary/mlx.core.real.html | 16 +- .../_autosummary/mlx.core.reciprocal.html | 16 +- .../_autosummary/mlx.core.remainder.html | 16 +- .../python/_autosummary/mlx.core.repeat.html | 16 +- .../mlx.core.reset_peak_memory.html | 16 +- .../python/_autosummary/mlx.core.reshape.html | 16 +- .../_autosummary/mlx.core.right_shift.html | 16 +- .../python/_autosummary/mlx.core.roll.html | 16 +- .../python/_autosummary/mlx.core.round.html | 16 +- .../python/_autosummary/mlx.core.rsqrt.html | 16 +- .../python/_autosummary/mlx.core.save.html | 20 +- .../_autosummary/mlx.core.save_gguf.html | 20 +- .../mlx.core.save_safetensors.html | 20 +- .../python/_autosummary/mlx.core.savez.html | 20 +- .../mlx.core.savez_compressed.html | 20 +- .../mlx.core.set_cache_limit.html | 16 +- .../mlx.core.set_default_device.html | 16 +- .../mlx.core.set_default_stream.html | 16 +- .../mlx.core.set_memory_limit.html | 16 +- .../mlx.core.set_wired_limit.html | 16 +- .../python/_autosummary/mlx.core.sigmoid.html | 16 +- .../python/_autosummary/mlx.core.sign.html | 16 +- .../python/_autosummary/mlx.core.sin.html | 16 +- .../python/_autosummary/mlx.core.sinh.html | 16 +- .../python/_autosummary/mlx.core.slice.html | 16 +- .../_autosummary/mlx.core.slice_update.html | 16 +- .../python/_autosummary/mlx.core.softmax.html | 16 +- .../python/_autosummary/mlx.core.sort.html | 16 +- .../python/_autosummary/mlx.core.split.html | 16 +- .../python/_autosummary/mlx.core.sqrt.html | 16 +- .../python/_autosummary/mlx.core.square.html | 16 +- .../python/_autosummary/mlx.core.squeeze.html | 16 +- .../python/_autosummary/mlx.core.stack.html | 16 +- .../python/_autosummary/mlx.core.std.html | 16 +- .../_autosummary/mlx.core.stop_gradient.html | 16 +- .../python/_autosummary/mlx.core.stream.html | 16 +- .../_autosummary/mlx.core.subtract.html | 16 +- .../python/_autosummary/mlx.core.sum.html | 16 +- .../_autosummary/mlx.core.swapaxes.html | 16 +- .../_autosummary/mlx.core.synchronize.html | 16 +- .../python/_autosummary/mlx.core.take.html | 16 +- .../mlx.core.take_along_axis.html | 16 +- .../python/_autosummary/mlx.core.tan.html | 16 +- .../python/_autosummary/mlx.core.tanh.html | 16 +- .../_autosummary/mlx.core.tensordot.html | 16 +- .../python/_autosummary/mlx.core.tile.html | 16 +- .../python/_autosummary/mlx.core.topk.html | 16 +- .../python/_autosummary/mlx.core.trace.html | 16 +- .../_autosummary/mlx.core.transpose.html | 16 +- .../python/_autosummary/mlx.core.tri.html | 16 +- .../python/_autosummary/mlx.core.tril.html | 16 +- .../python/_autosummary/mlx.core.triu.html | 16 +- .../_autosummary/mlx.core.unflatten.html | 16 +- .../_autosummary/mlx.core.value_and_grad.html | 16 +- .../python/_autosummary/mlx.core.var.html | 16 +- .../python/_autosummary/mlx.core.view.html | 16 +- .../python/_autosummary/mlx.core.vjp.html | 16 +- .../python/_autosummary/mlx.core.vmap.html | 16 +- .../python/_autosummary/mlx.core.where.html | 16 +- .../python/_autosummary/mlx.core.zeros.html | 16 +- 
.../_autosummary/mlx.core.zeros_like.html | 16 +- .../mlx.nn.average_gradients.html | 21 +- .../python/_autosummary/mlx.nn.quantize.html | 20 +- .../_autosummary/mlx.nn.value_and_grad.html | 16 +- .../mlx.optimizers.clip_grad_norm.html | 16 +- .../_autosummary/mlx.utils.tree_flatten.html | 16 +- .../_autosummary/mlx.utils.tree_map.html | 16 +- .../mlx.utils.tree_map_with_path.html | 16 +- .../_autosummary/mlx.utils.tree_reduce.html | 16 +- .../mlx.utils.tree_unflatten.html | 16 +- .../python/_autosummary/stream_class.html | 16 +- docs/build/html/python/array.html | 16 +- docs/build/html/python/cuda.html | 1016 +++++++++++++++ docs/build/html/python/data_types.html | 16 +- .../html/python/devices_and_streams.html | 16 +- docs/build/html/python/distributed.html | 16 +- docs/build/html/python/export.html | 16 +- docs/build/html/python/fast.html | 19 +- docs/build/html/python/fft.html | 22 +- docs/build/html/python/linalg.html | 16 +- docs/build/html/python/memory_management.html | 22 +- docs/build/html/python/metal.html | 16 +- docs/build/html/python/nn.html | 18 +- .../python/nn/_autosummary/mlx.nn.ALiBi.html | 16 +- .../nn/_autosummary/mlx.nn.AvgPool1d.html | 16 +- .../nn/_autosummary/mlx.nn.AvgPool2d.html | 16 +- .../nn/_autosummary/mlx.nn.AvgPool3d.html | 16 +- .../nn/_autosummary/mlx.nn.BatchNorm.html | 16 +- .../python/nn/_autosummary/mlx.nn.CELU.html | 16 +- .../python/nn/_autosummary/mlx.nn.Conv1d.html | 16 +- .../python/nn/_autosummary/mlx.nn.Conv2d.html | 16 +- .../python/nn/_autosummary/mlx.nn.Conv3d.html | 16 +- .../_autosummary/mlx.nn.ConvTranspose1d.html | 16 +- .../_autosummary/mlx.nn.ConvTranspose2d.html | 16 +- .../_autosummary/mlx.nn.ConvTranspose3d.html | 16 +- .../nn/_autosummary/mlx.nn.Dropout.html | 16 +- .../nn/_autosummary/mlx.nn.Dropout2d.html | 16 +- .../nn/_autosummary/mlx.nn.Dropout3d.html | 16 +- .../python/nn/_autosummary/mlx.nn.ELU.html | 16 +- .../nn/_autosummary/mlx.nn.Embedding.html | 18 +- .../python/nn/_autosummary/mlx.nn.GELU.html | 16 +- .../python/nn/_autosummary/mlx.nn.GLU.html | 16 +- .../python/nn/_autosummary/mlx.nn.GRU.html | 16 +- .../nn/_autosummary/mlx.nn.GroupNorm.html | 16 +- .../nn/_autosummary/mlx.nn.HardShrink.html | 16 +- .../nn/_autosummary/mlx.nn.HardTanh.html | 16 +- .../nn/_autosummary/mlx.nn.Hardswish.html | 16 +- .../nn/_autosummary/mlx.nn.InstanceNorm.html | 16 +- .../python/nn/_autosummary/mlx.nn.LSTM.html | 16 +- .../nn/_autosummary/mlx.nn.LayerNorm.html | 16 +- .../nn/_autosummary/mlx.nn.LeakyReLU.html | 16 +- .../python/nn/_autosummary/mlx.nn.Linear.html | 18 +- .../nn/_autosummary/mlx.nn.LogSigmoid.html | 16 +- .../nn/_autosummary/mlx.nn.LogSoftmax.html | 16 +- .../nn/_autosummary/mlx.nn.MaxPool1d.html | 16 +- .../nn/_autosummary/mlx.nn.MaxPool2d.html | 16 +- .../nn/_autosummary/mlx.nn.MaxPool3d.html | 16 +- .../python/nn/_autosummary/mlx.nn.Mish.html | 16 +- .../nn/_autosummary/mlx.nn.Module.apply.html | 16 +- .../mlx.nn.Module.apply_to_modules.html | 16 +- .../_autosummary/mlx.nn.Module.children.html | 16 +- .../nn/_autosummary/mlx.nn.Module.eval.html | 16 +- .../mlx.nn.Module.filter_and_map.html | 16 +- .../nn/_autosummary/mlx.nn.Module.freeze.html | 16 +- .../mlx.nn.Module.leaf_modules.html | 16 +- .../mlx.nn.Module.load_weights.html | 16 +- .../_autosummary/mlx.nn.Module.modules.html | 16 +- .../mlx.nn.Module.named_modules.html | 16 +- .../mlx.nn.Module.parameters.html | 16 +- .../mlx.nn.Module.save_weights.html | 16 +- .../_autosummary/mlx.nn.Module.set_dtype.html | 16 +- .../nn/_autosummary/mlx.nn.Module.state.html | 16 
+- .../nn/_autosummary/mlx.nn.Module.train.html | 16 +- .../mlx.nn.Module.trainable_parameters.html | 16 +- .../_autosummary/mlx.nn.Module.training.html | 16 +- .../_autosummary/mlx.nn.Module.unfreeze.html | 16 +- .../nn/_autosummary/mlx.nn.Module.update.html | 16 +- .../mlx.nn.Module.update_modules.html | 16 +- .../mlx.nn.MultiHeadAttention.html | 16 +- .../python/nn/_autosummary/mlx.nn.PReLU.html | 16 +- .../mlx.nn.QuantizedEmbedding.html | 20 +- .../_autosummary/mlx.nn.QuantizedLinear.html | 22 +- .../nn/_autosummary/mlx.nn.RMSNorm.html | 16 +- .../python/nn/_autosummary/mlx.nn.RNN.html | 16 +- .../python/nn/_autosummary/mlx.nn.ReLU.html | 16 +- .../python/nn/_autosummary/mlx.nn.ReLU6.html | 16 +- .../python/nn/_autosummary/mlx.nn.RoPE.html | 16 +- .../python/nn/_autosummary/mlx.nn.SELU.html | 16 +- .../nn/_autosummary/mlx.nn.Sequential.html | 16 +- .../python/nn/_autosummary/mlx.nn.SiLU.html | 16 +- .../nn/_autosummary/mlx.nn.Sigmoid.html | 16 +- .../mlx.nn.SinusoidalPositionalEncoding.html | 16 +- .../nn/_autosummary/mlx.nn.Softmax.html | 16 +- .../nn/_autosummary/mlx.nn.Softmin.html | 16 +- .../nn/_autosummary/mlx.nn.Softplus.html | 16 +- .../nn/_autosummary/mlx.nn.Softshrink.html | 16 +- .../nn/_autosummary/mlx.nn.Softsign.html | 16 +- .../python/nn/_autosummary/mlx.nn.Step.html | 16 +- .../python/nn/_autosummary/mlx.nn.Tanh.html | 16 +- .../nn/_autosummary/mlx.nn.Transformer.html | 16 +- .../nn/_autosummary/mlx.nn.Upsample.html | 16 +- .../nn/_autosummary/mlx.nn.init.constant.html | 16 +- .../mlx.nn.init.glorot_normal.html | 16 +- .../mlx.nn.init.glorot_uniform.html | 16 +- .../_autosummary/mlx.nn.init.he_normal.html | 16 +- .../_autosummary/mlx.nn.init.he_uniform.html | 16 +- .../nn/_autosummary/mlx.nn.init.identity.html | 16 +- .../nn/_autosummary/mlx.nn.init.normal.html | 16 +- .../nn/_autosummary/mlx.nn.init.uniform.html | 16 +- .../_autosummary_functions/mlx.nn.celu.html | 16 +- .../nn/_autosummary_functions/mlx.nn.elu.html | 16 +- .../_autosummary_functions/mlx.nn.gelu.html | 16 +- .../mlx.nn.gelu_approx.html | 16 +- .../mlx.nn.gelu_fast_approx.html | 16 +- .../nn/_autosummary_functions/mlx.nn.glu.html | 16 +- .../mlx.nn.hard_shrink.html | 16 +- .../mlx.nn.hard_tanh.html | 16 +- .../mlx.nn.hardswish.html | 16 +- .../mlx.nn.leaky_relu.html | 16 +- .../mlx.nn.log_sigmoid.html | 16 +- .../mlx.nn.log_softmax.html | 16 +- .../mlx.nn.losses.binary_cross_entropy.html | 16 +- .../mlx.nn.losses.cosine_similarity_loss.html | 16 +- .../mlx.nn.losses.cross_entropy.html | 16 +- .../mlx.nn.losses.gaussian_nll_loss.html | 16 +- .../mlx.nn.losses.hinge_loss.html | 16 +- .../mlx.nn.losses.huber_loss.html | 16 +- .../mlx.nn.losses.kl_div_loss.html | 16 +- .../mlx.nn.losses.l1_loss.html | 16 +- .../mlx.nn.losses.log_cosh_loss.html | 16 +- .../mlx.nn.losses.margin_ranking_loss.html | 16 +- .../mlx.nn.losses.mse_loss.html | 16 +- .../mlx.nn.losses.nll_loss.html | 16 +- .../mlx.nn.losses.smooth_l1_loss.html | 16 +- .../mlx.nn.losses.triplet_loss.html | 16 +- .../_autosummary_functions/mlx.nn.mish.html | 16 +- .../_autosummary_functions/mlx.nn.prelu.html | 16 +- .../_autosummary_functions/mlx.nn.relu.html | 16 +- .../_autosummary_functions/mlx.nn.relu6.html | 16 +- .../_autosummary_functions/mlx.nn.selu.html | 16 +- .../mlx.nn.sigmoid.html | 16 +- .../_autosummary_functions/mlx.nn.silu.html | 16 +- .../mlx.nn.softmax.html | 16 +- .../mlx.nn.softmin.html | 16 +- .../mlx.nn.softplus.html | 16 +- .../mlx.nn.softshrink.html | 16 +- .../_autosummary_functions/mlx.nn.step.html | 16 +- 
.../_autosummary_functions/mlx.nn.tanh.html | 16 +- docs/build/html/python/nn/functions.html | 16 +- docs/build/html/python/nn/init.html | 16 +- docs/build/html/python/nn/layers.html | 16 +- docs/build/html/python/nn/losses.html | 16 +- docs/build/html/python/nn/module.html | 16 +- docs/build/html/python/ops.html | 26 +- docs/build/html/python/optimizers.html | 16 +- .../_autosummary/mlx.optimizers.AdaDelta.html | 16 +- .../mlx.optimizers.Adafactor.html | 16 +- .../_autosummary/mlx.optimizers.Adagrad.html | 16 +- .../_autosummary/mlx.optimizers.Adam.html | 16 +- .../_autosummary/mlx.optimizers.AdamW.html | 16 +- .../_autosummary/mlx.optimizers.Adamax.html | 16 +- .../_autosummary/mlx.optimizers.Lion.html | 16 +- .../mlx.optimizers.MultiOptimizer.html | 16 +- .../_autosummary/mlx.optimizers.Muon.html | 16 +- ....optimizers.Optimizer.apply_gradients.html | 16 +- .../mlx.optimizers.Optimizer.init.html | 16 +- .../mlx.optimizers.Optimizer.state.html | 16 +- .../mlx.optimizers.Optimizer.update.html | 16 +- .../_autosummary/mlx.optimizers.RMSprop.html | 16 +- .../_autosummary/mlx.optimizers.SGD.html | 16 +- .../mlx.optimizers.cosine_decay.html | 16 +- .../mlx.optimizers.exponential_decay.html | 16 +- .../mlx.optimizers.join_schedules.html | 16 +- .../mlx.optimizers.linear_schedule.html | 16 +- .../mlx.optimizers.step_decay.html | 16 +- .../python/optimizers/common_optimizers.html | 16 +- .../html/python/optimizers/optimizer.html | 16 +- .../html/python/optimizers/schedulers.html | 16 +- docs/build/html/python/random.html | 16 +- docs/build/html/python/transforms.html | 16 +- docs/build/html/python/tree_utils.html | 16 +- docs/build/html/search.html | 16 +- docs/build/html/searchindex.js | 2 +- docs/build/html/usage/compile.html | 18 +- docs/build/html/usage/distributed.html | 16 +- docs/build/html/usage/export.html | 16 +- .../build/html/usage/function_transforms.html | 16 +- docs/build/html/usage/indexing.html | 16 +- .../html/usage/launching_distributed.html | 16 +- docs/build/html/usage/lazy_evaluation.html | 16 +- docs/build/html/usage/numpy.html | 16 +- docs/build/html/usage/quick_start.html | 16 +- docs/build/html/usage/saving_and_loading.html | 16 +- docs/build/html/usage/unified_memory.html | 16 +- docs/build/html/usage/using_streams.html | 16 +- 547 files changed, 10067 insertions(+), 2222 deletions(-) create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.cuda.is_available.rst create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.fast.cuda_kernel.rst create mode 100644 docs/build/html/_sources/python/cuda.rst create mode 100644 docs/build/html/_static/check-solid.svg create mode 100644 docs/build/html/_static/clipboard.min.js create mode 100644 docs/build/html/_static/copy-button.svg create mode 100644 docs/build/html/_static/copybutton.css create mode 100644 docs/build/html/_static/copybutton.js create mode 100644 docs/build/html/_static/copybutton_funcs.js create mode 100644 docs/build/html/python/_autosummary/mlx.core.cuda.is_available.html create mode 100644 docs/build/html/python/_autosummary/mlx.core.fast.cuda_kernel.html create mode 100644 docs/build/html/python/cuda.html diff --git a/docs/build/html/.buildinfo b/docs/build/html/.buildinfo index 05b88bc4b..81b4e9106 100644 --- a/docs/build/html/.buildinfo +++ b/docs/build/html/.buildinfo @@ -1,4 +1,4 @@ # Sphinx build info version 1 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
-config: 90bedb5997ff77e9adb28aaeab67a6e2 +config: 6e9fcd3fd9a477c32d79521f0d5d7188 tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs/build/html/_sources/dev/custom_metal_kernels.rst b/docs/build/html/_sources/dev/custom_metal_kernels.rst index 873b1e544..4c4ce65ae 100644 --- a/docs/build/html/_sources/dev/custom_metal_kernels.rst +++ b/docs/build/html/_sources/dev/custom_metal_kernels.rst @@ -127,7 +127,8 @@ relying on a copy from ``ensure_row_contiguous``: name="myexp_strided", input_names=["inp"], output_names=["out"], - source=source + source=source, + ensure_row_contiguous=False, ) def exp_elementwise(a: mx.array): @@ -138,7 +139,6 @@ relying on a copy from ``ensure_row_contiguous``: threadgroup=(256, 1, 1), output_shapes=[a.shape], output_dtypes=[a.dtype], - ensure_row_contiguous=False, ) return outputs[0] diff --git a/docs/build/html/_sources/index.rst b/docs/build/html/_sources/index.rst index 51e719572..76907dbe0 100644 --- a/docs/build/html/_sources/index.rst +++ b/docs/build/html/_sources/index.rst @@ -70,6 +70,7 @@ are the CPU and GPU. python/fft python/linalg python/metal + python/cuda python/memory_management python/nn python/optimizers diff --git a/docs/build/html/_sources/install.rst b/docs/build/html/_sources/install.rst index 1e7a015ca..da7470908 100644 --- a/docs/build/html/_sources/install.rst +++ b/docs/build/html/_sources/install.rst @@ -271,7 +271,7 @@ and the CUDA toolkit. For example on Ubuntu, run the following: dpkg -i cuda-keyring_1.1-1_all.deb apt-get update -y apt-get -y install cuda-toolkit-12-9 - apt-get install libblas-dev liblapack-dev liblapacke-dev -y + apt-get install libblas-dev liblapack-dev liblapacke-dev libcudnn9-dev-cuda-12 -y When building either the Python or C++ APIs make sure to pass the cmake flag diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.cuda.is_available.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.cuda.is_available.rst new file mode 100644 index 000000000..547f297c8 --- /dev/null +++ b/docs/build/html/_sources/python/_autosummary/mlx.core.cuda.is_available.rst @@ -0,0 +1,6 @@ +mlx.core.cuda.is\_available +=========================== + +.. currentmodule:: mlx.core.cuda + +.. autofunction:: is_available \ No newline at end of file diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.fast.cuda_kernel.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.fast.cuda_kernel.rst new file mode 100644 index 000000000..f636d2169 --- /dev/null +++ b/docs/build/html/_sources/python/_autosummary/mlx.core.fast.cuda_kernel.rst @@ -0,0 +1,6 @@ +mlx.core.fast.cuda\_kernel +========================== + +.. currentmodule:: mlx.core.fast + +.. autofunction:: cuda_kernel \ No newline at end of file diff --git a/docs/build/html/_sources/python/cuda.rst b/docs/build/html/_sources/python/cuda.rst new file mode 100644 index 000000000..932d36b5e --- /dev/null +++ b/docs/build/html/_sources/python/cuda.rst @@ -0,0 +1,9 @@ +CUDA +===== + +.. currentmodule:: mlx.core.cuda + +.. 
autosummary:: + :toctree: _autosummary + + is_available diff --git a/docs/build/html/_sources/python/fast.rst b/docs/build/html/_sources/python/fast.rst index f78f40563..b250dcb18 100644 --- a/docs/build/html/_sources/python/fast.rst +++ b/docs/build/html/_sources/python/fast.rst @@ -13,3 +13,4 @@ Fast rope scaled_dot_product_attention metal_kernel + cuda_kernel diff --git a/docs/build/html/_sources/usage/compile.rst b/docs/build/html/_sources/usage/compile.rst index 7fe0ffd4f..ae01bb1f3 100644 --- a/docs/build/html/_sources/usage/compile.rst +++ b/docs/build/html/_sources/usage/compile.rst @@ -225,7 +225,7 @@ In some cases returning updated state can be pretty inconvenient. Hence, def fun(x, y): z = x + y state.append(z) - return mx.exp(z), state + return mx.exp(z) fun(mx.array(1.0), mx.array(2.0)) # Prints [array(3, dtype=float32)] diff --git a/docs/build/html/_static/check-solid.svg b/docs/build/html/_static/check-solid.svg new file mode 100644 index 000000000..92fad4b5c --- /dev/null +++ b/docs/build/html/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/docs/build/html/_static/clipboard.min.js b/docs/build/html/_static/clipboard.min.js new file mode 100644 index 000000000..54b3c4638 --- /dev/null +++ b/docs/build/html/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/docs/build/html/_static/copybutton.css b/docs/build/html/_static/copybutton.css new file mode 100644 index 000000000..f1916ec7d --- /dev/null +++ b/docs/build/html/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *

Short

+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/docs/build/html/_static/copybutton.js b/docs/build/html/_static/copybutton.js new file mode 100644 index 000000000..2ea7ff3e2 --- /dev/null +++ b/docs/build/html/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` 
class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. +var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos'; + + let text = filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/docs/build/html/_static/copybutton_funcs.js b/docs/build/html/_static/copybutton_funcs.js new file mode 100644 index 000000000..dbe1aaad7 --- /dev/null +++ b/docs/build/html/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/docs/build/html/_static/documentation_options.js b/docs/build/html/_static/documentation_options.js index 37480da8c..8afd8539f 100644 --- a/docs/build/html/_static/documentation_options.js +++ b/docs/build/html/_static/documentation_options.js @@ -1,5 +1,5 @@ const DOCUMENTATION_OPTIONS = { - VERSION: '0.28.0', + VERSION: '0.29.0', LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', diff --git a/docs/build/html/cpp/ops.html b/docs/build/html/cpp/ops.html index c6c38c562..072b95350 100644 --- a/docs/build/html/cpp/ops.html +++ b/docs/build/html/cpp/ops.html @@ -8,7 +8,7 @@ - Operations — MLX 0.28.0 documentation + Operations — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
  • mlx.core.fast.rope
  • mlx.core.fast.scaled_dot_product_attention
  • mlx.core.fast.metal_kernel
  • +
  • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -1177,10 +1185,10 @@ document.write(`
    • conv_transpose1d()
    • conv_transpose2d()
    • conv_transpose3d()
    • -
    • quantized_matmul()
    • -
    • quantize()
    • -
    • dequantize()
    • -
    • gather_qmm()
    • +
    • quantized_matmul()
    • +
    • quantize()
    • +
    • dequantize()
    • +
    • gather_qmm()
    • tensordot()
    • tensordot()
    • outer()
    • @@ -2832,26 +2840,26 @@ document.write(`
      -
      -array quantized_matmul(array x, array w, array scales, array biases, bool transpose = true, int group_size = 64, int bits = 4, StreamOrDevice s = {})#
      +
      +array quantized_matmul(array x, array w, array scales, std::optional<array> biases = std::nullopt, bool transpose = true, int group_size = 64, int bits = 4, const std::string &mode = "affine", StreamOrDevice s = {})#

      Quantized matmul multiplies x with a quantized matrix w.

      -
      -std::tuple<array, array, array> quantize(const array &w, int group_size = 64, int bits = 4, StreamOrDevice s = {})#
      +
      +std::vector<array> quantize(const array &w, int group_size = 64, int bits = 4, const std::string &mode = "affine", StreamOrDevice s = {})#

      Quantize a matrix along its last axis.

      -
      -array dequantize(const array &w, const array &scales, const array &biases, int group_size = 64, int bits = 4, StreamOrDevice s = {})#
      -

      Dequantize a matrix produced by quantize()

      +
      +array dequantize(const array &w, const array &scales, const std::optional<array> &biases = std::nullopt, int group_size = 64, int bits = 4, const std::string &mode = "affine", StreamOrDevice s = {})#
      +

      Dequantize a matrix produced by quantize()

      -
      -array gather_qmm(const array &x, const array &w, const array &scales, const array &biases, std::optional<array> lhs_indices = std::nullopt, std::optional<array> rhs_indices = std::nullopt, bool transpose = true, int group_size = 64, int bits = 4, bool sorted_indices = false, StreamOrDevice s = {})#
      +
      +array gather_qmm(const array &x, const array &w, const array &scales, const std::optional<array> &biases = std::nullopt, std::optional<array> lhs_indices = std::nullopt, std::optional<array> rhs_indices = std::nullopt, bool transpose = true, int group_size = 64, int bits = 4, const std::string &mode = "affine", bool sorted_indices = false, StreamOrDevice s = {})#

      Compute matrix products with matrix-level gather.

      @@ -3415,10 +3423,10 @@ document.write(`
    • conv_transpose1d()
    • conv_transpose2d()
    • conv_transpose3d()
    • -
    • quantized_matmul()
    • -
    • quantize()
    • -
    • dequantize()
    • -
    • gather_qmm()
    • +
    • quantized_matmul()
    • +
    • quantize()
    • +
    • dequantize()
    • +
    • gather_qmm()
    • tensordot()
    • tensordot()
    • outer()
    • diff --git a/docs/build/html/dev/custom_metal_kernels.html b/docs/build/html/dev/custom_metal_kernels.html index 1a028f698..5f008fece 100644 --- a/docs/build/html/dev/custom_metal_kernels.html +++ b/docs/build/html/dev/custom_metal_kernels.html @@ -8,7 +8,7 @@ - Custom Metal Kernels — MLX 0.28.0 documentation + Custom Metal Kernels — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -1043,7 +1051,8 @@ relying on a copy from name="myexp_strided", input_names=["inp"], output_names=["out"], - source=source + source=source, + ensure_row_contiguous=False, ) def exp_elementwise(a: mx.array): @@ -1054,7 +1063,6 @@ relying on a copy from threadgroup=(256, 1, 1), output_shapes=[a.shape], output_dtypes=[a.dtype], - ensure_row_contiguous=False, ) return outputs[0] diff --git a/docs/build/html/dev/extensions.html b/docs/build/html/dev/extensions.html index f9bc9a815..a0f07474f 100644 --- a/docs/build/html/dev/extensions.html +++ b/docs/build/html/dev/extensions.html @@ -8,7 +8,7 @@ - Custom Extensions in MLX — MLX 0.28.0 documentation + Custom Extensions in MLX — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/dev/metal_debugger.html b/docs/build/html/dev/metal_debugger.html index 46a09523e..dbb45245e 100644 --- a/docs/build/html/dev/metal_debugger.html +++ b/docs/build/html/dev/metal_debugger.html @@ -8,7 +8,7 @@ - Metal Debugger — MLX 0.28.0 documentation + Metal Debugger — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/dev/mlx_in_cpp.html b/docs/build/html/dev/mlx_in_cpp.html index 018cfc089..9447d79a9 100644 --- a/docs/build/html/dev/mlx_in_cpp.html +++ b/docs/build/html/dev/mlx_in_cpp.html @@ -8,7 +8,7 @@ - Using MLX in C++ — MLX 0.28.0 documentation + Using MLX in C++ — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -136,8 +139,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -476,6 +479,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/examples/linear_regression.html b/docs/build/html/examples/linear_regression.html index 115bde2fc..641211937 100644 --- a/docs/build/html/examples/linear_regression.html +++ b/docs/build/html/examples/linear_regression.html @@ -8,7 +8,7 @@ - Linear Regression — MLX 0.28.0 documentation + Linear Regression — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/examples/llama-inference.html b/docs/build/html/examples/llama-inference.html index f7c8059c9..6f31fcc5b 100644 --- a/docs/build/html/examples/llama-inference.html +++ b/docs/build/html/examples/llama-inference.html @@ -8,7 +8,7 @@ - LLM inference — MLX 0.28.0 documentation + LLM inference — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/examples/mlp.html b/docs/build/html/examples/mlp.html index 0a5e8c667..9905d0532 100644 --- a/docs/build/html/examples/mlp.html +++ b/docs/build/html/examples/mlp.html @@ -8,7 +8,7 @@ - Multi-Layer Perceptron — MLX 0.28.0 documentation + Multi-Layer Perceptron — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/genindex.html b/docs/build/html/genindex.html index 3cdd41950..a6ecb54a7 100644 --- a/docs/build/html/genindex.html +++ b/docs/build/html/genindex.html @@ -7,7 +7,7 @@ - Index — MLX 0.28.0 documentation + Index — MLX 0.29.0 documentation @@ -29,15 +29,18 @@ + - + + + @@ -136,8 +139,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -476,6 +479,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    - +
  • FFT
  • +
  • CUDA
    +
  • Memory Management
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -1173,7 +1181,7 @@ and the CUDA toolkit. For example on Ubuntu, run the following:

      dpkg -i cuda-keyring_1.1-1_all.deb apt-get update -y apt-get -y install cuda-toolkit-12-9 -apt-get install libblas-dev liblapack-dev liblapacke-dev -y +apt-get install libblas-dev liblapack-dev liblapacke-dev libcudnn9-dev-cuda-12 -y

      When building either the Python or C++ APIs make sure to pass the cmake flag
diff --git a/docs/build/html/objects.inv b/docs/build/html/objects.inv
index dc6246693e44528b1d49d8552282cec485075b49..ec41f5c8ed5326909ee54ea5fb6de1b6c7e90656 100644
GIT binary patch
delta 27933
delta 27893
diff --git a/docs/build/html/python/_autosummary/mlx.core.Device.html b/docs/build/html/python/_autosummary/mlx.core.Device.html
index a8ab78755..1b1f93df5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Device.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Device.html
@@ -8,7 +8,7 @@
-    mlx.core.Device — MLX 0.28.0 documentation
+    mlx.core.Device — MLX 0.29.0 documentation
@@ -30,15 +30,18 @@ + - + + +
@@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home +
@@ -477,6 +480,7 @@

    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.Dtype.html b/docs/build/html/python/_autosummary/mlx.core.Dtype.html index 0d0614276..dbded74b5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.Dtype.html +++ b/docs/build/html/python/_autosummary/mlx.core.Dtype.html @@ -8,7 +8,7 @@ - mlx.core.Dtype — MLX 0.28.0 documentation + mlx.core.Dtype — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.DtypeCategory.html b/docs/build/html/python/_autosummary/mlx.core.DtypeCategory.html index 31f0f3956..4621801a2 100644 --- a/docs/build/html/python/_autosummary/mlx.core.DtypeCategory.html +++ b/docs/build/html/python/_autosummary/mlx.core.DtypeCategory.html @@ -8,7 +8,7 @@ - mlx.core.DtypeCategory — MLX 0.28.0 documentation + mlx.core.DtypeCategory — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.abs.html b/docs/build/html/python/_autosummary/mlx.core.abs.html index b40beb38f..e89422aad 100644 --- a/docs/build/html/python/_autosummary/mlx.core.abs.html +++ b/docs/build/html/python/_autosummary/mlx.core.abs.html @@ -8,7 +8,7 @@ - mlx.core.abs — MLX 0.28.0 documentation + mlx.core.abs — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.add.html b/docs/build/html/python/_autosummary/mlx.core.add.html index 980a915a0..d2d5fbdd1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.add.html +++ b/docs/build/html/python/_autosummary/mlx.core.add.html @@ -8,7 +8,7 @@ - mlx.core.add — MLX 0.28.0 documentation + mlx.core.add — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.addmm.html b/docs/build/html/python/_autosummary/mlx.core.addmm.html index dbe1b9feb..fc44f6d24 100644 --- a/docs/build/html/python/_autosummary/mlx.core.addmm.html +++ b/docs/build/html/python/_autosummary/mlx.core.addmm.html @@ -8,7 +8,7 @@ - mlx.core.addmm — MLX 0.28.0 documentation + mlx.core.addmm — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.all.html b/docs/build/html/python/_autosummary/mlx.core.all.html index c52b058bb..96f2905c9 100644 --- a/docs/build/html/python/_autosummary/mlx.core.all.html +++ b/docs/build/html/python/_autosummary/mlx.core.all.html @@ -8,7 +8,7 @@ - mlx.core.all — MLX 0.28.0 documentation + mlx.core.all — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.allclose.html b/docs/build/html/python/_autosummary/mlx.core.allclose.html index a5c9f47b7..0f647c6b6 100644 --- a/docs/build/html/python/_autosummary/mlx.core.allclose.html +++ b/docs/build/html/python/_autosummary/mlx.core.allclose.html @@ -8,7 +8,7 @@ - mlx.core.allclose — MLX 0.28.0 documentation + mlx.core.allclose — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.any.html b/docs/build/html/python/_autosummary/mlx.core.any.html index ffd99c6e4..b1cb21da0 100644 --- a/docs/build/html/python/_autosummary/mlx.core.any.html +++ b/docs/build/html/python/_autosummary/mlx.core.any.html @@ -8,7 +8,7 @@ - mlx.core.any — MLX 0.28.0 documentation + mlx.core.any — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arange.html b/docs/build/html/python/_autosummary/mlx.core.arange.html index 3be784cf9..8720fa95e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arange.html +++ b/docs/build/html/python/_autosummary/mlx.core.arange.html @@ -8,7 +8,7 @@ - mlx.core.arange — MLX 0.28.0 documentation + mlx.core.arange — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arccos.html b/docs/build/html/python/_autosummary/mlx.core.arccos.html index 34924ddce..8cd8164ed 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arccos.html +++ b/docs/build/html/python/_autosummary/mlx.core.arccos.html @@ -8,7 +8,7 @@ - mlx.core.arccos — MLX 0.28.0 documentation + mlx.core.arccos — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arccosh.html b/docs/build/html/python/_autosummary/mlx.core.arccosh.html index fd9580143..e139966f4 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arccosh.html +++ b/docs/build/html/python/_autosummary/mlx.core.arccosh.html @@ -8,7 +8,7 @@ - mlx.core.arccosh — MLX 0.28.0 documentation + mlx.core.arccosh — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsin.html b/docs/build/html/python/_autosummary/mlx.core.arcsin.html index 26a9e6a84..652237c2c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arcsin.html +++ b/docs/build/html/python/_autosummary/mlx.core.arcsin.html @@ -8,7 +8,7 @@ - mlx.core.arcsin — MLX 0.28.0 documentation + mlx.core.arcsin — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html index c6e2f2fd9..8a886c998 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html +++ b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html @@ -8,7 +8,7 @@ - mlx.core.arcsinh — MLX 0.28.0 documentation + mlx.core.arcsinh — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arctan.html b/docs/build/html/python/_autosummary/mlx.core.arctan.html index 11760deff..81b56f678 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arctan.html +++ b/docs/build/html/python/_autosummary/mlx.core.arctan.html @@ -8,7 +8,7 @@ - mlx.core.arctan — MLX 0.28.0 documentation + mlx.core.arctan — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arctan2.html b/docs/build/html/python/_autosummary/mlx.core.arctan2.html index b43d99702..02ccd4377 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arctan2.html +++ b/docs/build/html/python/_autosummary/mlx.core.arctan2.html @@ -8,7 +8,7 @@ - mlx.core.arctan2 — MLX 0.28.0 documentation + mlx.core.arctan2 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.arctanh.html b/docs/build/html/python/_autosummary/mlx.core.arctanh.html index b265702c3..103e77099 100644 --- a/docs/build/html/python/_autosummary/mlx.core.arctanh.html +++ b/docs/build/html/python/_autosummary/mlx.core.arctanh.html @@ -8,7 +8,7 @@ - mlx.core.arctanh — MLX 0.28.0 documentation + mlx.core.arctanh — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.argmax.html b/docs/build/html/python/_autosummary/mlx.core.argmax.html index d9761d80b..69100ce39 100644 --- a/docs/build/html/python/_autosummary/mlx.core.argmax.html +++ b/docs/build/html/python/_autosummary/mlx.core.argmax.html @@ -8,7 +8,7 @@ - mlx.core.argmax — MLX 0.28.0 documentation + mlx.core.argmax — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
  • This stretch of the patch applies one identical set of changes to each of the generated reference pages listed here (all under docs/build/html/python/_autosummary/). In every file, the version string in the page title and in the "documentation - Home" header link is bumped from "MLX 0.28.0" to "MLX 0.29.0", additional resource references are added in the page head, and the sidebar navigation gains mlx.core.fast.cuda_kernel (after mlx.core.fast.rope, mlx.core.fast.scaled_dot_product_attention, and mlx.core.fast.metal_kernel) plus a new CUDA section between FFT and Memory Management (which still lists mlx.core.get_active_memory and mlx.core.get_peak_memory). The pages touched by these repeated hunks, with their index lines, are:
    • mlx.core.argmin.html index 5be6a3084..0360ab7e6 100644
    • mlx.core.argpartition.html index bfb67199a..b51cb3d36 100644
    • mlx.core.argsort.html index 1e69b6156..b014c661e 100644
    • mlx.core.array.T.html index 6e2353871..96218c62b 100644
    • mlx.core.array.abs.html index 786eb4697..aaf3b1bdc 100644
    • mlx.core.array.all.html index ede578ccd..07012a13b 100644
    • mlx.core.array.any.html index 3583f1e88..82f267912 100644
    • mlx.core.array.argmax.html index 2d475fb4a..f9370d7fc 100644
    • mlx.core.array.argmin.html index a11d16e4e..594117338 100644
    • mlx.core.array.astype.html index 3d90ed685..e58bcdb9a 100644
    • mlx.core.array.at.html index f0004fb78..91492ab16 100644
    • mlx.core.array.conj.html index 174bcd6b0..1d1bffefa 100644
    • mlx.core.array.cos.html index 378de3ac9..1600eb6f6 100644
    • mlx.core.array.cummax.html index 025a75b01..3bee54f94 100644
    • mlx.core.array.cummin.html index ecfbbc765..644e39e34 100644
    • mlx.core.array.cumprod.html index 4cd7cb356..646521617 100644
    • mlx.core.array.cumsum.html index 8ecaa746b..f8073a1ca 100644
    • mlx.core.array.diag.html index 56ca3f391..dddfef5c9 100644
    • mlx.core.array.diagonal.html index 7c47577c1..114e0a436 100644
    • mlx.core.array.dtype.html index f23a94051..52266fcf7 100644
    • mlx.core.array.exp.html index d328c787a..776268e77 100644
    • mlx.core.array.flatten.html index 59bb49ce4..ddf29643f 100644
    • mlx.core.array.html index fd5a35b06..4a007f214 100644
    • mlx.core.array.imag.html index fb6174677..84d2aba44 100644
    • mlx.core.array.item.html index 4879fadf4..4cb51199e 100644
    • mlx.core.array.itemsize.html index 184b141a8..b98fab2ce 100644
    • mlx.core.array.log.html index f1e30359d..478f4a1d4 100644
    • mlx.core.array.log10.html index 79e0013ba..d551e81de 100644
    • mlx.core.array.log1p.html index 81bab97ba..da35f893d 100644
    • mlx.core.array.log2.html index a881b34db..e8dee3745 100644
    • mlx.core.array.logcumsumexp.html index b65052a73..24eb5c3c8 100644
    • mlx.core.array.logsumexp.html index c3acc1103..1603e4114 100644
    • mlx.core.array.max.html index 2d492011b..519efe3d4 100644
    • mlx.core.array.mean.html index db69ad64d..546cfa803 100644
    • mlx.core.array.min.html index c16aeb76b..96cf124e5 100644
    • mlx.core.array.moveaxis.html index 9710b4808..bcd099136 100644
    • mlx.core.array.nbytes.html index 02a84ccf0..b0366702e 100644
    • mlx.core.array.ndim.html index f48072224..0a90b7bae 100644
    • mlx.core.array.prod.html index 7f3328a94..e36d995f6 100644
    • mlx.core.array.real.html index 91833cebf..96a869d79 100644
    • mlx.core.array.reciprocal.html index c9b7e6b4a..0ecd966de 100644
    • mlx.core.array.reshape.html index 14221e327..8b80067af 100644
    • mlx.core.array.round.html index e46b328df..e8ea4ae9c 100644
    • mlx.core.array.rsqrt.html index febb607e7..605fee3ec 100644
    • mlx.core.array.shape.html index 730cab964..de36d8c53 100644
    • mlx.core.array.sin.html index 1f535f2a1..0acaedb56 100644
    • mlx.core.array.size.html index 759e45337..04c78f8c5 100644
    • mlx.core.array.split.html index 151ce94d2..b4e2824c6 100644
    • mlx.core.array.sqrt.html index cb9b93257..7ee17cd06 100644
    • mlx.core.array.square.html index c13fe2245..95d48e3bf 100644
    • mlx.core.array.squeeze.html index a60f2d9ab..c3f1b9d02 100644
    • mlx.core.array.std.html index 0fa259b9c..fd6a7dd95 100644
    • mlx.core.array.sum.html index 1e2345f2b..0f951b478 100644
    • mlx.core.array.swapaxes.html index b34829180..c84906c51 100644
    • mlx.core.array.tolist.html index 6349d4396..355c10db9 100644
    • mlx.core.array.transpose.html index 8244c6728..db61fe842 100644
    • mlx.core.array.var.html index 3f2211d13..574f3982b 100644
    • mlx.core.array.view.html index 4cd0e3b19..2d599e454 100644
    • mlx.core.array_equal.html index fa41b6179..41fe562fb 100644
    • mlx.core.as_strided.html index 3e3059cb5..b4d8ba394 100644
    • mlx.core.async_eval.html index 7bde95b24..f1d390caf 100644
    • mlx.core.atleast_1d.html index 266581c52..9564e9a5e 100644
    • mlx.core.atleast_2d.html index 436838528..20ffe68c8 100644
    • mlx.core.atleast_3d.html index 8f0281b10..7e694dcd5 100644
    • mlx.core.bitwise_and.html index e11c0371d..43adbc081 100644
    • mlx.core.bitwise_invert.html index 8b646639c..905ce0d00 100644
    • mlx.core.bitwise_or.html index 11bc5ba35..e6a7a3575 100644
    • mlx.core.bitwise_xor.html index a85c26ae2..edd179df4 100644
    • mlx.core.block_masked_mm.html index 20125c3c0..01f6d90af 100644
    • mlx.core.broadcast_arrays.html index 397a8904a..a0efe61ea 100644
    • mlx.core.broadcast_to.html index fa74670aa..a504bcc92 100644
    • mlx.core.ceil.html index 18f9a3a45..a88e00ae8 100644
    • mlx.core.clear_cache.html index 581c05586..ce46e0632 100644
    • mlx.core.clip.html index e0a003da4..970854b62 100644
    • mlx.core.compile.html index fb309cfd2..6f3db6f85 100644
    • mlx.core.concatenate.html index 6202a905d..37930b14d 100644
    • mlx.core.conj.html index 10dff7c66..17a32d356 100644
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conjugate.html b/docs/build/html/python/_autosummary/mlx.core.conjugate.html index 619b7f68f..ca1f1268e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conjugate.html +++ b/docs/build/html/python/_autosummary/mlx.core.conjugate.html @@ -8,7 +8,7 @@ - mlx.core.conjugate — MLX 0.28.0 documentation + mlx.core.conjugate — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.contiguous.html b/docs/build/html/python/_autosummary/mlx.core.contiguous.html index ba24a9c18..29145086e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.contiguous.html +++ b/docs/build/html/python/_autosummary/mlx.core.contiguous.html @@ -8,7 +8,7 @@ - mlx.core.contiguous — MLX 0.28.0 documentation + mlx.core.contiguous — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv1d.html b/docs/build/html/python/_autosummary/mlx.core.conv1d.html index a5afb5d59..94cff9a2a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv1d.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv1d.html @@ -8,7 +8,7 @@ - mlx.core.conv1d — MLX 0.28.0 documentation + mlx.core.conv1d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv2d.html b/docs/build/html/python/_autosummary/mlx.core.conv2d.html index 2608d1884..6a84428f8 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv2d.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv2d.html @@ -8,7 +8,7 @@ - mlx.core.conv2d — MLX 0.28.0 documentation + mlx.core.conv2d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv3d.html b/docs/build/html/python/_autosummary/mlx.core.conv3d.html index afe644295..8edd37a0b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv3d.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv3d.html @@ -8,7 +8,7 @@ - mlx.core.conv3d — MLX 0.28.0 documentation + mlx.core.conv3d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv_general.html b/docs/build/html/python/_autosummary/mlx.core.conv_general.html index cda10d74e..85d41cc3c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv_general.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv_general.html @@ -8,7 +8,7 @@ - mlx.core.conv_general — MLX 0.28.0 documentation + mlx.core.conv_general — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv_transpose1d.html b/docs/build/html/python/_autosummary/mlx.core.conv_transpose1d.html index 07a0bcfde..494307e01 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv_transpose1d.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv_transpose1d.html @@ -8,7 +8,7 @@ - mlx.core.conv_transpose1d — MLX 0.28.0 documentation + mlx.core.conv_transpose1d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv_transpose2d.html b/docs/build/html/python/_autosummary/mlx.core.conv_transpose2d.html index bf1b16ff3..30428177e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv_transpose2d.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv_transpose2d.html @@ -8,7 +8,7 @@ - mlx.core.conv_transpose2d — MLX 0.28.0 documentation + mlx.core.conv_transpose2d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.conv_transpose3d.html b/docs/build/html/python/_autosummary/mlx.core.conv_transpose3d.html index 67ac99a0c..a72ef711b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.conv_transpose3d.html +++ b/docs/build/html/python/_autosummary/mlx.core.conv_transpose3d.html @@ -8,7 +8,7 @@ - mlx.core.conv_transpose3d — MLX 0.28.0 documentation + mlx.core.conv_transpose3d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.convolve.html b/docs/build/html/python/_autosummary/mlx.core.convolve.html index 27fad0586..5e29d233d 100644 --- a/docs/build/html/python/_autosummary/mlx.core.convolve.html +++ b/docs/build/html/python/_autosummary/mlx.core.convolve.html @@ -8,7 +8,7 @@ - mlx.core.convolve — MLX 0.28.0 documentation + mlx.core.convolve — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.cos.html b/docs/build/html/python/_autosummary/mlx.core.cos.html index 32f0b9516..3130d7248 100644 --- a/docs/build/html/python/_autosummary/mlx.core.cos.html +++ b/docs/build/html/python/_autosummary/mlx.core.cos.html @@ -8,7 +8,7 @@ - mlx.core.cos — MLX 0.28.0 documentation + mlx.core.cos — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.cosh.html b/docs/build/html/python/_autosummary/mlx.core.cosh.html index 08935bdac..18727dd81 100644 --- a/docs/build/html/python/_autosummary/mlx.core.cosh.html +++ b/docs/build/html/python/_autosummary/mlx.core.cosh.html @@ -8,7 +8,7 @@ - mlx.core.cosh — MLX 0.28.0 documentation + mlx.core.cosh — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.cummin.html b/docs/build/html/python/_autosummary/mlx.core.cummin.html index 6a581d3b8..d86990d79 100644 --- a/docs/build/html/python/_autosummary/mlx.core.cummin.html +++ b/docs/build/html/python/_autosummary/mlx.core.cummin.html @@ -8,7 +8,7 @@ - mlx.core.cummin — MLX 0.28.0 documentation + mlx.core.cummin — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.cumprod.html b/docs/build/html/python/_autosummary/mlx.core.cumprod.html index cb960778e..e296ebc9d 100644 --- a/docs/build/html/python/_autosummary/mlx.core.cumprod.html +++ b/docs/build/html/python/_autosummary/mlx.core.cumprod.html @@ -8,7 +8,7 @@ - mlx.core.cumprod — MLX 0.28.0 documentation + mlx.core.cumprod — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.cumsum.html b/docs/build/html/python/_autosummary/mlx.core.cumsum.html index f28b0ee21..fedd4298f 100644 --- a/docs/build/html/python/_autosummary/mlx.core.cumsum.html +++ b/docs/build/html/python/_autosummary/mlx.core.cumsum.html @@ -8,7 +8,7 @@ - mlx.core.cumsum — MLX 0.28.0 documentation + mlx.core.cumsum — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.custom_function.html b/docs/build/html/python/_autosummary/mlx.core.custom_function.html index 0fd9800cb..90ccf6ff3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.custom_function.html +++ b/docs/build/html/python/_autosummary/mlx.core.custom_function.html @@ -8,7 +8,7 @@ - mlx.core.custom_function — MLX 0.28.0 documentation + mlx.core.custom_function — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.default_device.html b/docs/build/html/python/_autosummary/mlx.core.default_device.html index a95675016..a4e25b5d4 100644 --- a/docs/build/html/python/_autosummary/mlx.core.default_device.html +++ b/docs/build/html/python/_autosummary/mlx.core.default_device.html @@ -8,7 +8,7 @@ - mlx.core.default_device — MLX 0.28.0 documentation + mlx.core.default_device — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.default_stream.html b/docs/build/html/python/_autosummary/mlx.core.default_stream.html index 4b965f42e..6543d1536 100644 --- a/docs/build/html/python/_autosummary/mlx.core.default_stream.html +++ b/docs/build/html/python/_autosummary/mlx.core.default_stream.html @@ -8,7 +8,7 @@ - mlx.core.default_stream — MLX 0.28.0 documentation + mlx.core.default_stream — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.degrees.html b/docs/build/html/python/_autosummary/mlx.core.degrees.html index b952ed46f..6edbf1461 100644 --- a/docs/build/html/python/_autosummary/mlx.core.degrees.html +++ b/docs/build/html/python/_autosummary/mlx.core.degrees.html @@ -8,7 +8,7 @@ - mlx.core.degrees — MLX 0.28.0 documentation + mlx.core.degrees — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.dequantize.html b/docs/build/html/python/_autosummary/mlx.core.dequantize.html index 277073643..a83699bc6 100644 --- a/docs/build/html/python/_autosummary/mlx.core.dequantize.html +++ b/docs/build/html/python/_autosummary/mlx.core.dequantize.html @@ -8,7 +8,7 @@ - mlx.core.dequantize — MLX 0.28.0 documentation + mlx.core.dequantize — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -923,24 +931,20 @@ document.write(`

      mlx.core.dequantize#

      -dequantize(w: array, /, scales: array, biases: array, group_size: int = 64, bits: int = 4, *, stream: None | Stream | Device = None) array#
      -

      Dequantize the matrix w using the provided scales and -biases and the group_size and bits configuration.

      -

      Formally, given the notation in quantize(), we compute -\(w_i\) from \(\hat{w_i}\) and corresponding \(s\) and -\(\beta\) as follows

      -
      -\[w_i = s \hat{w_i} + \beta\]
+dequantize(w: array, /, scales: array, biases: Optional[array] = None, group_size: int = 64, bits: int = 4, mode: str = 'affine', *, stream: Union[None, Stream, Device] = None) array# +

      Dequantize the matrix w using quantization parameters.

      Parameters:
        -
      • w (array) – Matrix to be quantized

      • -
      • scales (array) – The scales to use per group_size elements of w

      • -
      • biases (array) – The biases to use per group_size elements of w

      • +
      • w (array) – Matrix to be dequantized

      • +
      • scales (array) – The scales to use per group_size elements of w.

      • +
      • biases (array, optional) – The biases to use per group_size +elements of w. Default: None.

      • group_size (int, optional) – The size of the group in w that shares a scale and bias. Default: 64.

      • bits (int, optional) – The number of bits occupied by each element in w. Default: 4.

      • +
      • mode (str, optional) – The quantization mode. Default: "affine".

      Returns:
      @@ -950,6 +954,13 @@ scale and bias. Default:

      array

      +

      Notes

      +

The currently supported quantization modes are "affine" and "mxfp4".

      +

      For affine quantization, given the notation in quantize(), +we compute \(w_i\) from \(\hat{w_i}\) and corresponding \(s\) +and \(\beta\) as follows

      +
      +\[w_i = s \hat{w_i} + \beta\]
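A minimal round-trip sketch of the updated API above, assuming the default affine mode; the random input, shapes, and the error check are illustrative assumptions, not part of the documented page:

import mlx.core as mx

# Quantize a matrix with the default affine mode, then reconstruct it.
# group_size=64 and bits=4 match the defaults documented above.
w = mx.random.normal(shape=(128, 256))
w_q, scales, biases = mx.quantize(w, group_size=64, bits=4)

# dequantize() applies w_i = s * w_hat_i + beta per group of 64 elements.
w_hat = mx.dequantize(w_q, scales, biases, group_size=64, bits=4, mode="affine")

# The reconstruction is lossy but should stay close to the original.
print(mx.abs(w - w_hat).max())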
      diff --git a/docs/build/html/python/_autosummary/mlx.core.diag.html b/docs/build/html/python/_autosummary/mlx.core.diag.html index f6d63cf58..93cb551f5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.diag.html +++ b/docs/build/html/python/_autosummary/mlx.core.diag.html @@ -8,7 +8,7 @@ - mlx.core.diag — MLX 0.28.0 documentation + mlx.core.diag — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.diagonal.html b/docs/build/html/python/_autosummary/mlx.core.diagonal.html index 25c8a849f..a14618bdf 100644 --- a/docs/build/html/python/_autosummary/mlx.core.diagonal.html +++ b/docs/build/html/python/_autosummary/mlx.core.diagonal.html @@ -8,7 +8,7 @@ - mlx.core.diagonal — MLX 0.28.0 documentation + mlx.core.diagonal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.disable_compile.html b/docs/build/html/python/_autosummary/mlx.core.disable_compile.html index af079490f..52ace06a1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.disable_compile.html +++ b/docs/build/html/python/_autosummary/mlx.core.disable_compile.html @@ -8,7 +8,7 @@ - mlx.core.disable_compile — MLX 0.28.0 documentation + mlx.core.disable_compile — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.Group.html b/docs/build/html/python/_autosummary/mlx.core.distributed.Group.html index 48bb62327..a9c0305d3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.Group.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.Group.html @@ -8,7 +8,7 @@ - mlx.core.distributed.Group — MLX 0.28.0 documentation + mlx.core.distributed.Group — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.all_gather.html b/docs/build/html/python/_autosummary/mlx.core.distributed.all_gather.html index 30e8e549f..20d827184 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.all_gather.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.all_gather.html @@ -8,7 +8,7 @@ - mlx.core.distributed.all_gather — MLX 0.28.0 documentation + mlx.core.distributed.all_gather — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.all_sum.html b/docs/build/html/python/_autosummary/mlx.core.distributed.all_sum.html index 419b8a084..9cebb5c5c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.all_sum.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.all_sum.html @@ -8,7 +8,7 @@ - mlx.core.distributed.all_sum — MLX 0.28.0 documentation + mlx.core.distributed.all_sum — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.init.html b/docs/build/html/python/_autosummary/mlx.core.distributed.init.html index 7e23453f6..5596daa4b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.init.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.init.html @@ -8,7 +8,7 @@ - mlx.core.distributed.init — MLX 0.28.0 documentation + mlx.core.distributed.init — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -937,7 +945,7 @@ document.write(` in case mx.distributed.is_available() returns False otherwise it throws a runtime error. Default: False

    • backend (str, optional) – Which distributed backend to initialize. -Possible values mpi, ring, any. If set to any all +Possible values mpi, ring, nccl, any. If set to any all available backends are tried and the first one that succeeds becomes the global group which will be returned in subsequent calls. Default: any
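A hedged sketch of the backend selection described above; the fallback logic and printed messages are assumptions for illustration, not part of the documented behavior:

import mlx.core as mx

# backend="any" tries the available backends (mpi, ring, nccl) and keeps
# the first one that initializes successfully as the global group.
if mx.distributed.is_available():
    group = mx.distributed.init(backend="any")
    print(f"rank {group.rank()} of {group.size()}")
else:
    print("no distributed backend available; running single-process")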

    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.is_available.html b/docs/build/html/python/_autosummary/mlx.core.distributed.is_available.html index 3b9280373..f975bbee8 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.is_available.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.is_available.html @@ -8,7 +8,7 @@ - mlx.core.distributed.is_available — MLX 0.28.0 documentation + mlx.core.distributed.is_available — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.recv.html b/docs/build/html/python/_autosummary/mlx.core.distributed.recv.html index 3b5d3dc4b..611abd6b2 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.recv.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.recv.html @@ -8,7 +8,7 @@ - mlx.core.distributed.recv — MLX 0.28.0 documentation + mlx.core.distributed.recv — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.recv_like.html b/docs/build/html/python/_autosummary/mlx.core.distributed.recv_like.html index b7a4971a8..aceced867 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.recv_like.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.recv_like.html @@ -8,7 +8,7 @@ - mlx.core.distributed.recv_like — MLX 0.28.0 documentation + mlx.core.distributed.recv_like — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.distributed.send.html b/docs/build/html/python/_autosummary/mlx.core.distributed.send.html index c2d33ffb9..5dad50dd7 100644 --- a/docs/build/html/python/_autosummary/mlx.core.distributed.send.html +++ b/docs/build/html/python/_autosummary/mlx.core.distributed.send.html @@ -8,7 +8,7 @@ - mlx.core.distributed.send — MLX 0.28.0 documentation + mlx.core.distributed.send — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.divide.html b/docs/build/html/python/_autosummary/mlx.core.divide.html index 74f3a6077..9d2611db3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.divide.html +++ b/docs/build/html/python/_autosummary/mlx.core.divide.html @@ -8,7 +8,7 @@ - mlx.core.divide — MLX 0.28.0 documentation + mlx.core.divide — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.divmod.html b/docs/build/html/python/_autosummary/mlx.core.divmod.html index b0b452500..81be023ff 100644 --- a/docs/build/html/python/_autosummary/mlx.core.divmod.html +++ b/docs/build/html/python/_autosummary/mlx.core.divmod.html @@ -8,7 +8,7 @@ - mlx.core.divmod — MLX 0.28.0 documentation + mlx.core.divmod — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.einsum.html b/docs/build/html/python/_autosummary/mlx.core.einsum.html index 6a3ea4a2f..2fb7a3842 100644 --- a/docs/build/html/python/_autosummary/mlx.core.einsum.html +++ b/docs/build/html/python/_autosummary/mlx.core.einsum.html @@ -8,7 +8,7 @@ - mlx.core.einsum — MLX 0.28.0 documentation + mlx.core.einsum — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.einsum_path.html b/docs/build/html/python/_autosummary/mlx.core.einsum_path.html index 9c2ed7099..5f57eb6ff 100644 --- a/docs/build/html/python/_autosummary/mlx.core.einsum_path.html +++ b/docs/build/html/python/_autosummary/mlx.core.einsum_path.html @@ -8,7 +8,7 @@ - mlx.core.einsum_path — MLX 0.28.0 documentation + mlx.core.einsum_path — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.enable_compile.html b/docs/build/html/python/_autosummary/mlx.core.enable_compile.html index a91b45dc3..c4e6952ea 100644 --- a/docs/build/html/python/_autosummary/mlx.core.enable_compile.html +++ b/docs/build/html/python/_autosummary/mlx.core.enable_compile.html @@ -8,7 +8,7 @@ - mlx.core.enable_compile — MLX 0.28.0 documentation + mlx.core.enable_compile — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.equal.html b/docs/build/html/python/_autosummary/mlx.core.equal.html index 51df54966..ea8201de5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.equal.html +++ b/docs/build/html/python/_autosummary/mlx.core.equal.html @@ -8,7 +8,7 @@ - mlx.core.equal — MLX 0.28.0 documentation + mlx.core.equal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.erf.html b/docs/build/html/python/_autosummary/mlx.core.erf.html index 194a549d4..e8847b783 100644 --- a/docs/build/html/python/_autosummary/mlx.core.erf.html +++ b/docs/build/html/python/_autosummary/mlx.core.erf.html @@ -8,7 +8,7 @@ - mlx.core.erf — MLX 0.28.0 documentation + mlx.core.erf — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.erfinv.html b/docs/build/html/python/_autosummary/mlx.core.erfinv.html index 7ac0d1f95..cd8affa67 100644 --- a/docs/build/html/python/_autosummary/mlx.core.erfinv.html +++ b/docs/build/html/python/_autosummary/mlx.core.erfinv.html @@ -8,7 +8,7 @@ - mlx.core.erfinv — MLX 0.28.0 documentation + mlx.core.erfinv — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.eval.html b/docs/build/html/python/_autosummary/mlx.core.eval.html index dc1f608ca..252c7d19a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.eval.html +++ b/docs/build/html/python/_autosummary/mlx.core.eval.html @@ -8,7 +8,7 @@ - mlx.core.eval — MLX 0.28.0 documentation + mlx.core.eval — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.exp.html b/docs/build/html/python/_autosummary/mlx.core.exp.html index fdbec73fa..2da3b3a10 100644 --- a/docs/build/html/python/_autosummary/mlx.core.exp.html +++ b/docs/build/html/python/_autosummary/mlx.core.exp.html @@ -8,7 +8,7 @@ - mlx.core.exp — MLX 0.28.0 documentation + mlx.core.exp — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.expand_dims.html b/docs/build/html/python/_autosummary/mlx.core.expand_dims.html index 186e3d636..a4c50d33a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.expand_dims.html +++ b/docs/build/html/python/_autosummary/mlx.core.expand_dims.html @@ -8,7 +8,7 @@ - mlx.core.expand_dims — MLX 0.28.0 documentation + mlx.core.expand_dims — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.expm1.html b/docs/build/html/python/_autosummary/mlx.core.expm1.html index 26da6836b..fd2fab044 100644 --- a/docs/build/html/python/_autosummary/mlx.core.expm1.html +++ b/docs/build/html/python/_autosummary/mlx.core.expm1.html @@ -8,7 +8,7 @@ - mlx.core.expm1 — MLX 0.28.0 documentation + mlx.core.expm1 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.export_function.html b/docs/build/html/python/_autosummary/mlx.core.export_function.html index 672963718..8cde5bbce 100644 --- a/docs/build/html/python/_autosummary/mlx.core.export_function.html +++ b/docs/build/html/python/_autosummary/mlx.core.export_function.html @@ -8,7 +8,7 @@ - mlx.core.export_function — MLX 0.28.0 documentation + mlx.core.export_function — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.export_to_dot.html b/docs/build/html/python/_autosummary/mlx.core.export_to_dot.html index 44e231974..600f47a5c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.export_to_dot.html +++ b/docs/build/html/python/_autosummary/mlx.core.export_to_dot.html @@ -8,7 +8,7 @@ - mlx.core.export_to_dot — MLX 0.28.0 documentation + mlx.core.export_to_dot — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.exporter.html b/docs/build/html/python/_autosummary/mlx.core.exporter.html index 80be2afdf..be1df9482 100644 --- a/docs/build/html/python/_autosummary/mlx.core.exporter.html +++ b/docs/build/html/python/_autosummary/mlx.core.exporter.html @@ -8,7 +8,7 @@ - mlx.core.exporter — MLX 0.28.0 documentation + mlx.core.exporter — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.eye.html b/docs/build/html/python/_autosummary/mlx.core.eye.html index 7c943e702..e89a5320a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.eye.html +++ b/docs/build/html/python/_autosummary/mlx.core.eye.html @@ -8,7 +8,7 @@ - mlx.core.eye — MLX 0.28.0 documentation + mlx.core.eye — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fast.metal_kernel.html b/docs/build/html/python/_autosummary/mlx.core.fast.metal_kernel.html index 43f2a18dd..57be063e9 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fast.metal_kernel.html +++ b/docs/build/html/python/_autosummary/mlx.core.fast.metal_kernel.html @@ -8,7 +8,7 @@ - mlx.core.fast.metal_kernel — MLX 0.28.0 documentation + mlx.core.fast.metal_kernel — MLX 0.29.0 documentation @@ -30,21 +30,24 @@ + - + + + - + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -1003,11 +1011,11 @@ e.g. device

      next

      -

      FFT

      +

      mlx.core.fast.cuda_kernel

      diff --git a/docs/build/html/python/_autosummary/mlx.core.fast.rms_norm.html b/docs/build/html/python/_autosummary/mlx.core.fast.rms_norm.html index f76ad29ed..17c2f9207 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fast.rms_norm.html +++ b/docs/build/html/python/_autosummary/mlx.core.fast.rms_norm.html @@ -8,7 +8,7 @@ - mlx.core.fast.rms_norm — MLX 0.28.0 documentation + mlx.core.fast.rms_norm — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fast.rope.html b/docs/build/html/python/_autosummary/mlx.core.fast.rope.html index 6fb02fe24..5339eb687 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fast.rope.html +++ b/docs/build/html/python/_autosummary/mlx.core.fast.rope.html @@ -8,7 +8,7 @@ - mlx.core.fast.rope — MLX 0.28.0 documentation + mlx.core.fast.rope — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fast.scaled_dot_product_attention.html b/docs/build/html/python/_autosummary/mlx.core.fast.scaled_dot_product_attention.html index 8e09060ff..f38d272a5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fast.scaled_dot_product_attention.html +++ b/docs/build/html/python/_autosummary/mlx.core.fast.scaled_dot_product_attention.html @@ -8,7 +8,7 @@ - mlx.core.fast.scaled_dot_product_attention — MLX 0.28.0 documentation + mlx.core.fast.scaled_dot_product_attention — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fft.html b/docs/build/html/python/_autosummary/mlx.core.fft.fft.html index 03085a554..b4e4a20fe 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.fft.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.fft.html @@ -8,7 +8,7 @@ - mlx.core.fft.fft — MLX 0.28.0 documentation + mlx.core.fft.fft — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html index 2940ac232..878bcbd5c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.fft2.html @@ -8,7 +8,7 @@ - mlx.core.fft.fft2 — MLX 0.28.0 documentation + mlx.core.fft.fft2 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html index aff63985f..b8910c5d2 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.fftn.html @@ -8,7 +8,7 @@ - mlx.core.fft.fftn — MLX 0.28.0 documentation + mlx.core.fft.fftn — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.fftshift.html b/docs/build/html/python/_autosummary/mlx.core.fft.fftshift.html index e012d3e86..4752cfd66 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.fftshift.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.fftshift.html @@ -8,7 +8,7 @@ - mlx.core.fft.fftshift — MLX 0.28.0 documentation + mlx.core.fft.fftshift — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html index 0d8d8e06a..430a1b7bc 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifft.html @@ -8,7 +8,7 @@ - mlx.core.fft.ifft — MLX 0.28.0 documentation + mlx.core.fft.ifft — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html index e5afea53a..68cc51cdb 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifft2.html @@ -8,7 +8,7 @@ - mlx.core.fft.ifft2 — MLX 0.28.0 documentation + mlx.core.fft.ifft2 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html index 12b775615..ef15bfacc 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifftn.html @@ -8,7 +8,7 @@ - mlx.core.fft.ifftn — MLX 0.28.0 documentation + mlx.core.fft.ifftn — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.ifftshift.html b/docs/build/html/python/_autosummary/mlx.core.fft.ifftshift.html index 3e144a866..5b6708fd9 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.ifftshift.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.ifftshift.html @@ -8,7 +8,7 @@ - mlx.core.fft.ifftshift — MLX 0.28.0 documentation + mlx.core.fft.ifftshift — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html b/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html index 864f34a5f..c858964d8 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.irfft.html @@ -8,7 +8,7 @@ - mlx.core.fft.irfft — MLX 0.28.0 documentation + mlx.core.fft.irfft — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html index a01783b83..c94003496 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.irfft2.html @@ -8,7 +8,7 @@ - mlx.core.fft.irfft2 — MLX 0.28.0 documentation + mlx.core.fft.irfft2 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html index f71c4d1dd..34142942a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.irfftn.html @@ -8,7 +8,7 @@ - mlx.core.fft.irfftn — MLX 0.28.0 documentation + mlx.core.fft.irfftn — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html b/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html index 35c4da345..daff913f5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.rfft.html @@ -8,7 +8,7 @@ - mlx.core.fft.rfft — MLX 0.28.0 documentation + mlx.core.fft.rfft — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html b/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html index 516ab6e3e..45012636f 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.rfft2.html @@ -8,7 +8,7 @@ - mlx.core.fft.rfft2 — MLX 0.28.0 documentation + mlx.core.fft.rfft2 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html b/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html index 49a7f4a72..a818f7d36 100644 --- a/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html +++ b/docs/build/html/python/_autosummary/mlx.core.fft.rfftn.html @@ -8,7 +8,7 @@ - mlx.core.fft.rfftn — MLX 0.28.0 documentation + mlx.core.fft.rfftn — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.finfo.html b/docs/build/html/python/_autosummary/mlx.core.finfo.html index f354f271d..dc59d7825 100644 --- a/docs/build/html/python/_autosummary/mlx.core.finfo.html +++ b/docs/build/html/python/_autosummary/mlx.core.finfo.html @@ -8,7 +8,7 @@ - mlx.core.finfo — MLX 0.28.0 documentation + mlx.core.finfo — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.flatten.html b/docs/build/html/python/_autosummary/mlx.core.flatten.html index fb08bde8c..479ea78d6 100644 --- a/docs/build/html/python/_autosummary/mlx.core.flatten.html +++ b/docs/build/html/python/_autosummary/mlx.core.flatten.html @@ -8,7 +8,7 @@ - mlx.core.flatten — MLX 0.28.0 documentation + mlx.core.flatten — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.floor.html b/docs/build/html/python/_autosummary/mlx.core.floor.html index cd52c7982..58653db51 100644 --- a/docs/build/html/python/_autosummary/mlx.core.floor.html +++ b/docs/build/html/python/_autosummary/mlx.core.floor.html @@ -8,7 +8,7 @@ - mlx.core.floor — MLX 0.28.0 documentation + mlx.core.floor — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.floor_divide.html b/docs/build/html/python/_autosummary/mlx.core.floor_divide.html index d00d7880e..76d310d21 100644 --- a/docs/build/html/python/_autosummary/mlx.core.floor_divide.html +++ b/docs/build/html/python/_autosummary/mlx.core.floor_divide.html @@ -8,7 +8,7 @@ - mlx.core.floor_divide — MLX 0.28.0 documentation + mlx.core.floor_divide — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.full.html b/docs/build/html/python/_autosummary/mlx.core.full.html index 0e0b479d6..2b3d2e5f3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.full.html +++ b/docs/build/html/python/_autosummary/mlx.core.full.html @@ -8,7 +8,7 @@ - mlx.core.full — MLX 0.28.0 documentation + mlx.core.full — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.gather_mm.html b/docs/build/html/python/_autosummary/mlx.core.gather_mm.html index cc43ff55c..86b20b717 100644 --- a/docs/build/html/python/_autosummary/mlx.core.gather_mm.html +++ b/docs/build/html/python/_autosummary/mlx.core.gather_mm.html @@ -8,7 +8,7 @@ - mlx.core.gather_mm — MLX 0.28.0 documentation + mlx.core.gather_mm — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.gather_qmm.html b/docs/build/html/python/_autosummary/mlx.core.gather_qmm.html index 513607c4e..403d002db 100644 --- a/docs/build/html/python/_autosummary/mlx.core.gather_qmm.html +++ b/docs/build/html/python/_autosummary/mlx.core.gather_qmm.html @@ -8,7 +8,7 @@ - mlx.core.gather_qmm — MLX 0.28.0 documentation + mlx.core.gather_qmm — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,7 +930,7 @@

      mlx.core.gather_qmm#

      -gather_qmm(x: array, w: array, /, scales: array, biases: array, lhs_indices: array | None = None, rhs_indices: array | None = None, transpose: bool = True, group_size: int = 64, bits: int = 4, *, sorted_indices: bool = False, stream: None | Stream | Device = None) array#
      +gather_qmm(x: array, w: array, /, scales: array, biases: array | None = None, lhs_indices: array | None = None, rhs_indices: array | None = None, transpose: bool = True, group_size: int = 64, bits: int = 4, mode: str = 'affine', *, sorted_indices: bool = False, stream: None | Stream | Device = None) array#

      Perform quantized matrix multiplication with matrix-level gather.

      This operation is the quantized equivalent to gather_mm(). Similar to gather_mm(), the indices lhs_indices and rhs_indices contain flattened indices along the batch dimensions (i.e. all but the last two dimensions) of x and w respectively.

    • x (array) – Input array

    • w (array) – Quantized matrix packed in unsigned integers

    • scales (array) – The scales to use per group_size elements of w

    • -
    • biases (array) – The biases to use per group_size elements of w

    • +
    • biases (array, optional) – The biases to use per group_size elements of w. Default: None.

    • lhs_indices (array, optional) – Integer indices for x. Default: None.

    • rhs_indices (array, optional) – Integer indices for w. Default: None.

    • transpose (bool, optional) – Defines whether to multiply with the transposed w or not, namely whether we are performing x @ w.T or x @ w. Default: True.

    • group_size (int, optional) – The size of the group in w that shares a scale and bias. Default: 64.

    • bits (int, optional) – The number of bits occupied by each element in w. Default: 4.

    • +
    • mode (str, optional) – The quantization mode. Default: "affine".

    • sorted_indices (bool, optional) – May allow a faster implementation if the passed indices are sorted. Default: False.

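      A minimal usage sketch of the updated 0.29.0 signature, where biases is optional and mode selects the quantization scheme. This example is not part of the patch; the expert count, shapes, and routing indices are illustrative assumptions.

```python
import mlx.core as mx

# Illustrative sizes: 4 "expert" weight matrices of shape (N, K) = (32, 64).
num_experts, N, K = 4, 32, 64

w_fp = mx.random.normal((num_experts, N, K))
# Quantize the stack; mx.quantize returns packed weights, scales and biases.
w_q, scales, biases = mx.quantize(w_fp, group_size=64, bits=4)

# Two inputs of shape (1, K), routed to experts 1 and 3 via rhs_indices.
x = mx.random.normal((2, 1, K))
rhs_indices = mx.array([1, 3])

# biases may now be omitted for quantization modes that do not use it; here
# it is passed since the default "affine" mode of mx.quantize produces one.
y = mx.gather_qmm(x, w_q, scales, biases,
                  rhs_indices=rhs_indices,
                  transpose=True, group_size=64, bits=4, mode="affine")
print(y.shape)  # (2, 1, 32) with these illustrative shapes
```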
    diff --git a/docs/build/html/python/_autosummary/mlx.core.get_active_memory.html b/docs/build/html/python/_autosummary/mlx.core.get_active_memory.html index 6279e45b6..44817c711 100644 --- a/docs/build/html/python/_autosummary/mlx.core.get_active_memory.html +++ b/docs/build/html/python/_autosummary/mlx.core.get_active_memory.html @@ -8,7 +8,7 @@ - mlx.core.get_active_memory — MLX 0.28.0 documentation + mlx.core.get_active_memory — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
  • mlx.core.fast.rope
  • mlx.core.fast.scaled_dot_product_attention
  • mlx.core.fast.metal_kernel
  • +
  • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.get_cache_memory.html b/docs/build/html/python/_autosummary/mlx.core.get_cache_memory.html index 8c2bf2fce..5862d0575 100644 --- a/docs/build/html/python/_autosummary/mlx.core.get_cache_memory.html +++ b/docs/build/html/python/_autosummary/mlx.core.get_cache_memory.html @@ -8,7 +8,7 @@ - mlx.core.get_cache_memory — MLX 0.28.0 documentation + mlx.core.get_cache_memory — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.get_peak_memory.html b/docs/build/html/python/_autosummary/mlx.core.get_peak_memory.html index 3ebfc3259..10e6c4bb3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.get_peak_memory.html +++ b/docs/build/html/python/_autosummary/mlx.core.get_peak_memory.html @@ -8,7 +8,7 @@ - mlx.core.get_peak_memory — MLX 0.28.0 documentation + mlx.core.get_peak_memory — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.grad.html b/docs/build/html/python/_autosummary/mlx.core.grad.html index fb706a240..f71b02898 100644 --- a/docs/build/html/python/_autosummary/mlx.core.grad.html +++ b/docs/build/html/python/_autosummary/mlx.core.grad.html @@ -8,7 +8,7 @@ - mlx.core.grad — MLX 0.28.0 documentation + mlx.core.grad — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.greater.html b/docs/build/html/python/_autosummary/mlx.core.greater.html index ec5611c54..5dae68f81 100644 --- a/docs/build/html/python/_autosummary/mlx.core.greater.html +++ b/docs/build/html/python/_autosummary/mlx.core.greater.html @@ -8,7 +8,7 @@ - mlx.core.greater — MLX 0.28.0 documentation + mlx.core.greater — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.greater_equal.html b/docs/build/html/python/_autosummary/mlx.core.greater_equal.html index e9f4c638e..91d04d82e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.greater_equal.html +++ b/docs/build/html/python/_autosummary/mlx.core.greater_equal.html @@ -8,7 +8,7 @@ - mlx.core.greater_equal — MLX 0.28.0 documentation + mlx.core.greater_equal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.hadamard_transform.html b/docs/build/html/python/_autosummary/mlx.core.hadamard_transform.html index ee84f9c01..047bf1ea7 100644 --- a/docs/build/html/python/_autosummary/mlx.core.hadamard_transform.html +++ b/docs/build/html/python/_autosummary/mlx.core.hadamard_transform.html @@ -8,7 +8,7 @@ - mlx.core.hadamard_transform — MLX 0.28.0 documentation + mlx.core.hadamard_transform — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.identity.html b/docs/build/html/python/_autosummary/mlx.core.identity.html index 9cd547169..24cfb2fd9 100644 --- a/docs/build/html/python/_autosummary/mlx.core.identity.html +++ b/docs/build/html/python/_autosummary/mlx.core.identity.html @@ -8,7 +8,7 @@ - mlx.core.identity — MLX 0.28.0 documentation + mlx.core.identity — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.imag.html b/docs/build/html/python/_autosummary/mlx.core.imag.html index f37407a58..a15dab997 100644 --- a/docs/build/html/python/_autosummary/mlx.core.imag.html +++ b/docs/build/html/python/_autosummary/mlx.core.imag.html @@ -8,7 +8,7 @@ - mlx.core.imag — MLX 0.28.0 documentation + mlx.core.imag — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.import_function.html b/docs/build/html/python/_autosummary/mlx.core.import_function.html index e686c5553..a52816475 100644 --- a/docs/build/html/python/_autosummary/mlx.core.import_function.html +++ b/docs/build/html/python/_autosummary/mlx.core.import_function.html @@ -8,7 +8,7 @@ - mlx.core.import_function — MLX 0.28.0 documentation + mlx.core.import_function — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.inner.html b/docs/build/html/python/_autosummary/mlx.core.inner.html index 1c7523238..c1a15d9c8 100644 --- a/docs/build/html/python/_autosummary/mlx.core.inner.html +++ b/docs/build/html/python/_autosummary/mlx.core.inner.html @@ -8,7 +8,7 @@ - mlx.core.inner — MLX 0.28.0 documentation + mlx.core.inner — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.isclose.html b/docs/build/html/python/_autosummary/mlx.core.isclose.html index 152eb8c9d..733670f14 100644 --- a/docs/build/html/python/_autosummary/mlx.core.isclose.html +++ b/docs/build/html/python/_autosummary/mlx.core.isclose.html @@ -8,7 +8,7 @@ - mlx.core.isclose — MLX 0.28.0 documentation + mlx.core.isclose — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.isfinite.html b/docs/build/html/python/_autosummary/mlx.core.isfinite.html index a2ff148d1..973cd8620 100644 --- a/docs/build/html/python/_autosummary/mlx.core.isfinite.html +++ b/docs/build/html/python/_autosummary/mlx.core.isfinite.html @@ -8,7 +8,7 @@ - mlx.core.isfinite — MLX 0.28.0 documentation + mlx.core.isfinite — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.isinf.html b/docs/build/html/python/_autosummary/mlx.core.isinf.html index 22320eb7d..6f102f0b5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.isinf.html +++ b/docs/build/html/python/_autosummary/mlx.core.isinf.html @@ -8,7 +8,7 @@ - mlx.core.isinf — MLX 0.28.0 documentation + mlx.core.isinf — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.isnan.html b/docs/build/html/python/_autosummary/mlx.core.isnan.html index b9148a5ae..129693e74 100644 --- a/docs/build/html/python/_autosummary/mlx.core.isnan.html +++ b/docs/build/html/python/_autosummary/mlx.core.isnan.html @@ -8,7 +8,7 @@ - mlx.core.isnan — MLX 0.28.0 documentation + mlx.core.isnan — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.isneginf.html b/docs/build/html/python/_autosummary/mlx.core.isneginf.html index 3c27558b7..2549aab8b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.isneginf.html +++ b/docs/build/html/python/_autosummary/mlx.core.isneginf.html @@ -8,7 +8,7 @@ - mlx.core.isneginf — MLX 0.28.0 documentation + mlx.core.isneginf — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.isposinf.html b/docs/build/html/python/_autosummary/mlx.core.isposinf.html index 5a4018abd..cd25f7f37 100644 --- a/docs/build/html/python/_autosummary/mlx.core.isposinf.html +++ b/docs/build/html/python/_autosummary/mlx.core.isposinf.html @@ -8,7 +8,7 @@ - mlx.core.isposinf — MLX 0.28.0 documentation + mlx.core.isposinf — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.issubdtype.html b/docs/build/html/python/_autosummary/mlx.core.issubdtype.html index d67258461..4e04c9d8e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.issubdtype.html +++ b/docs/build/html/python/_autosummary/mlx.core.issubdtype.html @@ -8,7 +8,7 @@ - mlx.core.issubdtype — MLX 0.28.0 documentation + mlx.core.issubdtype — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.jvp.html b/docs/build/html/python/_autosummary/mlx.core.jvp.html index cbc7cbb32..a4980ebc4 100644 --- a/docs/build/html/python/_autosummary/mlx.core.jvp.html +++ b/docs/build/html/python/_autosummary/mlx.core.jvp.html @@ -8,7 +8,7 @@ - mlx.core.jvp — MLX 0.28.0 documentation + mlx.core.jvp — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.kron.html b/docs/build/html/python/_autosummary/mlx.core.kron.html index cc3809dc3..9a69d4cc1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.kron.html +++ b/docs/build/html/python/_autosummary/mlx.core.kron.html @@ -8,7 +8,7 @@ - mlx.core.kron — MLX 0.28.0 documentation + mlx.core.kron — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.left_shift.html b/docs/build/html/python/_autosummary/mlx.core.left_shift.html index 5bc61c137..92bcc18fe 100644 --- a/docs/build/html/python/_autosummary/mlx.core.left_shift.html +++ b/docs/build/html/python/_autosummary/mlx.core.left_shift.html @@ -8,7 +8,7 @@ - mlx.core.left_shift — MLX 0.28.0 documentation + mlx.core.left_shift — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.less.html b/docs/build/html/python/_autosummary/mlx.core.less.html index 1d9c11bc0..927c3346e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.less.html +++ b/docs/build/html/python/_autosummary/mlx.core.less.html @@ -8,7 +8,7 @@ - mlx.core.less — MLX 0.28.0 documentation + mlx.core.less — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.less_equal.html b/docs/build/html/python/_autosummary/mlx.core.less_equal.html index 08d90e811..b598731d3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.less_equal.html +++ b/docs/build/html/python/_autosummary/mlx.core.less_equal.html @@ -8,7 +8,7 @@ - mlx.core.less_equal — MLX 0.28.0 documentation + mlx.core.less_equal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky.html b/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky.html index b299ad651..07695efb9 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky.html @@ -8,7 +8,7 @@ - mlx.core.linalg.cholesky — MLX 0.28.0 documentation + mlx.core.linalg.cholesky — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky_inv.html b/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky_inv.html index bde671f5c..97d871727 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky_inv.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.cholesky_inv.html @@ -8,7 +8,7 @@ - mlx.core.linalg.cholesky_inv — MLX 0.28.0 documentation + mlx.core.linalg.cholesky_inv — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.cross.html b/docs/build/html/python/_autosummary/mlx.core.linalg.cross.html index 95a895f00..e2d9be3c5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.cross.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.cross.html @@ -8,7 +8,7 @@ - mlx.core.linalg.cross — MLX 0.28.0 documentation + mlx.core.linalg.cross — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.eig.html b/docs/build/html/python/_autosummary/mlx.core.linalg.eig.html index 355c69094..ee21fd336 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.eig.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.eig.html @@ -8,7 +8,7 @@ - mlx.core.linalg.eig — MLX 0.28.0 documentation + mlx.core.linalg.eig — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.eigh.html b/docs/build/html/python/_autosummary/mlx.core.linalg.eigh.html index c62a2ddb1..f14b497b3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.eigh.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.eigh.html @@ -8,7 +8,7 @@ - mlx.core.linalg.eigh — MLX 0.28.0 documentation + mlx.core.linalg.eigh — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.eigvals.html b/docs/build/html/python/_autosummary/mlx.core.linalg.eigvals.html index 7736b5ff8..c5d4f9f0e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.eigvals.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.eigvals.html @@ -8,7 +8,7 @@ - mlx.core.linalg.eigvals — MLX 0.28.0 documentation + mlx.core.linalg.eigvals — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.eigvalsh.html b/docs/build/html/python/_autosummary/mlx.core.linalg.eigvalsh.html index 9e94f2c94..6b64614c5 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.eigvalsh.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.eigvalsh.html @@ -8,7 +8,7 @@ - mlx.core.linalg.eigvalsh — MLX 0.28.0 documentation + mlx.core.linalg.eigvalsh — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.inv.html b/docs/build/html/python/_autosummary/mlx.core.linalg.inv.html index 5ac6df35e..c4b6f47ed 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.inv.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.inv.html @@ -8,7 +8,7 @@ - mlx.core.linalg.inv — MLX 0.28.0 documentation + mlx.core.linalg.inv — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.lu.html b/docs/build/html/python/_autosummary/mlx.core.linalg.lu.html index d9bdd5af7..3bc9b2dc4 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.lu.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.lu.html @@ -8,7 +8,7 @@ - mlx.core.linalg.lu — MLX 0.28.0 documentation + mlx.core.linalg.lu — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.lu_factor.html b/docs/build/html/python/_autosummary/mlx.core.linalg.lu_factor.html index cb4274901..23d18f2f9 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.lu_factor.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.lu_factor.html @@ -8,7 +8,7 @@ - mlx.core.linalg.lu_factor — MLX 0.28.0 documentation + mlx.core.linalg.lu_factor — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.norm.html b/docs/build/html/python/_autosummary/mlx.core.linalg.norm.html index 426eb0148..5f3ceae7c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.norm.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.norm.html @@ -8,7 +8,7 @@ - mlx.core.linalg.norm — MLX 0.28.0 documentation + mlx.core.linalg.norm — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.pinv.html b/docs/build/html/python/_autosummary/mlx.core.linalg.pinv.html index 4340e0721..891f25a97 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.pinv.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.pinv.html @@ -8,7 +8,7 @@ - mlx.core.linalg.pinv — MLX 0.28.0 documentation + mlx.core.linalg.pinv — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.qr.html b/docs/build/html/python/_autosummary/mlx.core.linalg.qr.html index ce8c05fe6..152dd0019 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.qr.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.qr.html @@ -8,7 +8,7 @@ - mlx.core.linalg.qr — MLX 0.28.0 documentation + mlx.core.linalg.qr — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.solve.html b/docs/build/html/python/_autosummary/mlx.core.linalg.solve.html index f6a9a8ad4..42caf4674 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.solve.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.solve.html @@ -8,7 +8,7 @@ - mlx.core.linalg.solve — MLX 0.28.0 documentation + mlx.core.linalg.solve — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.solve_triangular.html b/docs/build/html/python/_autosummary/mlx.core.linalg.solve_triangular.html index 3d0078483..424463016 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.solve_triangular.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.solve_triangular.html @@ -8,7 +8,7 @@ - mlx.core.linalg.solve_triangular — MLX 0.28.0 documentation + mlx.core.linalg.solve_triangular — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.svd.html b/docs/build/html/python/_autosummary/mlx.core.linalg.svd.html index 3a8b6105f..9de39875c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.svd.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.svd.html @@ -8,7 +8,7 @@ - mlx.core.linalg.svd — MLX 0.28.0 documentation + mlx.core.linalg.svd — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linalg.tri_inv.html b/docs/build/html/python/_autosummary/mlx.core.linalg.tri_inv.html index 60315ebc4..0677d3359 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linalg.tri_inv.html +++ b/docs/build/html/python/_autosummary/mlx.core.linalg.tri_inv.html @@ -8,7 +8,7 @@ - mlx.core.linalg.tri_inv — MLX 0.28.0 documentation + mlx.core.linalg.tri_inv — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.linspace.html b/docs/build/html/python/_autosummary/mlx.core.linspace.html index 75fa8b600..b1263bf9e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.linspace.html +++ b/docs/build/html/python/_autosummary/mlx.core.linspace.html @@ -8,7 +8,7 @@ - mlx.core.linspace — MLX 0.28.0 documentation + mlx.core.linspace — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.load.html b/docs/build/html/python/_autosummary/mlx.core.load.html index fa6b7c7a9..95fcfbbbf 100644 --- a/docs/build/html/python/_autosummary/mlx.core.load.html +++ b/docs/build/html/python/_autosummary/mlx.core.load.html @@ -8,7 +8,7 @@ - mlx.core.load — MLX 0.28.0 documentation + mlx.core.load — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,14 +930,14 @@ document.write(`

      mlx.core.load#

      -load(file: str, /, format: str | None = None, return_metadata: bool = False, *, stream: None | Stream | Device = None) array | dict[str, array]#
      +load(file: file | str | Path, /, format: str | None = None, return_metadata: bool = False, *, stream: None | Stream | Device = None) array | dict[str, array]#

      Load array(s) from a binary file.

      The supported formats are .npy, .npz, .safetensors, and .gguf.

      Parameters:
        -
      • file (file, str) – File in which the array is saved.

      • +
      • file (file, str, Path) – File in which the array is saved.

      • format (str, optional) – Format of the file. If None, the format is inferred from the file extension. Supported formats: npy, npz, and safetensors. Default: None.

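      A small sketch, not part of the patch, showing the widened file argument in 0.29.0, which now also accepts a pathlib.Path in addition to a string or open file; the file names below are hypothetical.

```python
import mlx.core as mx
from pathlib import Path

a = mx.arange(8).reshape(2, 4)

# Save a single array to .npy and several arrays to .npz (hypothetical names).
mx.save("example.npy", a)
mx.savez("example.npz", first=a, second=a + 1)

b = mx.load("example.npy")              # str path, as before
arrays = mx.load(Path("example.npz"))   # pathlib.Path now accepted as well
print(b.shape, arrays["first"].shape)
```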
      • diff --git a/docs/build/html/python/_autosummary/mlx.core.log.html b/docs/build/html/python/_autosummary/mlx.core.log.html index 0a5b69067..8d53d5c95 100644 --- a/docs/build/html/python/_autosummary/mlx.core.log.html +++ b/docs/build/html/python/_autosummary/mlx.core.log.html @@ -8,7 +8,7 @@ - mlx.core.log — MLX 0.28.0 documentation + mlx.core.log — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.log10.html b/docs/build/html/python/_autosummary/mlx.core.log10.html index 2b867b716..08827994b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.log10.html +++ b/docs/build/html/python/_autosummary/mlx.core.log10.html @@ -8,7 +8,7 @@ - mlx.core.log10 — MLX 0.28.0 documentation + mlx.core.log10 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.log1p.html b/docs/build/html/python/_autosummary/mlx.core.log1p.html index 3136929fa..250973834 100644 --- a/docs/build/html/python/_autosummary/mlx.core.log1p.html +++ b/docs/build/html/python/_autosummary/mlx.core.log1p.html @@ -8,7 +8,7 @@ - mlx.core.log1p — MLX 0.28.0 documentation + mlx.core.log1p — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.log2.html b/docs/build/html/python/_autosummary/mlx.core.log2.html index b0e59ed0f..d48494b43 100644 --- a/docs/build/html/python/_autosummary/mlx.core.log2.html +++ b/docs/build/html/python/_autosummary/mlx.core.log2.html @@ -8,7 +8,7 @@ - mlx.core.log2 — MLX 0.28.0 documentation + mlx.core.log2 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.logaddexp.html b/docs/build/html/python/_autosummary/mlx.core.logaddexp.html index 290312eec..96e8b96dd 100644 --- a/docs/build/html/python/_autosummary/mlx.core.logaddexp.html +++ b/docs/build/html/python/_autosummary/mlx.core.logaddexp.html @@ -8,7 +8,7 @@ - mlx.core.logaddexp — MLX 0.28.0 documentation + mlx.core.logaddexp — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.logcumsumexp.html b/docs/build/html/python/_autosummary/mlx.core.logcumsumexp.html index 75ec2fe51..c79d0191b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.logcumsumexp.html +++ b/docs/build/html/python/_autosummary/mlx.core.logcumsumexp.html @@ -8,7 +8,7 @@ - mlx.core.logcumsumexp — MLX 0.28.0 documentation + mlx.core.logcumsumexp — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.logical_and.html b/docs/build/html/python/_autosummary/mlx.core.logical_and.html index 2e7f8d462..053913cf0 100644 --- a/docs/build/html/python/_autosummary/mlx.core.logical_and.html +++ b/docs/build/html/python/_autosummary/mlx.core.logical_and.html @@ -8,7 +8,7 @@ - mlx.core.logical_and — MLX 0.28.0 documentation + mlx.core.logical_and — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.logical_not.html b/docs/build/html/python/_autosummary/mlx.core.logical_not.html index 5f4f2c0f1..eaf84acc3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.logical_not.html +++ b/docs/build/html/python/_autosummary/mlx.core.logical_not.html @@ -8,7 +8,7 @@ - mlx.core.logical_not — MLX 0.28.0 documentation + mlx.core.logical_not — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.logical_or.html b/docs/build/html/python/_autosummary/mlx.core.logical_or.html index 5eeb6eed9..6b468ef52 100644 --- a/docs/build/html/python/_autosummary/mlx.core.logical_or.html +++ b/docs/build/html/python/_autosummary/mlx.core.logical_or.html @@ -8,7 +8,7 @@ - mlx.core.logical_or — MLX 0.28.0 documentation + mlx.core.logical_or — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.logsumexp.html b/docs/build/html/python/_autosummary/mlx.core.logsumexp.html index fd19dabda..d768f38b6 100644 --- a/docs/build/html/python/_autosummary/mlx.core.logsumexp.html +++ b/docs/build/html/python/_autosummary/mlx.core.logsumexp.html @@ -8,7 +8,7 @@ - mlx.core.logsumexp — MLX 0.28.0 documentation + mlx.core.logsumexp — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.matmul.html b/docs/build/html/python/_autosummary/mlx.core.matmul.html index 79190eee0..d3099e325 100644 --- a/docs/build/html/python/_autosummary/mlx.core.matmul.html +++ b/docs/build/html/python/_autosummary/mlx.core.matmul.html @@ -8,7 +8,7 @@ - mlx.core.matmul — MLX 0.28.0 documentation + mlx.core.matmul — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.max.html b/docs/build/html/python/_autosummary/mlx.core.max.html index 4a9c2c0ca..6bf2a6c8f 100644 --- a/docs/build/html/python/_autosummary/mlx.core.max.html +++ b/docs/build/html/python/_autosummary/mlx.core.max.html @@ -8,7 +8,7 @@ - mlx.core.max — MLX 0.28.0 documentation + mlx.core.max — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.maximum.html b/docs/build/html/python/_autosummary/mlx.core.maximum.html index abefa0a8d..c170fe61a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.maximum.html +++ b/docs/build/html/python/_autosummary/mlx.core.maximum.html @@ -8,7 +8,7 @@ - mlx.core.maximum — MLX 0.28.0 documentation + mlx.core.maximum — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.mean.html b/docs/build/html/python/_autosummary/mlx.core.mean.html index 18be4c035..2a61336f7 100644 --- a/docs/build/html/python/_autosummary/mlx.core.mean.html +++ b/docs/build/html/python/_autosummary/mlx.core.mean.html @@ -8,7 +8,7 @@ - mlx.core.mean — MLX 0.28.0 documentation + mlx.core.mean — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.meshgrid.html b/docs/build/html/python/_autosummary/mlx.core.meshgrid.html index 13c079882..f258fd924 100644 --- a/docs/build/html/python/_autosummary/mlx.core.meshgrid.html +++ b/docs/build/html/python/_autosummary/mlx.core.meshgrid.html @@ -8,7 +8,7 @@ - mlx.core.meshgrid — MLX 0.28.0 documentation + mlx.core.meshgrid — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.metal.device_info.html b/docs/build/html/python/_autosummary/mlx.core.metal.device_info.html index 70d62fa1e..faf998587 100644 --- a/docs/build/html/python/_autosummary/mlx.core.metal.device_info.html +++ b/docs/build/html/python/_autosummary/mlx.core.metal.device_info.html @@ -8,7 +8,7 @@ - mlx.core.metal.device_info — MLX 0.28.0 documentation + mlx.core.metal.device_info — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.metal.is_available.html b/docs/build/html/python/_autosummary/mlx.core.metal.is_available.html index 1ef7a293b..ffd125c3f 100644 --- a/docs/build/html/python/_autosummary/mlx.core.metal.is_available.html +++ b/docs/build/html/python/_autosummary/mlx.core.metal.is_available.html @@ -8,7 +8,7 @@ - mlx.core.metal.is_available — MLX 0.28.0 documentation + mlx.core.metal.is_available — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.metal.start_capture.html b/docs/build/html/python/_autosummary/mlx.core.metal.start_capture.html index 25348e56a..e374fa38e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.metal.start_capture.html +++ b/docs/build/html/python/_autosummary/mlx.core.metal.start_capture.html @@ -8,7 +8,7 @@ - mlx.core.metal.start_capture — MLX 0.28.0 documentation + mlx.core.metal.start_capture — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.metal.stop_capture.html b/docs/build/html/python/_autosummary/mlx.core.metal.stop_capture.html index c3e632620..090a7a4b1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.metal.stop_capture.html +++ b/docs/build/html/python/_autosummary/mlx.core.metal.stop_capture.html @@ -8,7 +8,7 @@ - mlx.core.metal.stop_capture — MLX 0.28.0 documentation + mlx.core.metal.stop_capture — MLX 0.29.0 documentation @@ -30,21 +30,24 @@ + - + + + - + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -949,11 +957,11 @@ document.write(`

      next

      -

      Memory Management

      +

      CUDA

      diff --git a/docs/build/html/python/_autosummary/mlx.core.min.html b/docs/build/html/python/_autosummary/mlx.core.min.html index b13a445d8..235bdd83b 100644 --- a/docs/build/html/python/_autosummary/mlx.core.min.html +++ b/docs/build/html/python/_autosummary/mlx.core.min.html @@ -8,7 +8,7 @@ - mlx.core.min — MLX 0.28.0 documentation + mlx.core.min — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.minimum.html b/docs/build/html/python/_autosummary/mlx.core.minimum.html index fe51c0720..7f74a508c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.minimum.html +++ b/docs/build/html/python/_autosummary/mlx.core.minimum.html @@ -8,7 +8,7 @@ - mlx.core.minimum — MLX 0.28.0 documentation + mlx.core.minimum — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.moveaxis.html b/docs/build/html/python/_autosummary/mlx.core.moveaxis.html index d2bfdc8ac..5a202083f 100644 --- a/docs/build/html/python/_autosummary/mlx.core.moveaxis.html +++ b/docs/build/html/python/_autosummary/mlx.core.moveaxis.html @@ -8,7 +8,7 @@ - mlx.core.moveaxis — MLX 0.28.0 documentation + mlx.core.moveaxis — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.multiply.html b/docs/build/html/python/_autosummary/mlx.core.multiply.html index 1b010980d..263f13c47 100644 --- a/docs/build/html/python/_autosummary/mlx.core.multiply.html +++ b/docs/build/html/python/_autosummary/mlx.core.multiply.html @@ -8,7 +8,7 @@ - mlx.core.multiply — MLX 0.28.0 documentation + mlx.core.multiply — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.nan_to_num.html b/docs/build/html/python/_autosummary/mlx.core.nan_to_num.html index 58df8e681..1dc285a25 100644 --- a/docs/build/html/python/_autosummary/mlx.core.nan_to_num.html +++ b/docs/build/html/python/_autosummary/mlx.core.nan_to_num.html @@ -8,7 +8,7 @@ - mlx.core.nan_to_num — MLX 0.28.0 documentation + mlx.core.nan_to_num — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.negative.html b/docs/build/html/python/_autosummary/mlx.core.negative.html index 3867f5c3b..ecb577690 100644 --- a/docs/build/html/python/_autosummary/mlx.core.negative.html +++ b/docs/build/html/python/_autosummary/mlx.core.negative.html @@ -8,7 +8,7 @@ - mlx.core.negative — MLX 0.28.0 documentation + mlx.core.negative — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.new_stream.html b/docs/build/html/python/_autosummary/mlx.core.new_stream.html index 2693d7a07..19b06dcaa 100644 --- a/docs/build/html/python/_autosummary/mlx.core.new_stream.html +++ b/docs/build/html/python/_autosummary/mlx.core.new_stream.html @@ -8,7 +8,7 @@ - mlx.core.new_stream — MLX 0.28.0 documentation + mlx.core.new_stream — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.not_equal.html b/docs/build/html/python/_autosummary/mlx.core.not_equal.html index 904801503..392cb4834 100644 --- a/docs/build/html/python/_autosummary/mlx.core.not_equal.html +++ b/docs/build/html/python/_autosummary/mlx.core.not_equal.html @@ -8,7 +8,7 @@ - mlx.core.not_equal — MLX 0.28.0 documentation + mlx.core.not_equal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.ones.html b/docs/build/html/python/_autosummary/mlx.core.ones.html index 266bf4f0d..8788e4337 100644 --- a/docs/build/html/python/_autosummary/mlx.core.ones.html +++ b/docs/build/html/python/_autosummary/mlx.core.ones.html @@ -8,7 +8,7 @@ - mlx.core.ones — MLX 0.28.0 documentation + mlx.core.ones — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.ones_like.html b/docs/build/html/python/_autosummary/mlx.core.ones_like.html index bdbe29c87..8fcd34dd1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.ones_like.html +++ b/docs/build/html/python/_autosummary/mlx.core.ones_like.html @@ -8,7 +8,7 @@ - mlx.core.ones_like — MLX 0.28.0 documentation + mlx.core.ones_like — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.outer.html b/docs/build/html/python/_autosummary/mlx.core.outer.html index 270af9d33..a1239cf2a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.outer.html +++ b/docs/build/html/python/_autosummary/mlx.core.outer.html @@ -8,7 +8,7 @@ - mlx.core.outer — MLX 0.28.0 documentation + mlx.core.outer — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.pad.html b/docs/build/html/python/_autosummary/mlx.core.pad.html index 2c214193a..c1d53b2b6 100644 --- a/docs/build/html/python/_autosummary/mlx.core.pad.html +++ b/docs/build/html/python/_autosummary/mlx.core.pad.html @@ -8,7 +8,7 @@ - mlx.core.pad — MLX 0.28.0 documentation + mlx.core.pad — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.partition.html b/docs/build/html/python/_autosummary/mlx.core.partition.html index 5f2fc33c6..b77e89905 100644 --- a/docs/build/html/python/_autosummary/mlx.core.partition.html +++ b/docs/build/html/python/_autosummary/mlx.core.partition.html @@ -8,7 +8,7 @@ - mlx.core.partition — MLX 0.28.0 documentation + mlx.core.partition — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.power.html b/docs/build/html/python/_autosummary/mlx.core.power.html index 7d9d284b9..1e30386f1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.power.html +++ b/docs/build/html/python/_autosummary/mlx.core.power.html @@ -8,7 +8,7 @@ - mlx.core.power — MLX 0.28.0 documentation + mlx.core.power — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.prod.html b/docs/build/html/python/_autosummary/mlx.core.prod.html index 8054ed043..5e2559e85 100644 --- a/docs/build/html/python/_autosummary/mlx.core.prod.html +++ b/docs/build/html/python/_autosummary/mlx.core.prod.html @@ -8,7 +8,7 @@ - mlx.core.prod — MLX 0.28.0 documentation + mlx.core.prod — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.put_along_axis.html b/docs/build/html/python/_autosummary/mlx.core.put_along_axis.html index ddf97490f..725f8f7d3 100644 --- a/docs/build/html/python/_autosummary/mlx.core.put_along_axis.html +++ b/docs/build/html/python/_autosummary/mlx.core.put_along_axis.html @@ -8,7 +8,7 @@ - mlx.core.put_along_axis — MLX 0.28.0 documentation + mlx.core.put_along_axis — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.quantize.html b/docs/build/html/python/_autosummary/mlx.core.quantize.html index c881f2256..76cb699bf 100644 --- a/docs/build/html/python/_autosummary/mlx.core.quantize.html +++ b/docs/build/html/python/_autosummary/mlx.core.quantize.html @@ -8,7 +8,7 @@ - mlx.core.quantize — MLX 0.28.0 documentation + mlx.core.quantize — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -923,7 +931,7 @@ document.write(`

      mlx.core.quantize#

      -quantize(w: array, /, group_size: int = 64, bits: int = 4, *, stream: None | Stream | Device = None) tuple[array, array, array]#
      +quantize(w: array, /, group_size: int = 64, bits: int = 4, mode: str = 'affine', *, stream: None | Stream | Device = None) tuple[array, array, array]#

      Quantize the matrix w using bits bits per element.

Note, every group_size elements in a row of w are quantized together. Hence, number of columns of w should be divisible by
@@ -931,11 +939,39 @@ together. Hence, number of columns of group_size which are quantized together.

      Warning

      -

      quantize currently only supports 2D inputs with dimensions which are multiples of 32

      +

      quantize currently only supports 2D inputs with the second +dimension divisible by group_size

      -

      Formally, for a group of \(g\) consecutive elements \(w_1\) to -\(w_g\) in a row of w we compute the quantized representation -of each element \(\hat{w_i}\) as follows

      +

      The supported quantization modes are "affine" and "mxfp4". They +are described in more detail below.

      +
      +
      Parameters:
      +
        +
      • w (array) – Matrix to be quantized

      • +
      • group_size (int, optional) – The size of the group in w that shares a +scale and bias. Default: 64.

      • +
      • bits (int, optional) – The number of bits occupied by each element of +w in the returned quantized matrix. Default: 4.

      • +
      • mode (str, optional) – The quantization mode. Default: "affine".

      • +
      +
      +
      Returns:
      +

      A tuple with either two or three elements containing:

      +
        +
      • w_q (array): The quantized version of w

      • +
      • scales (array): The quantization scales

      • +
      • biases (array): The quantization biases (returned for mode=="affine").

      • +
      +

      +
      +
      Return type:
      +

      tuple

      +
      +
      +

      Notes

      +

      The affine mode quantizes groups of \(g\) consecutive +elements in a row of w. For each group the quantized +representation of each element \(\hat{w_i}\) is computed as follows:

\[\begin{split}\begin{aligned}
\alpha &= \max_i w_i \\
\beta &= \min_i w_i \\
s &= \frac{\alpha - \beta}{2^b - 1} \\
\hat{w_i} &= \textrm{round}\left(\frac{w_i - \beta}{s}\right)
\end{aligned}\end{split}\]
@@ -948,32 +984,15 @@ and is packed in an unsigned 32-bit integer from the lower to upper bits. For instance, for 4-bit quantization we fit 8 elements in an unsigned 32 bit integer where the 1st element occupies the 4 least significant bits, the 2nd bits 4-7 etc.

      -

      In order to be able to dequantize the elements of w we also need to -save \(s\) and \(\beta\) which are the returned scales and +

      To dequantize the elements of w, we also save \(s\) and +\(\beta\) which are the returned scales and biases respectively.
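
For reference, the inverse map implied by the returned values is a one-line sketch (assuming the affine formula above): each element is recovered, up to rounding error, as

\[w_i \approx s \, \hat{w_i} + \beta\]

which is why one scale and one bias per group are enough to dequantize.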

      -
      -
      Parameters:
      -
        -
      • w (array) – Matrix to be quantized

      • -
      • group_size (int, optional) – The size of the group in w that shares a -scale and bias. Default: 64.

      • -
      • bits (int, optional) – The number of bits occupied by each element of -w in the returned quantized matrix. Default: 4.

      • -
      -
      -
      Returns:
      -

      A tuple containing

      -
        -
      • w_q (array): The quantized version of w

      • -
      • scales (array): The scale to multiply each element with, namely \(s\)

      • -
      • biases (array): The biases to add to each element, namely \(\beta\)

      • -
      -

      -
      -
      Return type:
      -

      tuple

      -
      -
      +

      The mxfp4 mode similarly quantizes groups of \(g\) elements +of w. For mxfp4 the group size must be 32. The elements +are quantized to 4-bit precision floating-point values (E2M1) with a +shared 8-bit scale per group. Unlike affine quantization, +mxfp4 does not have a bias value. More details on the format can +be found in the specification.
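
A minimal usage sketch of the updated signature (the array shapes and the round trip through dequantize are illustrative assumptions, not part of the patch):

import mlx.core as mx

w = mx.random.normal(shape=(256, 256))

# Affine quantization: packed weights plus one scale and bias per group of 64.
w_q, scales, biases = mx.quantize(w, group_size=64, bits=4, mode="affine")

# Approximate round trip back to floating point.
w_hat = mx.dequantize(w_q, scales, biases, group_size=64, bits=4)

# With mode="mxfp4" the group size must be 32 and no biases are returned.
w_q4, scales4 = mx.quantize(w, group_size=32, bits=4, mode="mxfp4")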

      diff --git a/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html b/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html index b8f7eeb6b..aa9abc018 100644 --- a/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html +++ b/docs/build/html/python/_autosummary/mlx.core.quantized_matmul.html @@ -8,7 +8,7 @@ - mlx.core.quantized_matmul — MLX 0.28.0 documentation + mlx.core.quantized_matmul — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,7 +930,7 @@ document.write(`

      mlx.core.quantized_matmul#

      -quantized_matmul(x: array, w: array, /, scales: array, biases: array, transpose: bool = True, group_size: int = 64, bits: int = 4, *, stream: None | Stream | Device = None) array#
      +quantized_matmul(x: array, w: array, /, scales: array, biases: array | None = None, transpose: bool = True, group_size: int = 64, bits: int = 4, mode: str = 'affine', *, stream: None | Stream | Device = None) array#

Perform the matrix multiplication with the quantized matrix w. The quantization uses one floating point scale and bias per group_size of elements. Each element in w takes bits bits and is packed in an
@@ -933,7 +941,8 @@ unsigned 32 bit integer.

    • x (array) – Input array

    • w (array) – Quantized matrix packed in unsigned integers

    • scales (array) – The scales to use per group_size elements of w

    • -
    • biases (array) – The biases to use per group_size elements of w

    • +
    • biases (array, optional) – The biases to use per group_size +elements of w. Default: None.

    • transpose (bool, optional) – Defines whether to multiply with the transposed w or not, namely whether we are performing x @ w.T or x @ w. Default: True.

    • @@ -941,6 +950,7 @@ transposed w shares a scale and bias. Default: 64.

    • bits (int, optional) – The number of bits occupied by each element in w. Default: 4.

    • +
    • mode (str, optional) – The quantization mode. Default: "affine".

    Returns:
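
A short sketch of how the quantized pieces feed quantized_matmul (shapes are hypothetical; with transpose=True this computes x @ w.T against the packed weights):

import mlx.core as mx

x = mx.random.normal(shape=(8, 256))
w = mx.random.normal(shape=(128, 256))

# Quantize once, then reuse the packed weights, scales and biases.
w_q, scales, biases = mx.quantize(w, group_size=64, bits=4, mode="affine")

y = mx.quantized_matmul(x, w_q, scales, biases, transpose=True,
                        group_size=64, bits=4, mode="affine")  # result shape (8, 128)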
    diff --git a/docs/build/html/python/_autosummary/mlx.core.radians.html b/docs/build/html/python/_autosummary/mlx.core.radians.html index 982baa98e..6ab6052d2 100644 --- a/docs/build/html/python/_autosummary/mlx.core.radians.html +++ b/docs/build/html/python/_autosummary/mlx.core.radians.html @@ -8,7 +8,7 @@ - mlx.core.radians — MLX 0.28.0 documentation + mlx.core.radians — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
  • mlx.core.fast.rope
  • mlx.core.fast.scaled_dot_product_attention
  • mlx.core.fast.metal_kernel
  • +
  • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html b/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html index 17e43eebd..313311a1e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.bernoulli.html @@ -8,7 +8,7 @@ - mlx.core.random.bernoulli — MLX 0.28.0 documentation + mlx.core.random.bernoulli — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.categorical.html b/docs/build/html/python/_autosummary/mlx.core.random.categorical.html index 128bbcc3c..faf277a0a 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.categorical.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.categorical.html @@ -8,7 +8,7 @@ - mlx.core.random.categorical — MLX 0.28.0 documentation + mlx.core.random.categorical — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html b/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html index 34b9c6494..1af5c2f5e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.gumbel.html @@ -8,7 +8,7 @@ - mlx.core.random.gumbel — MLX 0.28.0 documentation + mlx.core.random.gumbel — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.key.html b/docs/build/html/python/_autosummary/mlx.core.random.key.html index 2b8725193..73b76fb51 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.key.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.key.html @@ -8,7 +8,7 @@ - mlx.core.random.key — MLX 0.28.0 documentation + mlx.core.random.key — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.laplace.html b/docs/build/html/python/_autosummary/mlx.core.random.laplace.html index 42bedc78b..f447277fc 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.laplace.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.laplace.html @@ -8,7 +8,7 @@ - mlx.core.random.laplace — MLX 0.28.0 documentation + mlx.core.random.laplace — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.multivariate_normal.html b/docs/build/html/python/_autosummary/mlx.core.random.multivariate_normal.html index 655b027c2..a8af7eaeb 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.multivariate_normal.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.multivariate_normal.html @@ -8,7 +8,7 @@ - mlx.core.random.multivariate_normal — MLX 0.28.0 documentation + mlx.core.random.multivariate_normal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.normal.html b/docs/build/html/python/_autosummary/mlx.core.random.normal.html index 52a8ee2b9..1b5cbf0ec 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.normal.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.normal.html @@ -8,7 +8,7 @@ - mlx.core.random.normal — MLX 0.28.0 documentation + mlx.core.random.normal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.permutation.html b/docs/build/html/python/_autosummary/mlx.core.random.permutation.html index cc43f8cb8..0544f2fac 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.permutation.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.permutation.html @@ -8,7 +8,7 @@ - mlx.core.random.permutation — MLX 0.28.0 documentation + mlx.core.random.permutation — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.randint.html b/docs/build/html/python/_autosummary/mlx.core.random.randint.html index 3de6c7ba3..420c0c348 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.randint.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.randint.html @@ -8,7 +8,7 @@ - mlx.core.random.randint — MLX 0.28.0 documentation + mlx.core.random.randint — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.seed.html b/docs/build/html/python/_autosummary/mlx.core.random.seed.html index cfd2bbce1..41ada4fe7 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.seed.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.seed.html @@ -8,7 +8,7 @@ - mlx.core.random.seed — MLX 0.28.0 documentation + mlx.core.random.seed — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.split.html b/docs/build/html/python/_autosummary/mlx.core.random.split.html index 3c416de0a..e50d48c86 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.split.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.split.html @@ -8,7 +8,7 @@ - mlx.core.random.split — MLX 0.28.0 documentation + mlx.core.random.split — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html b/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html index bf4cfa0ff..16c04f02c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.truncated_normal.html @@ -8,7 +8,7 @@ - mlx.core.random.truncated_normal — MLX 0.28.0 documentation + mlx.core.random.truncated_normal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.random.uniform.html b/docs/build/html/python/_autosummary/mlx.core.random.uniform.html index ee6dc62cb..29ae89e85 100644 --- a/docs/build/html/python/_autosummary/mlx.core.random.uniform.html +++ b/docs/build/html/python/_autosummary/mlx.core.random.uniform.html @@ -8,7 +8,7 @@ - mlx.core.random.uniform — MLX 0.28.0 documentation + mlx.core.random.uniform — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.real.html b/docs/build/html/python/_autosummary/mlx.core.real.html index 8ba2e94f3..8860607dc 100644 --- a/docs/build/html/python/_autosummary/mlx.core.real.html +++ b/docs/build/html/python/_autosummary/mlx.core.real.html @@ -8,7 +8,7 @@ - mlx.core.real — MLX 0.28.0 documentation + mlx.core.real — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.reciprocal.html b/docs/build/html/python/_autosummary/mlx.core.reciprocal.html index bf04d470d..aa59ff078 100644 --- a/docs/build/html/python/_autosummary/mlx.core.reciprocal.html +++ b/docs/build/html/python/_autosummary/mlx.core.reciprocal.html @@ -8,7 +8,7 @@ - mlx.core.reciprocal — MLX 0.28.0 documentation + mlx.core.reciprocal — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.remainder.html b/docs/build/html/python/_autosummary/mlx.core.remainder.html index 5b358762e..c250b0be7 100644 --- a/docs/build/html/python/_autosummary/mlx.core.remainder.html +++ b/docs/build/html/python/_autosummary/mlx.core.remainder.html @@ -8,7 +8,7 @@ - mlx.core.remainder — MLX 0.28.0 documentation + mlx.core.remainder — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.repeat.html b/docs/build/html/python/_autosummary/mlx.core.repeat.html index a3b2eb935..fc6554f33 100644 --- a/docs/build/html/python/_autosummary/mlx.core.repeat.html +++ b/docs/build/html/python/_autosummary/mlx.core.repeat.html @@ -8,7 +8,7 @@ - mlx.core.repeat — MLX 0.28.0 documentation + mlx.core.repeat — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.reset_peak_memory.html b/docs/build/html/python/_autosummary/mlx.core.reset_peak_memory.html index 65129e5c5..cf78ee743 100644 --- a/docs/build/html/python/_autosummary/mlx.core.reset_peak_memory.html +++ b/docs/build/html/python/_autosummary/mlx.core.reset_peak_memory.html @@ -8,7 +8,7 @@ - mlx.core.reset_peak_memory — MLX 0.28.0 documentation + mlx.core.reset_peak_memory — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.reshape.html b/docs/build/html/python/_autosummary/mlx.core.reshape.html index b0b6ea30b..e1ba9ce4c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.reshape.html +++ b/docs/build/html/python/_autosummary/mlx.core.reshape.html @@ -8,7 +8,7 @@ - mlx.core.reshape — MLX 0.28.0 documentation + mlx.core.reshape — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.right_shift.html b/docs/build/html/python/_autosummary/mlx.core.right_shift.html index b1f7db1db..a406e435e 100644 --- a/docs/build/html/python/_autosummary/mlx.core.right_shift.html +++ b/docs/build/html/python/_autosummary/mlx.core.right_shift.html @@ -8,7 +8,7 @@ - mlx.core.right_shift — MLX 0.28.0 documentation + mlx.core.right_shift — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.roll.html b/docs/build/html/python/_autosummary/mlx.core.roll.html index 692709b81..326a002f1 100644 --- a/docs/build/html/python/_autosummary/mlx.core.roll.html +++ b/docs/build/html/python/_autosummary/mlx.core.roll.html @@ -8,7 +8,7 @@ - mlx.core.roll — MLX 0.28.0 documentation + mlx.core.roll — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.round.html b/docs/build/html/python/_autosummary/mlx.core.round.html index 87949ccb7..080668044 100644 --- a/docs/build/html/python/_autosummary/mlx.core.round.html +++ b/docs/build/html/python/_autosummary/mlx.core.round.html @@ -8,7 +8,7 @@ - mlx.core.round — MLX 0.28.0 documentation + mlx.core.round — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.rsqrt.html b/docs/build/html/python/_autosummary/mlx.core.rsqrt.html index d443408f3..9bb223d5c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.rsqrt.html +++ b/docs/build/html/python/_autosummary/mlx.core.rsqrt.html @@ -8,7 +8,7 @@ - mlx.core.rsqrt — MLX 0.28.0 documentation + mlx.core.rsqrt — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/_autosummary/mlx.core.save.html b/docs/build/html/python/_autosummary/mlx.core.save.html index 981ea60c0..ed53b1e0c 100644 --- a/docs/build/html/python/_autosummary/mlx.core.save.html +++ b/docs/build/html/python/_autosummary/mlx.core.save.html @@ -8,7 +8,7 @@ - mlx.core.save — MLX 0.28.0 documentation + mlx.core.save — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,12 +930,12 @@ document.write(`

      mlx.core.save#

      -save(file: str, arr: array) None#
      +save(file: file | str | Path, arr: array) None#

      Save the array to a binary file in .npy format.

      Parameters:
        -
      • file (str) – File to which the array is saved

      • +
      • file (str, Path, file) – File to which the array is saved

      • arr (array) – Array to be saved.
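
A minimal sketch of the widened file argument (the Path form is the new part; the filename is only an example):

from pathlib import Path
import mlx.core as mx

a = mx.arange(4)
mx.save(Path("a.npy"), a)  # a plain str or an open file object also works
b = mx.load("a.npy")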

      diff --git a/docs/build/html/python/_autosummary/mlx.core.save_gguf.html b/docs/build/html/python/_autosummary/mlx.core.save_gguf.html index d17111059..e8ca64d51 100644 --- a/docs/build/html/python/_autosummary/mlx.core.save_gguf.html +++ b/docs/build/html/python/_autosummary/mlx.core.save_gguf.html @@ -8,7 +8,7 @@ - mlx.core.save_gguf — MLX 0.28.0 documentation + mlx.core.save_gguf — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,14 +930,14 @@ document.write(`

      mlx.core.save_gguf#

      -save_gguf(file: str, arrays: dict[str, array], metadata: dict[str, array | str | list[str]])#
      +save_gguf(file: file | str | Path, arrays: dict[str, array], metadata: dict[str, array | str | list[str]])#

      Save array(s) to a binary file in .gguf format.

      See the GGUF documentation for more information on the format.

      Parameters:
        -
      • file (file, str) – File in which the array is saved.

      • +
      • file (file, str, Path) – File in which the array is saved.

      • arrays (dict(str, array)) – The dictionary of names to arrays to be saved.

      • metadata (dict(str, Union[array, str, list(str)])) – The dictionary diff --git a/docs/build/html/python/_autosummary/mlx.core.save_safetensors.html b/docs/build/html/python/_autosummary/mlx.core.save_safetensors.html index fb93b5f86..feb1b8475 100644 --- a/docs/build/html/python/_autosummary/mlx.core.save_safetensors.html +++ b/docs/build/html/python/_autosummary/mlx.core.save_safetensors.html @@ -8,7 +8,7 @@ - mlx.core.save_safetensors — MLX 0.28.0 documentation + mlx.core.save_safetensors — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@

      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,14 +930,14 @@ document.write(`

      mlx.core.save_safetensors#

      -save_safetensors(file: str, arrays: dict[str, array], metadata: dict[str, str] | None = None)#
      +save_safetensors(file: file | str | Path, arrays: dict[str, array], metadata: dict[str, str] | None = None)#

      Save array(s) to a binary file in .safetensors format.

      See the Safetensors documentation for more information on the format.

      Parameters:
        -
      • file (file, str) – File in which the array is saved.

      • +
      • file (file, str, Path) – File in which the array is saved.

      • arrays (dict(str, array)) – The dictionary of names to arrays to be saved.

      • metadata (dict(str, str), optional) – The dictionary of diff --git a/docs/build/html/python/_autosummary/mlx.core.savez.html b/docs/build/html/python/_autosummary/mlx.core.savez.html index cd4bf9632..573b37ede 100644 --- a/docs/build/html/python/_autosummary/mlx.core.savez.html +++ b/docs/build/html/python/_autosummary/mlx.core.savez.html @@ -8,7 +8,7 @@ - mlx.core.savez — MLX 0.28.0 documentation + mlx.core.savez — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@

      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,7 +930,7 @@ document.write(`

      mlx.core.savez#

      -savez(file: object, *args, **kwargs) None#
      +savez(file: file | str | Path, *args, **kwargs)#

      Save several arrays to a binary file in uncompressed .npz format.

      import mlx.core as mx
      @@ -941,7 +949,7 @@ format.

      Parameters:
        -
      • file (file, str) – Path to file to which the arrays are saved.

      • +
      • file (file, str, Path) – Path to file to which the arrays are saved.

      • *args (arrays) – Arrays to be saved.

      • **kwargs (arrays) – Arrays to be saved. Each array will be saved with the associated keyword as the output file name.
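
A short sketch of the keyword form (the archive name is illustrative): each keyword becomes the array's name inside the .npz file, and load returns a dict keyed the same way.

import mlx.core as mx

x = mx.ones((3, 3))
mx.savez("arrays.npz", x=x)

arrays = mx.load("arrays.npz")
x_loaded = arrays["x"]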

      • diff --git a/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html b/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html index 0a7448808..227c57788 100644 --- a/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html +++ b/docs/build/html/python/_autosummary/mlx.core.savez_compressed.html @@ -8,7 +8,7 @@ - mlx.core.savez_compressed — MLX 0.28.0 documentation + mlx.core.savez_compressed — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home +
      @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • @@ -922,12 +930,12 @@ document.write(`

      mlx.core.savez_compressed#

      -savez_compressed(file: str, *args, **kwargs)#
      +savez_compressed(file: file | str | Path, *args, **kwargs)#

      Save several arrays to a binary file in compressed .npz format.

      Parameters:
        -
      • file (file, str) – Path to file to which the arrays are saved.

      • +
      • file (file, str, Path) – Path to file to which the arrays are saved.

      • *args (arrays) – Arrays to be saved.

      • **kwargs (arrays) – Arrays to be saved. Each array will be saved with the associated keyword as the output file name.

The next run of hunks touches the remaining mlx.core.* autosummary pages. Each one receives the same rebuild-only changes: the page title moves from "MLX 0.28.0 documentation" to "MLX 0.29.0 documentation", and the sidebar navigation gains an mlx.core.fast.cuda_kernel entry under Fast plus a new CUDA section. The affected files under docs/build/html/python/_autosummary/ are:

mlx.core.set_cache_limit.html, mlx.core.set_default_device.html, mlx.core.set_default_stream.html, mlx.core.set_memory_limit.html, mlx.core.set_wired_limit.html, mlx.core.sigmoid.html, mlx.core.sign.html, mlx.core.sin.html, mlx.core.sinh.html, mlx.core.slice.html, mlx.core.slice_update.html, mlx.core.softmax.html, mlx.core.sort.html, mlx.core.split.html, mlx.core.sqrt.html, mlx.core.square.html, mlx.core.squeeze.html, mlx.core.stack.html, mlx.core.std.html, mlx.core.stop_gradient.html, mlx.core.stream.html, mlx.core.subtract.html, mlx.core.sum.html, mlx.core.swapaxes.html, mlx.core.synchronize.html, mlx.core.take.html, mlx.core.take_along_axis.html, mlx.core.tan.html, mlx.core.tanh.html, mlx.core.tensordot.html, mlx.core.tile.html, mlx.core.topk.html, mlx.core.trace.html, mlx.core.transpose.html, mlx.core.tri.html, mlx.core.tril.html, mlx.core.triu.html, mlx.core.unflatten.html, mlx.core.value_and_grad.html, mlx.core.var.html, mlx.core.view.html, mlx.core.vjp.html, mlx.core.vmap.html, mlx.core.where.html, mlx.core.zeros.html, mlx.core.zeros_like.html.
diff --git a/docs/build/html/python/_autosummary/mlx.nn.average_gradients.html b/docs/build/html/python/_autosummary/mlx.nn.average_gradients.html

Besides the same title bump (MLX 0.28.0 → MLX 0.29.0 documentation) and sidebar additions (mlx.core.fast.cuda_kernel, CUDA), the rendered docstring of mlx.nn.average_gradients gains a new argument:

-average_gradients(gradients: Any, group: Group | None = None, all_reduce_size: int = 33554432, communication_type: Dtype | None = None)
+average_gradients(gradients: Any, group: Group | None = None, all_reduce_size: int = 33554432, communication_type: Dtype | None = None, communication_stream: Stream | None = None)

Average the gradients across the distributed processes in the passed group. This helper enables concatenating several gradients of small arrays into one big all-reduce call for better networking performance.

In the parameter list, after communication_type (Optional[Dtype]) – "If provided cast to this type before performing the communication. Typically cast to a smaller float to reduce the communication size. Default: None." – a new entry is added:

+communication_stream (Optional[Stream]) – The stream to use for the communication. If unspecified the default communication stream is used, which can vary by back-end. Default: None.
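As a rough sketch of the extended call — the toy Linear model, the MSE loss, and the data shapes below are illustrative assumptions, and only the average_gradients keywords come from the signature above:

    import mlx.core as mx
    import mlx.nn as nn
    from mlx.nn import average_gradients

    # Toy single-layer model and loss; in a real run mx.distributed would be
    # initialized across several processes (e.g. launched with mlx.launch).
    model = nn.Linear(8, 2)

    def loss_fn(model, x, y):
        return nn.losses.mse_loss(model(x), y, reduction="mean")

    x = mx.random.normal((16, 8))
    y = mx.random.normal((16, 2))

    loss_and_grad = nn.value_and_grad(model, loss_fn)
    loss, grads = loss_and_grad(model, x, y)

    # Average the gradient tree across processes: cast to bfloat16 to shrink
    # the messages and run the collective on a dedicated CPU stream via the
    # new communication_stream argument.
    grads = average_gradients(
        grads,
        communication_type=mx.bfloat16,
        communication_stream=mx.new_stream(mx.cpu),
    )
    mx.eval(loss, grads)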

diff --git a/docs/build/html/python/_autosummary/mlx.nn.quantize.html b/docs/build/html/python/_autosummary/mlx.nn.quantize.html

Besides the same title bump and sidebar additions, the rendered signature of mlx.nn.quantize gains a keyword-only mode argument:

-quantize(model: Module, group_size: int = 64, bits: int = 4, class_predicate: Callable[[str, Module], bool | dict] | None = None)
+quantize(model: Module, group_size: int = 64, bits: int = 4, *, mode: str = 'affine', class_predicate: Callable[[str, Module], bool | dict] | None = None)

Quantize the sub-modules of a module according to a predicate. By default all layers that define a to_quantized(group_size, bits) method will be quantized; both Linear and Embedding layers will be quantized. Note also that the module is updated in-place.

Parameters:
group_size (int) – … (see mlx.core.quantize()). Default: 64.
bits (int) – The number of bits per parameter (see mlx.core.quantize()). Default: 4.
+mode (str) – The quantization method to use (see mlx.core.quantize()). Default: "affine".
class_predicate (Optional[Callable]) – A callable which receives the Module path and Module itself and returns True or a dict of params for to_quantized if it should be quantized and False otherwise.
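A minimal usage sketch — the toy model and the predicate are illustrative assumptions; only the quantize() keywords come from the signature above:

    import mlx.nn as nn
    from mlx.nn import quantize

    # Toy model; any Module whose sub-modules define to_quantized() qualifies.
    model = nn.Sequential(
        nn.Embedding(1000, 64),
        nn.Linear(64, 64),
        nn.Linear(64, 10),
    )

    # Quantize the Linear/Embedding sub-modules in place: 4 bits, groups of 64,
    # using the (default) "affine" mode introduced by the new keyword above.
    quantize(
        model,
        group_size=64,
        bits=4,
        mode="affine",
        class_predicate=lambda path, m: isinstance(m, (nn.Linear, nn.Embedding)),
    )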

The same two rebuild changes (title 0.28.0 → 0.29.0, sidebar gains mlx.core.fast.cuda_kernel and a CUDA section) are then applied to mlx.nn.value_and_grad.html, mlx.optimizers.clip_grad_norm.html, mlx.utils.tree_flatten.html, mlx.utils.tree_map.html, mlx.utils.tree_map_with_path.html, mlx.utils.tree_reduce.html, mlx.utils.tree_unflatten.html and stream_class.html under docs/build/html/python/_autosummary/, and to array.html, devices_and_streams.html, distributed.html, export.html and fast.html under docs/build/html/python/. The fast.html page additionally updates its API table, shown next.
@@ -927,6 +935,9 @@ The Fast API table keeps the existing row

metal_kernel(name, input_names, ...[, ...]) — A jit-compiled custom Metal kernel defined from a source string.

and adds a new row for the CUDA counterpart:

+cuda_kernel(name, input_names, output_names, ...) — A jit-compiled custom CUDA kernel defined from a source string.
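For orientation, the sketch below uses the documented call pattern of the existing metal_kernel row; the new cuda_kernel is assumed to follow an analogous (name, input_names, output_names, source, ...) constructor per the table entry, but its launch keywords are not spelled out in this hunk:

    import mlx.core as mx

    # Element-wise exp written as a custom Metal kernel (runs on Apple GPUs).
    source = """
        uint elem = thread_position_in_grid.x;
        T tmp = inp[elem];
        out[elem] = metal::exp(tmp);
    """

    kernel = mx.fast.metal_kernel(
        name="myexp",
        input_names=["inp"],
        output_names=["out"],
        source=source,
    )

    a = mx.random.normal((4096,))
    out = kernel(
        inputs=[a],
        template=[("T", mx.float32)],
        grid=(a.size, 1, 1),
        threadgroup=(256, 1, 1),
        output_shapes=[a.shape],
        output_dtypes=[a.dtype],
    )[0]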

The title bump and sidebar additions continue through docs/build/html/python/fft.html, docs/build/html/python/memory_management.html and docs/build/html/python/nn.html; nn.html also updates one row of its function table, shown next.
@@ -1073,7 +1081,7 @@ parameters as the first argument to the function returned by

value_and_grad(model, fn) — Transform the passed function fn to a function that computes the gradients of fn wrt the model's trainable parameters and also its value.

-quantize(model[, group_size, bits, ...])
+quantize(model[, group_size, bits, mode, ...])
Quantize the sub-modules of a module according to a predicate.

average_gradients(gradients[, group, ...])

      diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.ALiBi.html b/docs/build/html/python/nn/_autosummary/mlx.nn.ALiBi.html index e2f7f0aa3..448da760c 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.ALiBi.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.ALiBi.html @@ -8,7 +8,7 @@ - mlx.nn.ALiBi — MLX 0.28.0 documentation + mlx.nn.ALiBi — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool1d.html b/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool1d.html index b874172af..17bca5b9f 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool1d.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool1d.html @@ -8,7 +8,7 @@ - mlx.nn.AvgPool1d — MLX 0.28.0 documentation + mlx.nn.AvgPool1d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool2d.html b/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool2d.html index 8214225f8..0412782bf 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool2d.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool2d.html @@ -8,7 +8,7 @@ - mlx.nn.AvgPool2d — MLX 0.28.0 documentation + mlx.nn.AvgPool2d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool3d.html b/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool3d.html index dff5d95e5..af6a330a4 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool3d.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.AvgPool3d.html @@ -8,7 +8,7 @@ - mlx.nn.AvgPool3d — MLX 0.28.0 documentation + mlx.nn.AvgPool3d — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
  • FFT
  • +
  • CUDA
    +
  • Memory Management
    • mlx.core.get_active_memory
    • mlx.core.get_peak_memory
    • The same 0.28.0 → 0.29.0 title bump and sidebar additions (mlx.core.fast.cuda_kernel and a CUDA section) are applied to each of the following pages under docs/build/html/python/nn/_autosummary/:
    • mlx.nn.BatchNorm.html
    • mlx.nn.CELU.html
    • mlx.nn.Conv1d.html
    • mlx.nn.Conv2d.html
    • mlx.nn.Conv3d.html
    • mlx.nn.ConvTranspose1d.html
    • mlx.nn.ConvTranspose2d.html
    • mlx.nn.ConvTranspose3d.html
    • mlx.nn.Dropout.html
    • mlx.nn.Dropout2d.html
    • mlx.nn.Dropout3d.html
    • mlx.nn.ELU.html
    • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Embedding.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Embedding.html index a0daf16c9..9273c85d2 100644
      mlx.nn.Embedding gets the same title bump and sidebar additions, plus a change in its Methods table (a usage sketch follows the table):
    • @@ -941,7 +949,7 @@ Usually called the vocabulary size.
      as_linear(x)
      Call the embedding layer as a linear layer.
      - to_quantized([group_size, bits])
      + to_quantized([group_size, bits, mode])
      Return a QuantizedEmbedding layer that approximates this embedding layer.
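      A minimal sketch of how the widened to_quantized signature might be used. The layer sizes, the keyword form of the bracketed arguments, and the "affine" mode value are assumptions inferred from this diff, not an example taken from the MLX docs themselves:

      import mlx.core as mx
      import mlx.nn as nn

      # Hypothetical sketch: quantize an embedding table with the new `mode` argument.
      emb = nn.Embedding(num_embeddings=1000, dims=64)

      # `group_size`, `bits`, and `mode` mirror the bracketed arguments in the table above;
      # "affine" is assumed to be an accepted mode (it is the default shown for QuantizedLinear later).
      qemb = emb.to_quantized(group_size=64, bits=4, mode="affine")

      tokens = mx.array([3, 1, 4])
      print(qemb(tokens).shape)  # expected: (3, 64)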

    • The identical title bump and sidebar additions are applied to:
    • mlx.nn.GELU.html
    • mlx.nn.GLU.html
    • mlx.nn.GRU.html
    • mlx.nn.GroupNorm.html
    • mlx.nn.HardShrink.html
    • mlx.nn.HardTanh.html
    • mlx.nn.Hardswish.html
    • mlx.nn.InstanceNorm.html
    • mlx.nn.LSTM.html
    • mlx.nn.LayerNorm.html
    • mlx.nn.LeakyReLU.html
    • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Linear.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Linear.html index 1d8706b7c..9be2a9e6f 100644
      mlx.nn.Linear gets the same title bump and sidebar additions, plus a one-row change in its Methods table:
    • @@ -945,7 +953,7 @@ not use a bias. Default is
      Methods
      - +

    • The identical title bump and sidebar additions are applied to:
    • mlx.nn.LogSigmoid.html
    • mlx.nn.LogSoftmax.html
    • mlx.nn.MaxPool1d.html
    • mlx.nn.MaxPool2d.html
    • mlx.nn.MaxPool3d.html
    • mlx.nn.Mish.html
    • mlx.nn.Module.apply.html
    • mlx.nn.Module.apply_to_modules.html
    • mlx.nn.Module.children.html
    • mlx.nn.Module.eval.html
    • mlx.nn.Module.filter_and_map.html
    • mlx.nn.Module.freeze.html
    • mlx.nn.Module.leaf_modules.html
    • mlx.nn.Module.load_weights.html
    • mlx.nn.Module.modules.html
    • mlx.nn.Module.named_modules.html
    • mlx.nn.Module.parameters.html
    • mlx.nn.Module.save_weights.html
    • mlx.nn.Module.set_dtype.html
    • mlx.nn.Module.state.html
    • mlx.nn.Module.train.html
    • mlx.nn.Module.trainable_parameters.html
    • mlx.nn.Module.training.html
    • mlx.nn.Module.unfreeze.html
    • mlx.nn.Module.update.html
    • mlx.nn.Module.update_modules.html
    • mlx.nn.MultiHeadAttention.html
    • mlx.nn.PReLU.html
    • mlx.nn.QuantizedEmbedding.html
    • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.QuantizedLinear.html b/docs/build/html/python/nn/_autosummary/mlx.nn.QuantizedLinear.html index 73db101b4..bbda8a8a4 100644
      mlx.nn.QuantizedLinear gets the same title bump and sidebar additions, plus changes to its class signature, parameter list, and Methods table (a usage sketch follows the table):
    • @@ -922,7 +930,7 @@
      mlx.nn.QuantizedLinear#
      - class QuantizedLinear(input_dims: int, output_dims: int, bias: bool = True, group_size: int = 64, bits: int = 4)#
      + class QuantizedLinear(input_dims: int, output_dims: int, bias: bool = True, group_size: int = 64, bits: int = 4, mode: str = 'affine')#
      Applies an affine transformation to the input using a quantized weight matrix.
      It is the quantized equivalent of mlx.nn.Linear. For now its parameters are frozen and will not be included in any gradient computation.
      @@ -940,13 +948,15 @@ a bias. Default: True.
    • group_size (int, optional) – The group size to use for the quantized weight. See quantize(). Default: 64.
    • bits (int, optional) – The bit width to use for the quantized weight. See quantize(). Default: 4.
    • + mode (str) – The quantization method to use (see mlx.core.quantize()). Default: "affine".
      Methods
      - to_quantized([group_size, bits])
      + to_quantized([group_size, bits, mode])
      Return a QuantizedLinear layer that approximates this layer.
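      A minimal construction sketch using the widened signature shown above. The input/output dimensions are arbitrary and "affine" simply restates the documented default, so treat this as an illustration rather than the library's own example:

      import mlx.core as mx
      import mlx.nn as nn

      # Hypothetical sketch: build a quantized linear layer with the new `mode` argument,
      # keeping the documented defaults for group_size and bits.
      qlin = nn.QuantizedLinear(input_dims=256, output_dims=128, bias=True,
                                group_size=64, bits=4, mode="affine")

      x = mx.random.normal((8, 256))
      y = qlin(x)
      print(y.shape)  # expected: (8, 128)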

      - + diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html b/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html index 7b64d055c..ef21f65a3 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.RMSNorm.html @@ -8,7 +8,7 @@ - mlx.nn.RMSNorm — MLX 0.28.0 documentation + mlx.nn.RMSNorm — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
    • mlx.core.fast.rope
    • mlx.core.fast.scaled_dot_product_attention
    • mlx.core.fast.metal_kernel
    • +
    • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.RNN.html b/docs/build/html/python/nn/_autosummary/mlx.nn.RNN.html index d8c6a99f9..bb309ecef 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.RNN.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.RNN.html @@ -8,7 +8,7 @@ - mlx.nn.RNN — MLX 0.28.0 documentation + mlx.nn.RNN — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html index 3315376fe..a96170718 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU.html @@ -8,7 +8,7 @@ - mlx.nn.ReLU — MLX 0.28.0 documentation + mlx.nn.ReLU — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU6.html b/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU6.html index ac1195e85..aa197d868 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU6.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.ReLU6.html @@ -8,7 +8,7 @@ - mlx.nn.ReLU6 — MLX 0.28.0 documentation + mlx.nn.ReLU6 — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html b/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html index 9efa1774e..17c7f3d12 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.RoPE.html @@ -8,7 +8,7 @@ - mlx.nn.RoPE — MLX 0.28.0 documentation + mlx.nn.RoPE — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html index 4ba1ce974..8db874480 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.SELU.html @@ -8,7 +8,7 @@ - mlx.nn.SELU — MLX 0.28.0 documentation + mlx.nn.SELU — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html index ab9a99e26..5e838543e 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Sequential.html @@ -8,7 +8,7 @@ - mlx.nn.Sequential — MLX 0.28.0 documentation + mlx.nn.Sequential — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html b/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html index add08e2be..ac614c702 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.SiLU.html @@ -8,7 +8,7 @@ - mlx.nn.SiLU — MLX 0.28.0 documentation + mlx.nn.SiLU — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.Sigmoid.html b/docs/build/html/python/nn/_autosummary/mlx.nn.Sigmoid.html index a4db7a2e9..9c20fdd0e 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.Sigmoid.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.Sigmoid.html @@ -8,7 +8,7 @@ - mlx.nn.Sigmoid — MLX 0.28.0 documentation + mlx.nn.Sigmoid — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.html b/docs/build/html/python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.html index 5fd911c44..3f4d99aff 100644 --- a/docs/build/html/python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.html +++ b/docs/build/html/python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.html @@ -8,7 +8,7 @@ - mlx.nn.SinusoidalPositionalEncoding — MLX 0.28.0 documentation + mlx.nn.SinusoidalPositionalEncoding — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
The identical hunks are applied to the remaining module pages under docs/build/html/python/nn/_autosummary/:
• mlx.nn.Softmax.html
• mlx.nn.Softmin.html
• mlx.nn.Softplus.html
• mlx.nn.Softshrink.html
• mlx.nn.Softsign.html
• mlx.nn.Step.html
• mlx.nn.Tanh.html
• mlx.nn.Transformer.html
• mlx.nn.Upsample.html
Initializer pages under docs/build/html/python/nn/_autosummary/ with the same hunks:
• mlx.nn.init.constant.html
• mlx.nn.init.glorot_normal.html
• mlx.nn.init.glorot_uniform.html
• mlx.nn.init.he_normal.html
• mlx.nn.init.he_uniform.html
• mlx.nn.init.identity.html
• mlx.nn.init.normal.html
• mlx.nn.init.uniform.html
Function and loss pages under docs/build/html/python/nn/_autosummary_functions/ with the same hunks:
• mlx.nn.celu.html
• mlx.nn.elu.html
• mlx.nn.gelu.html
• mlx.nn.gelu_approx.html
• mlx.nn.gelu_fast_approx.html
• mlx.nn.glu.html
• mlx.nn.hard_shrink.html
• mlx.nn.hard_tanh.html
• mlx.nn.hardswish.html
• mlx.nn.leaky_relu.html
• mlx.nn.log_sigmoid.html
• mlx.nn.log_softmax.html
• mlx.nn.losses.binary_cross_entropy.html
• mlx.nn.losses.cosine_similarity_loss.html
• mlx.nn.losses.cross_entropy.html
• mlx.nn.losses.gaussian_nll_loss.html
• mlx.nn.losses.hinge_loss.html
• mlx.nn.losses.huber_loss.html
• mlx.nn.losses.kl_div_loss.html
• mlx.nn.losses.l1_loss.html
• mlx.nn.losses.log_cosh_loss.html
• mlx.nn.losses.margin_ranking_loss.html
• mlx.nn.losses.mse_loss.html
• mlx.nn.losses.nll_loss.html
• mlx.nn.losses.smooth_l1_loss.html
• mlx.nn.losses.triplet_loss.html
• mlx.nn.mish.html
• mlx.nn.prelu.html
• mlx.nn.relu.html
• mlx.nn.relu6.html
• mlx.nn.selu.html
• mlx.nn.sigmoid.html
• mlx.nn.silu.html
• mlx.nn.softmax.html
• mlx.nn.softmin.html
• mlx.nn.softplus.html
• mlx.nn.softshrink.html
• mlx.nn.step.html
• mlx.nn.tanh.html
Section index pages under docs/build/html/python/nn/ with the same hunks:
• functions.html
• init.html
• layers.html
• losses.html
• module.html
docs/build/html/python/ops.html receives the same hunks plus two additional hunks in its inline document.write(...) script blocks (around lines 1123 and 1273). docs/build/html/python/optimizers.html receives the same hunks as the pages above.
      • diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdaDelta.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdaDelta.html index acfcb3984..7e2992d6f 100644 --- a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdaDelta.html +++ b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdaDelta.html @@ -8,7 +8,7 @@ - mlx.optimizers.AdaDelta — MLX 0.28.0 documentation + mlx.optimizers.AdaDelta — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adafactor.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adafactor.html index 0c69929a4..54ed954b8 100644 --- a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adafactor.html +++ b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adafactor.html @@ -8,7 +8,7 @@ - mlx.optimizers.Adafactor — MLX 0.28.0 documentation + mlx.optimizers.Adafactor — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adagrad.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adagrad.html index b545de197..506b99508 100644 --- a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adagrad.html +++ b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adagrad.html @@ -8,7 +8,7 @@ - mlx.optimizers.Adagrad — MLX 0.28.0 documentation + mlx.optimizers.Adagrad — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adam.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adam.html index 8931f44d0..f70e7bfed 100644 --- a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adam.html +++ b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adam.html @@ -8,7 +8,7 @@ - mlx.optimizers.Adam — MLX 0.28.0 documentation + mlx.optimizers.Adam — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdamW.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdamW.html index 5ba5eac1e..00c87fff2 100644 --- a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdamW.html +++ b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.AdamW.html @@ -8,7 +8,7 @@ - mlx.optimizers.AdamW — MLX 0.28.0 documentation + mlx.optimizers.AdamW — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -138,8 +141,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -478,6 +481,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adamax.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Adamax.html
index aa09e2375..2e7b77eb8 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Lion.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Lion.html
index 25cae6782..b0648b390 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.MultiOptimizer.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.MultiOptimizer.html
index f0260fa61..ff1c14d82 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Muon.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Muon.html
index c84f6e5bd..42a10b53e 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.html
index e90b31351..ac87b38ea 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.html
index 804c964fa..24a3ce12d 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.html
index d9b07439e..552c20bb2 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.html
index c2afb0357..7f267e21f 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.RMSprop.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.RMSprop.html
index 996df2f9b..a36193564 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.SGD.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.SGD.html
index 174b08a02..3502079a9 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.cosine_decay.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.cosine_decay.html
index 284023a68..18af518ab 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.exponential_decay.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.exponential_decay.html
index c0edffe7c..9e7afe4d4 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.join_schedules.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.join_schedules.html
index 24b0fff4a..069d8ca26 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.linear_schedule.html
index 03e057be5..983d5d2dd 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.step_decay.html b/docs/build/html/python/optimizers/_autosummary/mlx.optimizers.step_decay.html
index ba9062b50..3b5617a9c 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/common_optimizers.html b/docs/build/html/python/optimizers/common_optimizers.html
index 7b6333ce8..8c49a4acc 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/optimizer.html b/docs/build/html/python/optimizers/optimizer.html
index ed4293fdd..9bc2f7b39 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/optimizers/schedulers.html b/docs/build/html/python/optimizers/schedulers.html
index 3c92821ba..425f238eb 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/random.html b/docs/build/html/python/random.html
index b9cadbb8c..3632058be 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/transforms.html b/docs/build/html/python/transforms.html
index ff003e2c7..9b6297bbb 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
diff --git a/docs/build/html/python/tree_utils.html b/docs/build/html/python/tree_utils.html
index 97c4de850..f0b43f6f3 100644
    [same MLX 0.28.0 → 0.29.0 title/header bump and sidebar additions as above]
      • diff --git a/docs/build/html/searchindex.js b/docs/build/html/searchindex.js index cd02c338b..69d715f54 100644 --- a/docs/build/html/searchindex.js +++ b/docs/build/html/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles": {"A Simple Example": [[522, "a-simple-example"]], "Array": [[343, null]], "Attention layer": [[6, "attention-layer"]], "Automatic Differentiation": [[515, "automatic-differentiation"]], "Automatic Vectorization": [[515, "automatic-vectorization"]], "Basics": [[520, "basics"]], "Basics of Compile": [[512, "basics-of-compile"]], "Basics of Exporting": [[514, "basics-of-exporting"]], "Binary Size Minimization": [[9, "binary-size-minimization"]], "Binding to Python": [[2, "binding-to-python"]], "Build Options": [[9, "id5"]], "Build Requirements": [[9, "build-requirements"]], "Build and Install": [[9, null]], "Build from source": [[9, "build-from-source"]], "Building and Binding": [[2, "building-and-binding"]], "Building with CMake": [[2, "building-with-cmake"]], "Building with setuptools": [[2, "building-with-setuptools"]], "C++ API": [[9, "c-api"]], "C++ API Reference": [[8, null]], "CPU-only (Linux)": [[9, "cpu-only-linux"]], "CUDA": [[9, "cuda"], [9, "id3"]], "Common Optimizers": [[506, null]], "Compilation": [[512, null]], "Compiling Training Graphs": [[512, "compiling-training-graphs"]], "Complex Example": [[1, "complex-example"]], "Conversion to NumPy and Other Frameworks": [[519, null]], "Converting the weights": [[6, "converting-the-weights"]], "Custom Extensions in MLX": [[2, null]], "Custom Metal Kernels": [[1, null]], "Data Types": [[344, null]], "Debugging": [[512, "debugging"]], "Defining a Ring": [[513, "defining-a-ring"]], "Devices and Streams": [[345, null]], "Differences from NumPy": [[516, "differences-from-numpy"]], "Distributed Communication": [[346, null], [513, null]], "Download the code": [[2, null], [6, null]], "Encoder layer": [[6, "encoder-layer"]], "Example Speedup": [[512, "example-speedup"]], "Examples": [[8, null]], "Export Functions": [[347, null]], "Exporting Functions": [[514, null]], "Exporting Modules": [[514, "exporting-modules"]], "Exporting Multiple Traces": [[514, "exporting-multiple-traces"]], "FFT": [[349, null]], "Fast": [[348, null]], "Full model": [[6, "full-model"]], "Function Transforms": [[515, null]], "Function and Graph Transformations": [[520, "function-and-graph-transformations"]], "Functions": [[479, null]], "Further Reading": [[8, null]], "Generation": [[6, "generation"]], "Getting Started": [[513, "getting-started"]], "Getting Started with MPI": [[513, "getting-started-with-mpi"]], "Getting Started with Ring": [[513, "getting-started-with-ring"]], "Grid Sample VJP": [[1, "grid-sample-vjp"]], "Implementing the CPU Back-end": [[2, "implementing-the-cpu-back-end"]], "Implementing the GPU Back-end": [[2, "implementing-the-gpu-back-end"]], "Implementing the Primitive": [[2, "implementing-the-primitive"]], "Implementing the model": [[6, "implementing-the-model"]], "Importing Functions in C++": [[514, "importing-functions-in-c"]], "In Place Updates": [[516, "in-place-updates"]], "Indexing Arrays": [[516, null]], "Initializers": [[480, null]], "Inspecting Modules": [[353, "inspecting-modules"]], "Install": [[8, null]], "Installing MPI": [[513, "installing-mpi"]], "Introducing the Example": [[2, "introducing-the-example"]], "JAX": [[519, "jax"]], "LLM inference": [[6, null]], "Launching Distributed Programs": [[517, null]], "Layers": [[481, null]], "Lazy Evaluation": [[518, null]], "Linear Algebra": [[350, null]], 
"Linear Regression": [[5, null]], "Linux": [[9, "linux"]], "Loss Functions": [[482, null]], "MLX": [[8, null]], "MPI Specifics": [[517, "mpi-specifics"]], "Memory Management": [[351, null]], "Metal": [[352, null]], "Metal Debugger": [[3, null]], "Metal not found": [[9, "metal-not-found"]], "Module": [[483, null]], "More Examples": [[514, "more-examples"]], "Multi-Layer Perceptron": [[7, null]], "Neural Networks": [[353, null]], "Only Compute What You Use": [[518, "only-compute-what-you-use"]], "Operations": [[0, null], [2, "operations"], [484, null]], "Operations and Primitives": [[2, "operations-and-primitives"]], "Optimizer": [[507, null]], "Optimizers": [[485, null]], "Package Variables": [[4, "id1"]], "Parameters": [[353, "parameters"]], "Primitive Transforms": [[2, "primitive-transforms"]], "Primitives": [[2, "primitives"]], "Providing Hosts": [[517, "providing-hosts"]], "Pure Functions": [[512, "pure-functions"]], "Putting it all together": [[6, "putting-it-all-together"]], "PyTorch": [[519, "pytorch"]], "Python API": [[9, "python-api"]], "Python API Reference": [[8, null]], "Python Installation": [[9, "python-installation"]], "Quick Start Guide": [[520, null]], "Quick Start with Neural Networks": [[353, "quick-start-with-neural-networks"]], "Random": [[509, null]], "Results": [[2, "results"]], "Ring Specifics": [[517, "ring-specifics"]], "Running Distributed Programs": [[513, "running-distributed-programs"]], "Saving and Loading": [[485, "saving-and-loading"]], "Saving and Loading Arrays": [[521, null]], "Schedulers": [[508, null]], "Scripts": [[2, "scripts"], [6, "scripts"]], "Selecting Backend": [[513, "selecting-backend"]], "Serialization Formats": [[521, "id1"]], "Setting up Remote Hosts": [[513, "setting-up-remote-hosts"], [517, "setting-up-remote-hosts"]], "Shapeless Compilation": [[512, "shapeless-compilation"]], "Shapeless Exports": [[514, "shapeless-exports"]], "Simple Example": [[1, "simple-example"]], "Specifying the Stream": [[523, "specifying-the-stream"]], "Supported Data Types": [[344, "id2"]], "TensorFlow": [[519, "tensorflow"]], "The Module Class": [[353, "the-module-class"]], "Thunderbolt Ring": [[513, "thunderbolt-ring"]], "Training Example": [[513, "training-example"]], "Transformations with Compile": [[512, "transformations-with-compile"]], "Transformations with Imported Functions": [[514, "transformations-with-imported-functions"]], "Transforming Compute Graphs": [[518, "transforming-compute-graphs"]], "Transforms": [[510, null]], "Tree Utils": [[511, null]], "Troubleshooting": [[9, "troubleshooting"], [9, "id4"]], "Tuning MPI All Reduce": [[513, "tuning-mpi-all-reduce"]], "Unified Memory": [[522, null]], "Updating the Parameters": [[353, "updating-the-parameters"]], "Usage": [[2, "usage"], [8, null], [517, "usage"]], "Using MLX in C++": [[4, null]], "Using Shape/Strides": [[1, "using-shape-strides"]], "Using Streams": [[523, null]], "Using the Primitive": [[2, "using-the-primitive"]], "Utilizing nn.average_gradients": [[513, "utilizing-nn-average-gradients"]], "Value and Grad": [[353, "value-and-grad"]], "Weight loading and benchmarking": [[6, "weight-loading-and-benchmarking"]], "When to Evaluate": [[518, "when-to-evaluate"]], "Why Lazy Evaluation": [[518, "why-lazy-evaluation"]], "Xcode Workflow": [[3, "xcode-workflow"]], "mlx.core.Device": [[10, null]], "mlx.core.Dtype": [[11, null]], "mlx.core.DtypeCategory": [[12, null]], "mlx.core.Stream": [[342, null]], "mlx.core.abs": [[13, null]], "mlx.core.add": [[14, null]], "mlx.core.addmm": [[15, null]], 
"mlx.core.all": [[16, null]], "mlx.core.allclose": [[17, null]], "mlx.core.any": [[18, null]], "mlx.core.arange": [[19, null]], "mlx.core.arccos": [[20, null]], "mlx.core.arccosh": [[21, null]], "mlx.core.arcsin": [[22, null]], "mlx.core.arcsinh": [[23, null]], "mlx.core.arctan": [[24, null]], "mlx.core.arctan2": [[25, null]], "mlx.core.arctanh": [[26, null]], "mlx.core.argmax": [[27, null]], "mlx.core.argmin": [[28, null]], "mlx.core.argpartition": [[29, null]], "mlx.core.argsort": [[30, null]], "mlx.core.array": [[31, null]], "mlx.core.array.T": [[32, null]], "mlx.core.array.abs": [[33, null]], "mlx.core.array.all": [[34, null]], "mlx.core.array.any": [[35, null]], "mlx.core.array.argmax": [[36, null]], "mlx.core.array.argmin": [[37, null]], "mlx.core.array.astype": [[38, null]], "mlx.core.array.at": [[39, null]], "mlx.core.array.conj": [[40, null]], "mlx.core.array.cos": [[41, null]], "mlx.core.array.cummax": [[42, null]], "mlx.core.array.cummin": [[43, null]], "mlx.core.array.cumprod": [[44, null]], "mlx.core.array.cumsum": [[45, null]], "mlx.core.array.diag": [[46, null]], "mlx.core.array.diagonal": [[47, null]], "mlx.core.array.dtype": [[48, null]], "mlx.core.array.exp": [[49, null]], "mlx.core.array.flatten": [[50, null]], "mlx.core.array.imag": [[51, null]], "mlx.core.array.item": [[52, null]], "mlx.core.array.itemsize": [[53, null]], "mlx.core.array.log": [[54, null]], "mlx.core.array.log10": [[55, null]], "mlx.core.array.log1p": [[56, null]], "mlx.core.array.log2": [[57, null]], "mlx.core.array.logcumsumexp": [[58, null]], "mlx.core.array.logsumexp": [[59, null]], "mlx.core.array.max": [[60, null]], "mlx.core.array.mean": [[61, null]], "mlx.core.array.min": [[62, null]], "mlx.core.array.moveaxis": [[63, null]], "mlx.core.array.nbytes": [[64, null]], "mlx.core.array.ndim": [[65, null]], "mlx.core.array.prod": [[66, null]], "mlx.core.array.real": [[67, null]], "mlx.core.array.reciprocal": [[68, null]], "mlx.core.array.reshape": [[69, null]], "mlx.core.array.round": [[70, null]], "mlx.core.array.rsqrt": [[71, null]], "mlx.core.array.shape": [[72, null]], "mlx.core.array.sin": [[73, null]], "mlx.core.array.size": [[74, null]], "mlx.core.array.split": [[75, null]], "mlx.core.array.sqrt": [[76, null]], "mlx.core.array.square": [[77, null]], "mlx.core.array.squeeze": [[78, null]], "mlx.core.array.std": [[79, null]], "mlx.core.array.sum": [[80, null]], "mlx.core.array.swapaxes": [[81, null]], "mlx.core.array.tolist": [[82, null]], "mlx.core.array.transpose": [[83, null]], "mlx.core.array.var": [[84, null]], "mlx.core.array.view": [[85, null]], "mlx.core.array_equal": [[86, null]], "mlx.core.as_strided": [[87, null]], "mlx.core.async_eval": [[88, null]], "mlx.core.atleast_1d": [[89, null]], "mlx.core.atleast_2d": [[90, null]], "mlx.core.atleast_3d": [[91, null]], "mlx.core.bitwise_and": [[92, null]], "mlx.core.bitwise_invert": [[93, null]], "mlx.core.bitwise_or": [[94, null]], "mlx.core.bitwise_xor": [[95, null]], "mlx.core.block_masked_mm": [[96, null]], "mlx.core.broadcast_arrays": [[97, null]], "mlx.core.broadcast_to": [[98, null]], "mlx.core.ceil": [[99, null]], "mlx.core.clear_cache": [[100, null]], "mlx.core.clip": [[101, null]], "mlx.core.compile": [[102, null]], "mlx.core.concatenate": [[103, null]], "mlx.core.conj": [[104, null]], "mlx.core.conjugate": [[105, null]], "mlx.core.contiguous": [[106, null]], "mlx.core.conv1d": [[107, null]], "mlx.core.conv2d": [[108, null]], "mlx.core.conv3d": [[109, null]], "mlx.core.conv_general": [[110, null]], "mlx.core.conv_transpose1d": [[111, 
null]], "mlx.core.conv_transpose2d": [[112, null]], "mlx.core.conv_transpose3d": [[113, null]], "mlx.core.convolve": [[114, null]], "mlx.core.cos": [[115, null]], "mlx.core.cosh": [[116, null]], "mlx.core.cummax": [[117, null]], "mlx.core.cummin": [[118, null]], "mlx.core.cumprod": [[119, null]], "mlx.core.cumsum": [[120, null]], "mlx.core.custom_function": [[121, null]], "mlx.core.default_device": [[122, null]], "mlx.core.default_stream": [[123, null]], "mlx.core.degrees": [[124, null]], "mlx.core.dequantize": [[125, null]], "mlx.core.diag": [[126, null]], "mlx.core.diagonal": [[127, null]], "mlx.core.disable_compile": [[128, null]], "mlx.core.distributed.Group": [[129, null]], "mlx.core.distributed.all_gather": [[130, null]], "mlx.core.distributed.all_sum": [[131, null]], "mlx.core.distributed.init": [[132, null]], "mlx.core.distributed.is_available": [[133, null]], "mlx.core.distributed.recv": [[134, null]], "mlx.core.distributed.recv_like": [[135, null]], "mlx.core.distributed.send": [[136, null]], "mlx.core.divide": [[137, null]], "mlx.core.divmod": [[138, null]], "mlx.core.einsum": [[139, null]], "mlx.core.einsum_path": [[140, null]], "mlx.core.enable_compile": [[141, null]], "mlx.core.equal": [[142, null]], "mlx.core.erf": [[143, null]], "mlx.core.erfinv": [[144, null]], "mlx.core.eval": [[145, null]], "mlx.core.exp": [[146, null]], "mlx.core.expand_dims": [[147, null]], "mlx.core.expm1": [[148, null]], "mlx.core.export_function": [[149, null]], "mlx.core.export_to_dot": [[150, null]], "mlx.core.exporter": [[151, null]], "mlx.core.eye": [[152, null]], "mlx.core.fast.layer_norm": [[153, null]], "mlx.core.fast.metal_kernel": [[154, null]], "mlx.core.fast.rms_norm": [[155, null]], "mlx.core.fast.rope": [[156, null]], "mlx.core.fast.scaled_dot_product_attention": [[157, null]], "mlx.core.fft.fft": [[158, null]], "mlx.core.fft.fft2": [[159, null]], "mlx.core.fft.fftn": [[160, null]], "mlx.core.fft.fftshift": [[161, null]], "mlx.core.fft.ifft": [[162, null]], "mlx.core.fft.ifft2": [[163, null]], "mlx.core.fft.ifftn": [[164, null]], "mlx.core.fft.ifftshift": [[165, null]], "mlx.core.fft.irfft": [[166, null]], "mlx.core.fft.irfft2": [[167, null]], "mlx.core.fft.irfftn": [[168, null]], "mlx.core.fft.rfft": [[169, null]], "mlx.core.fft.rfft2": [[170, null]], "mlx.core.fft.rfftn": [[171, null]], "mlx.core.finfo": [[172, null]], "mlx.core.flatten": [[173, null]], "mlx.core.floor": [[174, null]], "mlx.core.floor_divide": [[175, null]], "mlx.core.full": [[176, null]], "mlx.core.gather_mm": [[177, null]], "mlx.core.gather_qmm": [[178, null]], "mlx.core.get_active_memory": [[179, null]], "mlx.core.get_cache_memory": [[180, null]], "mlx.core.get_peak_memory": [[181, null]], "mlx.core.grad": [[182, null]], "mlx.core.greater": [[183, null]], "mlx.core.greater_equal": [[184, null]], "mlx.core.hadamard_transform": [[185, null]], "mlx.core.identity": [[186, null]], "mlx.core.imag": [[187, null]], "mlx.core.import_function": [[188, null]], "mlx.core.inner": [[189, null]], "mlx.core.isclose": [[190, null]], "mlx.core.isfinite": [[191, null]], "mlx.core.isinf": [[192, null]], "mlx.core.isnan": [[193, null]], "mlx.core.isneginf": [[194, null]], "mlx.core.isposinf": [[195, null]], "mlx.core.issubdtype": [[196, null]], "mlx.core.jvp": [[197, null]], "mlx.core.kron": [[198, null]], "mlx.core.left_shift": [[199, null]], "mlx.core.less": [[200, null]], "mlx.core.less_equal": [[201, null]], "mlx.core.linalg.cholesky": [[202, null]], "mlx.core.linalg.cholesky_inv": [[203, null]], "mlx.core.linalg.cross": [[204, 
null]], "mlx.core.linalg.eig": [[205, null]], "mlx.core.linalg.eigh": [[206, null]], "mlx.core.linalg.eigvals": [[207, null]], "mlx.core.linalg.eigvalsh": [[208, null]], "mlx.core.linalg.inv": [[209, null]], "mlx.core.linalg.lu": [[210, null]], "mlx.core.linalg.lu_factor": [[211, null]], "mlx.core.linalg.norm": [[212, null]], "mlx.core.linalg.pinv": [[213, null]], "mlx.core.linalg.qr": [[214, null]], "mlx.core.linalg.solve": [[215, null]], "mlx.core.linalg.solve_triangular": [[216, null]], "mlx.core.linalg.svd": [[217, null]], "mlx.core.linalg.tri_inv": [[218, null]], "mlx.core.linspace": [[219, null]], "mlx.core.load": [[220, null]], "mlx.core.log": [[221, null]], "mlx.core.log10": [[222, null]], "mlx.core.log1p": [[223, null]], "mlx.core.log2": [[224, null]], "mlx.core.logaddexp": [[225, null]], "mlx.core.logcumsumexp": [[226, null]], "mlx.core.logical_and": [[227, null]], "mlx.core.logical_not": [[228, null]], "mlx.core.logical_or": [[229, null]], "mlx.core.logsumexp": [[230, null]], "mlx.core.matmul": [[231, null]], "mlx.core.max": [[232, null]], "mlx.core.maximum": [[233, null]], "mlx.core.mean": [[234, null]], "mlx.core.meshgrid": [[235, null]], "mlx.core.metal.device_info": [[236, null]], "mlx.core.metal.is_available": [[237, null]], "mlx.core.metal.start_capture": [[238, null]], "mlx.core.metal.stop_capture": [[239, null]], "mlx.core.min": [[240, null]], "mlx.core.minimum": [[241, null]], "mlx.core.moveaxis": [[242, null]], "mlx.core.multiply": [[243, null]], "mlx.core.nan_to_num": [[244, null]], "mlx.core.negative": [[245, null]], "mlx.core.new_stream": [[246, null]], "mlx.core.not_equal": [[247, null]], "mlx.core.ones": [[248, null]], "mlx.core.ones_like": [[249, null]], "mlx.core.outer": [[250, null]], "mlx.core.pad": [[251, null]], "mlx.core.partition": [[252, null]], "mlx.core.power": [[253, null]], "mlx.core.prod": [[254, null]], "mlx.core.put_along_axis": [[255, null]], "mlx.core.quantize": [[256, null]], "mlx.core.quantized_matmul": [[257, null]], "mlx.core.radians": [[258, null]], "mlx.core.random.bernoulli": [[259, null]], "mlx.core.random.categorical": [[260, null]], "mlx.core.random.gumbel": [[261, null]], "mlx.core.random.key": [[262, null]], "mlx.core.random.laplace": [[263, null]], "mlx.core.random.multivariate_normal": [[264, null]], "mlx.core.random.normal": [[265, null]], "mlx.core.random.permutation": [[266, null]], "mlx.core.random.randint": [[267, null]], "mlx.core.random.seed": [[268, null]], "mlx.core.random.split": [[269, null]], "mlx.core.random.truncated_normal": [[270, null]], "mlx.core.random.uniform": [[271, null]], "mlx.core.real": [[272, null]], "mlx.core.reciprocal": [[273, null]], "mlx.core.remainder": [[274, null]], "mlx.core.repeat": [[275, null]], "mlx.core.reset_peak_memory": [[276, null]], "mlx.core.reshape": [[277, null]], "mlx.core.right_shift": [[278, null]], "mlx.core.roll": [[279, null]], "mlx.core.round": [[280, null]], "mlx.core.rsqrt": [[281, null]], "mlx.core.save": [[282, null]], "mlx.core.save_gguf": [[283, null]], "mlx.core.save_safetensors": [[284, null]], "mlx.core.savez": [[285, null]], "mlx.core.savez_compressed": [[286, null]], "mlx.core.set_cache_limit": [[287, null]], "mlx.core.set_default_device": [[288, null]], "mlx.core.set_default_stream": [[289, null]], "mlx.core.set_memory_limit": [[290, null]], "mlx.core.set_wired_limit": [[291, null]], "mlx.core.sigmoid": [[292, null]], "mlx.core.sign": [[293, null]], "mlx.core.sin": [[294, null]], "mlx.core.sinh": [[295, null]], "mlx.core.slice": [[296, null]], 
"mlx.core.slice_update": [[297, null]], "mlx.core.softmax": [[298, null]], "mlx.core.sort": [[299, null]], "mlx.core.split": [[300, null]], "mlx.core.sqrt": [[301, null]], "mlx.core.square": [[302, null]], "mlx.core.squeeze": [[303, null]], "mlx.core.stack": [[304, null]], "mlx.core.std": [[305, null]], "mlx.core.stop_gradient": [[306, null]], "mlx.core.stream": [[307, null]], "mlx.core.subtract": [[308, null]], "mlx.core.sum": [[309, null]], "mlx.core.swapaxes": [[310, null]], "mlx.core.synchronize": [[311, null]], "mlx.core.take": [[312, null]], "mlx.core.take_along_axis": [[313, null]], "mlx.core.tan": [[314, null]], "mlx.core.tanh": [[315, null]], "mlx.core.tensordot": [[316, null]], "mlx.core.tile": [[317, null]], "mlx.core.topk": [[318, null]], "mlx.core.trace": [[319, null]], "mlx.core.transpose": [[320, null]], "mlx.core.tri": [[321, null]], "mlx.core.tril": [[322, null]], "mlx.core.triu": [[323, null]], "mlx.core.unflatten": [[324, null]], "mlx.core.value_and_grad": [[325, null]], "mlx.core.var": [[326, null]], "mlx.core.view": [[327, null]], "mlx.core.vjp": [[328, null]], "mlx.core.vmap": [[329, null]], "mlx.core.where": [[330, null]], "mlx.core.zeros": [[331, null]], "mlx.core.zeros_like": [[332, null]], "mlx.nn.ALiBi": [[354, null]], "mlx.nn.AvgPool1d": [[355, null]], "mlx.nn.AvgPool2d": [[356, null]], "mlx.nn.AvgPool3d": [[357, null]], "mlx.nn.BatchNorm": [[358, null]], "mlx.nn.CELU": [[359, null]], "mlx.nn.Conv1d": [[360, null]], "mlx.nn.Conv2d": [[361, null]], "mlx.nn.Conv3d": [[362, null]], "mlx.nn.ConvTranspose1d": [[363, null]], "mlx.nn.ConvTranspose2d": [[364, null]], "mlx.nn.ConvTranspose3d": [[365, null]], "mlx.nn.Dropout": [[366, null]], "mlx.nn.Dropout2d": [[367, null]], "mlx.nn.Dropout3d": [[368, null]], "mlx.nn.ELU": [[369, null]], "mlx.nn.Embedding": [[370, null]], "mlx.nn.GELU": [[371, null]], "mlx.nn.GLU": [[372, null]], "mlx.nn.GRU": [[373, null]], "mlx.nn.GroupNorm": [[374, null]], "mlx.nn.HardShrink": [[375, null]], "mlx.nn.HardTanh": [[376, null]], "mlx.nn.Hardswish": [[377, null]], "mlx.nn.InstanceNorm": [[378, null]], "mlx.nn.LSTM": [[379, null]], "mlx.nn.LayerNorm": [[380, null]], "mlx.nn.LeakyReLU": [[381, null]], "mlx.nn.Linear": [[382, null]], "mlx.nn.LogSigmoid": [[383, null]], "mlx.nn.LogSoftmax": [[384, null]], "mlx.nn.MaxPool1d": [[385, null]], "mlx.nn.MaxPool2d": [[386, null]], "mlx.nn.MaxPool3d": [[387, null]], "mlx.nn.Mish": [[388, null]], "mlx.nn.Module.apply": [[389, null]], "mlx.nn.Module.apply_to_modules": [[390, null]], "mlx.nn.Module.children": [[391, null]], "mlx.nn.Module.eval": [[392, null]], "mlx.nn.Module.filter_and_map": [[393, null]], "mlx.nn.Module.freeze": [[394, null]], "mlx.nn.Module.leaf_modules": [[395, null]], "mlx.nn.Module.load_weights": [[396, null]], "mlx.nn.Module.modules": [[397, null]], "mlx.nn.Module.named_modules": [[398, null]], "mlx.nn.Module.parameters": [[399, null]], "mlx.nn.Module.save_weights": [[400, null]], "mlx.nn.Module.set_dtype": [[401, null]], "mlx.nn.Module.state": [[402, null]], "mlx.nn.Module.train": [[403, null]], "mlx.nn.Module.trainable_parameters": [[404, null]], "mlx.nn.Module.training": [[405, null]], "mlx.nn.Module.unfreeze": [[406, null]], "mlx.nn.Module.update": [[407, null]], "mlx.nn.Module.update_modules": [[408, null]], "mlx.nn.MultiHeadAttention": [[409, null]], "mlx.nn.PReLU": [[410, null]], "mlx.nn.QuantizedEmbedding": [[411, null]], "mlx.nn.QuantizedLinear": [[412, null]], "mlx.nn.RMSNorm": [[413, null]], "mlx.nn.RNN": [[414, null]], "mlx.nn.ReLU": [[415, null]], "mlx.nn.ReLU6": 
[[416, null]], "mlx.nn.RoPE": [[417, null]], "mlx.nn.SELU": [[418, null]], "mlx.nn.Sequential": [[419, null]], "mlx.nn.SiLU": [[420, null]], "mlx.nn.Sigmoid": [[421, null]], "mlx.nn.SinusoidalPositionalEncoding": [[422, null]], "mlx.nn.Softmax": [[423, null]], "mlx.nn.Softmin": [[424, null]], "mlx.nn.Softplus": [[425, null]], "mlx.nn.Softshrink": [[426, null]], "mlx.nn.Softsign": [[427, null]], "mlx.nn.Step": [[428, null]], "mlx.nn.Tanh": [[429, null]], "mlx.nn.Transformer": [[430, null]], "mlx.nn.Upsample": [[431, null]], "mlx.nn.average_gradients": [[333, null]], "mlx.nn.celu": [[440, null]], "mlx.nn.elu": [[441, null]], "mlx.nn.gelu": [[442, null]], "mlx.nn.gelu_approx": [[443, null]], "mlx.nn.gelu_fast_approx": [[444, null]], "mlx.nn.glu": [[445, null]], "mlx.nn.hard_shrink": [[446, null]], "mlx.nn.hard_tanh": [[447, null]], "mlx.nn.hardswish": [[448, null]], "mlx.nn.init.constant": [[432, null]], "mlx.nn.init.glorot_normal": [[433, null]], "mlx.nn.init.glorot_uniform": [[434, null]], "mlx.nn.init.he_normal": [[435, null]], "mlx.nn.init.he_uniform": [[436, null]], "mlx.nn.init.identity": [[437, null]], "mlx.nn.init.normal": [[438, null]], "mlx.nn.init.uniform": [[439, null]], "mlx.nn.leaky_relu": [[449, null]], "mlx.nn.log_sigmoid": [[450, null]], "mlx.nn.log_softmax": [[451, null]], "mlx.nn.losses.binary_cross_entropy": [[452, null]], "mlx.nn.losses.cosine_similarity_loss": [[453, null]], "mlx.nn.losses.cross_entropy": [[454, null]], "mlx.nn.losses.gaussian_nll_loss": [[455, null]], "mlx.nn.losses.hinge_loss": [[456, null]], "mlx.nn.losses.huber_loss": [[457, null]], "mlx.nn.losses.kl_div_loss": [[458, null]], "mlx.nn.losses.l1_loss": [[459, null]], "mlx.nn.losses.log_cosh_loss": [[460, null]], "mlx.nn.losses.margin_ranking_loss": [[461, null]], "mlx.nn.losses.mse_loss": [[462, null]], "mlx.nn.losses.nll_loss": [[463, null]], "mlx.nn.losses.smooth_l1_loss": [[464, null]], "mlx.nn.losses.triplet_loss": [[465, null]], "mlx.nn.mish": [[466, null]], "mlx.nn.prelu": [[467, null]], "mlx.nn.quantize": [[334, null]], "mlx.nn.relu": [[468, null]], "mlx.nn.relu6": [[469, null]], "mlx.nn.selu": [[470, null]], "mlx.nn.sigmoid": [[471, null]], "mlx.nn.silu": [[472, null]], "mlx.nn.softmax": [[473, null]], "mlx.nn.softmin": [[474, null]], "mlx.nn.softplus": [[475, null]], "mlx.nn.softshrink": [[476, null]], "mlx.nn.step": [[477, null]], "mlx.nn.tanh": [[478, null]], "mlx.nn.value_and_grad": [[335, null]], "mlx.optimizers.AdaDelta": [[486, null]], "mlx.optimizers.Adafactor": [[487, null]], "mlx.optimizers.Adagrad": [[488, null]], "mlx.optimizers.Adam": [[489, null]], "mlx.optimizers.AdamW": [[490, null]], "mlx.optimizers.Adamax": [[491, null]], "mlx.optimizers.Lion": [[492, null]], "mlx.optimizers.MultiOptimizer": [[493, null]], "mlx.optimizers.Muon": [[494, null]], "mlx.optimizers.Optimizer.apply_gradients": [[495, null]], "mlx.optimizers.Optimizer.init": [[496, null]], "mlx.optimizers.Optimizer.state": [[497, null]], "mlx.optimizers.Optimizer.update": [[498, null]], "mlx.optimizers.RMSprop": [[499, null]], "mlx.optimizers.SGD": [[500, null]], "mlx.optimizers.clip_grad_norm": [[336, null]], "mlx.optimizers.cosine_decay": [[501, null]], "mlx.optimizers.exponential_decay": [[502, null]], "mlx.optimizers.join_schedules": [[503, null]], "mlx.optimizers.linear_schedule": [[504, null]], "mlx.optimizers.step_decay": [[505, null]], "mlx.utils.tree_flatten": [[337, null]], "mlx.utils.tree_map": [[338, null]], "mlx.utils.tree_map_with_path": [[339, null]], "mlx.utils.tree_reduce": [[340, null]], 
"mlx.utils.tree_unflatten": [[341, null]], "x86 Shell": [[9, "x86-shell"]]}, "docnames": ["cpp/ops", "dev/custom_metal_kernels", "dev/extensions", "dev/metal_debugger", "dev/mlx_in_cpp", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.DtypeCategory", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.addmm", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", "python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctan2", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.at", "python/_autosummary/mlx.core.array.conj", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.cummax", "python/_autosummary/mlx.core.array.cummin", "python/_autosummary/mlx.core.array.cumprod", "python/_autosummary/mlx.core.array.cumsum", "python/_autosummary/mlx.core.array.diag", "python/_autosummary/mlx.core.array.diagonal", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.flatten", "python/_autosummary/mlx.core.array.imag", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.itemsize", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log10", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.log2", "python/_autosummary/mlx.core.array.logcumsumexp", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.moveaxis", "python/_autosummary/mlx.core.array.nbytes", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.real", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.squeeze", "python/_autosummary/mlx.core.array.std", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.swapaxes", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array.view", 
"python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.as_strided", "python/_autosummary/mlx.core.async_eval", "python/_autosummary/mlx.core.atleast_1d", "python/_autosummary/mlx.core.atleast_2d", "python/_autosummary/mlx.core.atleast_3d", "python/_autosummary/mlx.core.bitwise_and", "python/_autosummary/mlx.core.bitwise_invert", "python/_autosummary/mlx.core.bitwise_or", "python/_autosummary/mlx.core.bitwise_xor", "python/_autosummary/mlx.core.block_masked_mm", "python/_autosummary/mlx.core.broadcast_arrays", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clear_cache", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", "python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conj", "python/_autosummary/mlx.core.conjugate", "python/_autosummary/mlx.core.contiguous", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.conv3d", "python/_autosummary/mlx.core.conv_general", "python/_autosummary/mlx.core.conv_transpose1d", "python/_autosummary/mlx.core.conv_transpose2d", "python/_autosummary/mlx.core.conv_transpose3d", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.cummax", "python/_autosummary/mlx.core.cummin", "python/_autosummary/mlx.core.cumprod", "python/_autosummary/mlx.core.cumsum", "python/_autosummary/mlx.core.custom_function", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.degrees", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.distributed.Group", "python/_autosummary/mlx.core.distributed.all_gather", "python/_autosummary/mlx.core.distributed.all_sum", "python/_autosummary/mlx.core.distributed.init", "python/_autosummary/mlx.core.distributed.is_available", "python/_autosummary/mlx.core.distributed.recv", "python/_autosummary/mlx.core.distributed.recv_like", "python/_autosummary/mlx.core.distributed.send", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.einsum", "python/_autosummary/mlx.core.einsum_path", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", "python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.expm1", "python/_autosummary/mlx.core.export_function", "python/_autosummary/mlx.core.export_to_dot", "python/_autosummary/mlx.core.exporter", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fast.layer_norm", "python/_autosummary/mlx.core.fast.metal_kernel", "python/_autosummary/mlx.core.fast.rms_norm", "python/_autosummary/mlx.core.fast.rope", "python/_autosummary/mlx.core.fast.scaled_dot_product_attention", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.fftshift", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.ifftshift", "python/_autosummary/mlx.core.fft.irfft", 
"python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.finfo", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.gather_mm", "python/_autosummary/mlx.core.gather_qmm", "python/_autosummary/mlx.core.get_active_memory", "python/_autosummary/mlx.core.get_cache_memory", "python/_autosummary/mlx.core.get_peak_memory", "python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.hadamard_transform", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.imag", "python/_autosummary/mlx.core.import_function", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isclose", "python/_autosummary/mlx.core.isfinite", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.issubdtype", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.kron", "python/_autosummary/mlx.core.left_shift", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.cholesky", "python/_autosummary/mlx.core.linalg.cholesky_inv", "python/_autosummary/mlx.core.linalg.cross", "python/_autosummary/mlx.core.linalg.eig", "python/_autosummary/mlx.core.linalg.eigh", "python/_autosummary/mlx.core.linalg.eigvals", "python/_autosummary/mlx.core.linalg.eigvalsh", "python/_autosummary/mlx.core.linalg.inv", "python/_autosummary/mlx.core.linalg.lu", "python/_autosummary/mlx.core.linalg.lu_factor", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.pinv", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linalg.solve", "python/_autosummary/mlx.core.linalg.solve_triangular", "python/_autosummary/mlx.core.linalg.svd", "python/_autosummary/mlx.core.linalg.tri_inv", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logcumsumexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.meshgrid", "python/_autosummary/mlx.core.metal.device_info", "python/_autosummary/mlx.core.metal.is_available", "python/_autosummary/mlx.core.metal.start_capture", "python/_autosummary/mlx.core.metal.stop_capture", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.nan_to_num", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.not_equal", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", 
"python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.power", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.put_along_axis", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.radians", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.laplace", "python/_autosummary/mlx.core.random.multivariate_normal", "python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.permutation", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.real", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.remainder", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reset_peak_memory", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.right_shift", "python/_autosummary/mlx.core.roll", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_cache_limit", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.set_memory_limit", "python/_autosummary/mlx.core.set_wired_limit", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.slice", "python/_autosummary/mlx.core.slice_update", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.std", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.stream", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.synchronize", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.tile", "python/_autosummary/mlx.core.topk", "python/_autosummary/mlx.core.trace", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.unflatten", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.view", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.average_gradients", 
"python/_autosummary/mlx.nn.quantize", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.optimizers.clip_grad_norm", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_map_with_path", "python/_autosummary/mlx.utils.tree_reduce", "python/_autosummary/mlx.utils.tree_unflatten", "python/_autosummary/stream_class", "python/array", "python/data_types", "python/devices_and_streams", "python/distributed", "python/export", "python/fast", "python/fft", "python/linalg", "python/memory_management", "python/metal", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", "python/nn/_autosummary/mlx.nn.AvgPool1d", "python/nn/_autosummary/mlx.nn.AvgPool2d", "python/nn/_autosummary/mlx.nn.AvgPool3d", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.CELU", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Conv3d", "python/nn/_autosummary/mlx.nn.ConvTranspose1d", "python/nn/_autosummary/mlx.nn.ConvTranspose2d", "python/nn/_autosummary/mlx.nn.ConvTranspose3d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.ELU", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GLU", "python/nn/_autosummary/mlx.nn.GRU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.HardShrink", "python/nn/_autosummary/mlx.nn.HardTanh", "python/nn/_autosummary/mlx.nn.Hardswish", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LSTM", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.LeakyReLU", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.LogSigmoid", "python/nn/_autosummary/mlx.nn.LogSoftmax", "python/nn/_autosummary/mlx.nn.MaxPool1d", "python/nn/_autosummary/mlx.nn.MaxPool2d", "python/nn/_autosummary/mlx.nn.MaxPool3d", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", "python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.set_dtype", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", "python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedEmbedding", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.RNN", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.ReLU6", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", 
"python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.Sigmoid", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softmax", "python/nn/_autosummary/mlx.nn.Softmin", "python/nn/_autosummary/mlx.nn.Softplus", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Softsign", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Tanh", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.Upsample", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", "python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.celu", "python/nn/_autosummary_functions/mlx.nn.elu", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.glu", "python/nn/_autosummary_functions/mlx.nn.hard_shrink", "python/nn/_autosummary_functions/mlx.nn.hard_tanh", "python/nn/_autosummary_functions/mlx.nn.hardswish", "python/nn/_autosummary_functions/mlx.nn.leaky_relu", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid", "python/nn/_autosummary_functions/mlx.nn.log_softmax", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.relu6", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.sigmoid", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.softmax", "python/nn/_autosummary_functions/mlx.nn.softmin", "python/nn/_autosummary_functions/mlx.nn.softplus", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/_autosummary_functions/mlx.nn.tanh", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizers", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta", "python/optimizers/_autosummary/mlx.optimizers.Adafactor", "python/optimizers/_autosummary/mlx.optimizers.Adagrad", "python/optimizers/_autosummary/mlx.optimizers.Adam", "python/optimizers/_autosummary/mlx.optimizers.AdamW", 
"python/optimizers/_autosummary/mlx.optimizers.Adamax", "python/optimizers/_autosummary/mlx.optimizers.Lion", "python/optimizers/_autosummary/mlx.optimizers.MultiOptimizer", "python/optimizers/_autosummary/mlx.optimizers.Muon", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update", "python/optimizers/_autosummary/mlx.optimizers.RMSprop", "python/optimizers/_autosummary/mlx.optimizers.SGD", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay", "python/optimizers/_autosummary/mlx.optimizers.join_schedules", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule", "python/optimizers/_autosummary/mlx.optimizers.step_decay", "python/optimizers/common_optimizers", "python/optimizers/optimizer", "python/optimizers/schedulers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/distributed", "usage/export", "usage/function_transforms", "usage/indexing", "usage/launching_distributed", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1}, "filenames": ["cpp/ops.rst", "dev/custom_metal_kernels.rst", "dev/extensions.rst", "dev/metal_debugger.rst", "dev/mlx_in_cpp.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.DtypeCategory.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.addmm.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctan2.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.at.rst", "python/_autosummary/mlx.core.array.conj.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.cummax.rst", "python/_autosummary/mlx.core.array.cummin.rst", "python/_autosummary/mlx.core.array.cumprod.rst", "python/_autosummary/mlx.core.array.cumsum.rst", 
"python/_autosummary/mlx.core.array.diag.rst", "python/_autosummary/mlx.core.array.diagonal.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.flatten.rst", "python/_autosummary/mlx.core.array.imag.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.itemsize.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log10.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.log2.rst", "python/_autosummary/mlx.core.array.logcumsumexp.rst", "python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.moveaxis.rst", "python/_autosummary/mlx.core.array.nbytes.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.real.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.squeeze.rst", "python/_autosummary/mlx.core.array.std.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.swapaxes.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array.view.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.as_strided.rst", "python/_autosummary/mlx.core.async_eval.rst", "python/_autosummary/mlx.core.atleast_1d.rst", "python/_autosummary/mlx.core.atleast_2d.rst", "python/_autosummary/mlx.core.atleast_3d.rst", "python/_autosummary/mlx.core.bitwise_and.rst", "python/_autosummary/mlx.core.bitwise_invert.rst", "python/_autosummary/mlx.core.bitwise_or.rst", "python/_autosummary/mlx.core.bitwise_xor.rst", "python/_autosummary/mlx.core.block_masked_mm.rst", "python/_autosummary/mlx.core.broadcast_arrays.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clear_cache.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conj.rst", "python/_autosummary/mlx.core.conjugate.rst", "python/_autosummary/mlx.core.contiguous.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.conv3d.rst", "python/_autosummary/mlx.core.conv_general.rst", "python/_autosummary/mlx.core.conv_transpose1d.rst", "python/_autosummary/mlx.core.conv_transpose2d.rst", "python/_autosummary/mlx.core.conv_transpose3d.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.cummax.rst", "python/_autosummary/mlx.core.cummin.rst", "python/_autosummary/mlx.core.cumprod.rst", 
"python/_autosummary/mlx.core.cumsum.rst", "python/_autosummary/mlx.core.custom_function.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.degrees.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.distributed.Group.rst", "python/_autosummary/mlx.core.distributed.all_gather.rst", "python/_autosummary/mlx.core.distributed.all_sum.rst", "python/_autosummary/mlx.core.distributed.init.rst", "python/_autosummary/mlx.core.distributed.is_available.rst", "python/_autosummary/mlx.core.distributed.recv.rst", "python/_autosummary/mlx.core.distributed.recv_like.rst", "python/_autosummary/mlx.core.distributed.send.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.einsum.rst", "python/_autosummary/mlx.core.einsum_path.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.expm1.rst", "python/_autosummary/mlx.core.export_function.rst", "python/_autosummary/mlx.core.export_to_dot.rst", "python/_autosummary/mlx.core.exporter.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fast.layer_norm.rst", "python/_autosummary/mlx.core.fast.metal_kernel.rst", "python/_autosummary/mlx.core.fast.rms_norm.rst", "python/_autosummary/mlx.core.fast.rope.rst", "python/_autosummary/mlx.core.fast.scaled_dot_product_attention.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.fftshift.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.ifftshift.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.finfo.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.gather_mm.rst", "python/_autosummary/mlx.core.gather_qmm.rst", "python/_autosummary/mlx.core.get_active_memory.rst", "python/_autosummary/mlx.core.get_cache_memory.rst", "python/_autosummary/mlx.core.get_peak_memory.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.hadamard_transform.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.imag.rst", "python/_autosummary/mlx.core.import_function.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isclose.rst", "python/_autosummary/mlx.core.isfinite.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", 
"python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.issubdtype.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.kron.rst", "python/_autosummary/mlx.core.left_shift.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.cholesky.rst", "python/_autosummary/mlx.core.linalg.cholesky_inv.rst", "python/_autosummary/mlx.core.linalg.cross.rst", "python/_autosummary/mlx.core.linalg.eig.rst", "python/_autosummary/mlx.core.linalg.eigh.rst", "python/_autosummary/mlx.core.linalg.eigvals.rst", "python/_autosummary/mlx.core.linalg.eigvalsh.rst", "python/_autosummary/mlx.core.linalg.inv.rst", "python/_autosummary/mlx.core.linalg.lu.rst", "python/_autosummary/mlx.core.linalg.lu_factor.rst", "python/_autosummary/mlx.core.linalg.norm.rst", "python/_autosummary/mlx.core.linalg.pinv.rst", "python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linalg.solve.rst", "python/_autosummary/mlx.core.linalg.solve_triangular.rst", "python/_autosummary/mlx.core.linalg.svd.rst", "python/_autosummary/mlx.core.linalg.tri_inv.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logcumsumexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.meshgrid.rst", "python/_autosummary/mlx.core.metal.device_info.rst", "python/_autosummary/mlx.core.metal.is_available.rst", "python/_autosummary/mlx.core.metal.start_capture.rst", "python/_autosummary/mlx.core.metal.stop_capture.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.nan_to_num.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.not_equal.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.power.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.put_along_axis.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.radians.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.laplace.rst", "python/_autosummary/mlx.core.random.multivariate_normal.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.permutation.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", 
"python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.real.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.remainder.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reset_peak_memory.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.right_shift.rst", "python/_autosummary/mlx.core.roll.rst", "python/_autosummary/mlx.core.round.rst", "python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_cache_limit.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.set_memory_limit.rst", "python/_autosummary/mlx.core.set_wired_limit.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.slice.rst", "python/_autosummary/mlx.core.slice_update.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.std.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.stream.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.synchronize.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.tile.rst", "python/_autosummary/mlx.core.topk.rst", "python/_autosummary/mlx.core.trace.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.unflatten.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.view.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.average_gradients.rst", "python/_autosummary/mlx.nn.quantize.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.optimizers.clip_grad_norm.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_map_with_path.rst", "python/_autosummary/mlx.utils.tree_reduce.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/_autosummary/stream_class.rst", "python/array.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/distributed.rst", "python/export.rst", "python/fast.rst", "python/fft.rst", "python/linalg.rst", 
"python/memory_management.rst", "python/metal.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.AvgPool1d.rst", "python/nn/_autosummary/mlx.nn.AvgPool2d.rst", "python/nn/_autosummary/mlx.nn.AvgPool3d.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.CELU.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Conv3d.rst", "python/nn/_autosummary/mlx.nn.ConvTranspose1d.rst", "python/nn/_autosummary/mlx.nn.ConvTranspose2d.rst", "python/nn/_autosummary/mlx.nn.ConvTranspose3d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.ELU.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GLU.rst", "python/nn/_autosummary/mlx.nn.GRU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.HardShrink.rst", "python/nn/_autosummary/mlx.nn.HardTanh.rst", "python/nn/_autosummary/mlx.nn.Hardswish.rst", "python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LSTM.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.LeakyReLU.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.LogSigmoid.rst", "python/nn/_autosummary/mlx.nn.LogSoftmax.rst", "python/nn/_autosummary/mlx.nn.MaxPool1d.rst", "python/nn/_autosummary/mlx.nn.MaxPool2d.rst", "python/nn/_autosummary/mlx.nn.MaxPool3d.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", "python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.set_dtype.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", "python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedEmbedding.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.RNN.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.ReLU6.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.Sigmoid.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softmax.rst", "python/nn/_autosummary/mlx.nn.Softmin.rst", 
"python/nn/_autosummary/mlx.nn.Softplus.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Softsign.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Tanh.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.Upsample.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", "python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", "python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.celu.rst", "python/nn/_autosummary_functions/mlx.nn.elu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.glu.rst", "python/nn/_autosummary_functions/mlx.nn.hard_shrink.rst", "python/nn/_autosummary_functions/mlx.nn.hard_tanh.rst", "python/nn/_autosummary_functions/mlx.nn.hardswish.rst", "python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.log_softmax.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.relu6.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softmax.rst", "python/nn/_autosummary_functions/mlx.nn.softmin.rst", "python/nn/_autosummary_functions/mlx.nn.softplus.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/_autosummary_functions/mlx.nn.tanh.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizers.rst", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta.rst", "python/optimizers/_autosummary/mlx.optimizers.Adafactor.rst", "python/optimizers/_autosummary/mlx.optimizers.Adagrad.rst", "python/optimizers/_autosummary/mlx.optimizers.Adam.rst", "python/optimizers/_autosummary/mlx.optimizers.AdamW.rst", 
"python/optimizers/_autosummary/mlx.optimizers.Adamax.rst", "python/optimizers/_autosummary/mlx.optimizers.Lion.rst", "python/optimizers/_autosummary/mlx.optimizers.MultiOptimizer.rst", "python/optimizers/_autosummary/mlx.optimizers.Muon.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.rst", "python/optimizers/_autosummary/mlx.optimizers.RMSprop.rst", "python/optimizers/_autosummary/mlx.optimizers.SGD.rst", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst", "python/optimizers/_autosummary/mlx.optimizers.step_decay.rst", "python/optimizers/common_optimizers.rst", "python/optimizers/optimizer.rst", "python/optimizers/schedulers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/distributed.rst", "usage/export.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/launching_distributed.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "indexentries": {"__init__() (array method)": [[31, "mlx.core.array.__init__", false]], "__init__() (custom_function method)": [[121, "mlx.core.custom_function.__init__", false]], "__init__() (device method)": [[10, "mlx.core.Device.__init__", false]], "__init__() (dtype method)": [[11, "mlx.core.Dtype.__init__", false]], "__init__() (dtypecategory method)": [[12, "mlx.core.DtypeCategory.__init__", false]], "__init__() (finfo method)": [[172, "mlx.core.finfo.__init__", false]], "__init__() (group method)": [[129, "mlx.core.distributed.Group.__init__", false]], "__init__() (stream method)": [[342, "mlx.core.Stream.__init__", false]], "abs (c++ function)": [[0, "_CPPv43absRK5array14StreamOrDevice", false]], "abs() (array method)": [[33, "mlx.core.array.abs", false]], "abs() (in module mlx.core)": [[13, "mlx.core.abs", false]], "adadelta (class in mlx.optimizers)": [[486, "mlx.optimizers.AdaDelta", false]], "adafactor (class in mlx.optimizers)": [[487, "mlx.optimizers.Adafactor", false]], "adagrad (class in mlx.optimizers)": [[488, "mlx.optimizers.Adagrad", false]], "adam (class in mlx.optimizers)": [[489, "mlx.optimizers.Adam", false]], "adamax (class in mlx.optimizers)": [[491, "mlx.optimizers.Adamax", false]], "adamw (class in mlx.optimizers)": [[490, "mlx.optimizers.AdamW", false]], "add (c++ function)": [[0, "_CPPv43addRK5arrayRK5array14StreamOrDevice", false]], "add() (in module mlx.core)": [[14, "mlx.core.add", false]], "addmm (c++ function)": [[0, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", false]], "addmm() (in module mlx.core)": [[15, "mlx.core.addmm", false]], "alibi (class in mlx.nn)": [[354, "mlx.nn.ALiBi", false]], "all (c++ function)": [[0, "_CPPv43allRK5array14StreamOrDevice", false], [0, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43allRK5arrayb14StreamOrDevice", false], [0, "_CPPv43allRK5arrayib14StreamOrDevice", false]], "all() (array method)": [[34, "mlx.core.array.all", false]], "all() (in module mlx.core)": [[16, "mlx.core.all", false]], "all_gather() (in module 
mlx.core.distributed)": [[130, "mlx.core.distributed.all_gather", false]], "all_sum() (in module mlx.core.distributed)": [[131, "mlx.core.distributed.all_sum", false]], "allclose (c++ function)": [[0, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", false]], "allclose() (in module mlx.core)": [[17, "mlx.core.allclose", false]], "any (c++ function)": [[0, "_CPPv43anyRK5array14StreamOrDevice", false], [0, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43anyRK5arrayb14StreamOrDevice", false], [0, "_CPPv43anyRK5arrayib14StreamOrDevice", false]], "any() (array method)": [[35, "mlx.core.array.any", false]], "any() (in module mlx.core)": [[18, "mlx.core.any", false]], "apply() (module method)": [[389, "mlx.nn.Module.apply", false]], "apply_gradients() (optimizer method)": [[495, "mlx.optimizers.Optimizer.apply_gradients", false]], "apply_to_modules() (module method)": [[390, "mlx.nn.Module.apply_to_modules", false]], "arange (c++ function)": [[0, "_CPPv46aranged14StreamOrDevice", false], [0, "_CPPv46aranged5Dtype14StreamOrDevice", false], [0, "_CPPv46arangedd14StreamOrDevice", false], [0, "_CPPv46arangedd5Dtype14StreamOrDevice", false], [0, "_CPPv46arangeddd14StreamOrDevice", false], [0, "_CPPv46arangeddd5Dtype14StreamOrDevice", false], [0, "_CPPv46arangei14StreamOrDevice", false], [0, "_CPPv46arangeii14StreamOrDevice", false], [0, "_CPPv46arangeiii14StreamOrDevice", false]], "arange() (in module mlx.core)": [[19, "mlx.core.arange", false]], "arccos (c++ function)": [[0, "_CPPv46arccosRK5array14StreamOrDevice", false]], "arccos() (in module mlx.core)": [[20, "mlx.core.arccos", false]], "arccosh (c++ function)": [[0, "_CPPv47arccoshRK5array14StreamOrDevice", false]], "arccosh() (in module mlx.core)": [[21, "mlx.core.arccosh", false]], "arcsin (c++ function)": [[0, "_CPPv46arcsinRK5array14StreamOrDevice", false]], "arcsin() (in module mlx.core)": [[22, "mlx.core.arcsin", false]], "arcsinh (c++ function)": [[0, "_CPPv47arcsinhRK5array14StreamOrDevice", false]], "arcsinh() (in module mlx.core)": [[23, "mlx.core.arcsinh", false]], "arctan (c++ function)": [[0, "_CPPv46arctanRK5array14StreamOrDevice", false]], "arctan() (in module mlx.core)": [[24, "mlx.core.arctan", false]], "arctan2 (c++ function)": [[0, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", false]], "arctan2() (in module mlx.core)": [[25, "mlx.core.arctan2", false]], "arctanh (c++ function)": [[0, "_CPPv47arctanhRK5array14StreamOrDevice", false]], "arctanh() (in module mlx.core)": [[26, "mlx.core.arctanh", false]], "argmax (c++ function)": [[0, "_CPPv46argmaxRK5array14StreamOrDevice", false], [0, "_CPPv46argmaxRK5arrayb14StreamOrDevice", false], [0, "_CPPv46argmaxRK5arrayib14StreamOrDevice", false]], "argmax() (array method)": [[36, "mlx.core.array.argmax", false]], "argmax() (in module mlx.core)": [[27, "mlx.core.argmax", false]], "argmin (c++ function)": [[0, "_CPPv46argminRK5array14StreamOrDevice", false], [0, "_CPPv46argminRK5arrayb14StreamOrDevice", false], [0, "_CPPv46argminRK5arrayib14StreamOrDevice", false]], "argmin() (array method)": [[37, "mlx.core.array.argmin", false]], "argmin() (in module mlx.core)": [[28, "mlx.core.argmin", false]], "argpartition (c++ function)": [[0, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", false], [0, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", false]], "argpartition() (in module mlx.core)": [[29, "mlx.core.argpartition", false]], "argsort (c++ function)": [[0, "_CPPv47argsortRK5array14StreamOrDevice", false], [0, "_CPPv47argsortRK5arrayi14StreamOrDevice", 
false]], "argsort() (in module mlx.core)": [[30, "mlx.core.argsort", false]], "array (class in mlx.core)": [[31, "mlx.core.array", false]], "array_equal (c++ function)": [[0, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", false], [0, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", false]], "array_equal() (in module mlx.core)": [[86, "mlx.core.array_equal", false]], "as_strided (c++ function)": [[0, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", false]], "as_strided() (in module mlx.core)": [[87, "mlx.core.as_strided", false]], "astype (c++ function)": [[0, "_CPPv46astype5array5Dtype14StreamOrDevice", false]], "astype() (array method)": [[38, "mlx.core.array.astype", false]], "async_eval() (in module mlx.core)": [[88, "mlx.core.async_eval", false]], "at (array property)": [[39, "mlx.core.array.at", false]], "atleast_1d (c++ function)": [[0, "_CPPv410atleast_1dRK5array14StreamOrDevice", false], [0, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "atleast_1d() (in module mlx.core)": [[89, "mlx.core.atleast_1d", false]], "atleast_2d (c++ function)": [[0, "_CPPv410atleast_2dRK5array14StreamOrDevice", false], [0, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "atleast_2d() (in module mlx.core)": [[90, "mlx.core.atleast_2d", false]], "atleast_3d (c++ function)": [[0, "_CPPv410atleast_3dRK5array14StreamOrDevice", false], [0, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "atleast_3d() (in module mlx.core)": [[91, "mlx.core.atleast_3d", false]], "average_gradients() (in module mlx.nn)": [[333, "mlx.nn.average_gradients", false]], "avgpool1d (class in mlx.nn)": [[355, "mlx.nn.AvgPool1d", false]], "avgpool2d (class in mlx.nn)": [[356, "mlx.nn.AvgPool2d", false]], "avgpool3d (class in mlx.nn)": [[357, "mlx.nn.AvgPool3d", false]], "batchnorm (class in mlx.nn)": [[358, "mlx.nn.BatchNorm", false]], "bernoulli() (in module mlx.core.random)": [[259, "mlx.core.random.bernoulli", false]], "binary_cross_entropy (class in mlx.nn.losses)": [[452, "mlx.nn.losses.binary_cross_entropy", false]], "bitwise_and (c++ function)": [[0, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", false]], "bitwise_and() (in module mlx.core)": [[92, "mlx.core.bitwise_and", false]], "bitwise_invert (c++ function)": [[0, "_CPPv414bitwise_invertRK5array14StreamOrDevice", false]], "bitwise_invert() (in module mlx.core)": [[93, "mlx.core.bitwise_invert", false]], "bitwise_or (c++ function)": [[0, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", false]], "bitwise_or() (in module mlx.core)": [[94, "mlx.core.bitwise_or", false]], "bitwise_xor (c++ function)": [[0, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", false]], "bitwise_xor() (in module mlx.core)": [[95, "mlx.core.bitwise_xor", false]], "block_masked_mm (c++ function)": [[0, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", false]], "block_masked_mm() (in module mlx.core)": [[96, "mlx.core.block_masked_mm", false]], "broadcast_arrays (c++ function)": [[0, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "broadcast_arrays() (in module mlx.core)": [[97, "mlx.core.broadcast_arrays", false]], "broadcast_to (c++ function)": [[0, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", false]], "broadcast_to() (in module mlx.core)": [[98, "mlx.core.broadcast_to", false]], "categorical() (in module mlx.core.random)": [[260, "mlx.core.random.categorical", false]], "ceil (c++ function)": 
[[0, "_CPPv44ceilRK5array14StreamOrDevice", false]], "ceil() (in module mlx.core)": [[99, "mlx.core.ceil", false]], "celu (class in mlx.nn)": [[359, "mlx.nn.CELU", false], [440, "mlx.nn.celu", false]], "children() (module method)": [[391, "mlx.nn.Module.children", false]], "cholesky() (in module mlx.core.linalg)": [[202, "mlx.core.linalg.cholesky", false]], "cholesky_inv() (in module mlx.core.linalg)": [[203, "mlx.core.linalg.cholesky_inv", false]], "clear_cache() (in module mlx.core)": [[100, "mlx.core.clear_cache", false]], "clip (c++ function)": [[0, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", false]], "clip() (in module mlx.core)": [[101, "mlx.core.clip", false]], "clip_grad_norm() (in module mlx.optimizers)": [[336, "mlx.optimizers.clip_grad_norm", false]], "compile() (in module mlx.core)": [[102, "mlx.core.compile", false]], "concatenate (c++ function)": [[0, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", false], [0, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", false]], "concatenate() (in module mlx.core)": [[103, "mlx.core.concatenate", false]], "conj() (array method)": [[40, "mlx.core.array.conj", false]], "conj() (in module mlx.core)": [[104, "mlx.core.conj", false]], "conjugate (c++ function)": [[0, "_CPPv49conjugateRK5array14StreamOrDevice", false]], "conjugate() (in module mlx.core)": [[105, "mlx.core.conjugate", false]], "constant() (in module mlx.nn.init)": [[432, "mlx.nn.init.constant", false]], "contiguous (c++ function)": [[0, "_CPPv410contiguousRK5arrayb14StreamOrDevice", false]], "contiguous() (in module mlx.core)": [[106, "mlx.core.contiguous", false]], "conv1d (c++ function)": [[0, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", false]], "conv1d (class in mlx.nn)": [[360, "mlx.nn.Conv1d", false]], "conv1d() (in module mlx.core)": [[107, "mlx.core.conv1d", false]], "conv2d (c++ function)": [[0, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", false]], "conv2d (class in mlx.nn)": [[361, "mlx.nn.Conv2d", false]], "conv2d() (in module mlx.core)": [[108, "mlx.core.conv2d", false]], "conv3d (c++ function)": [[0, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", false]], "conv3d (class in mlx.nn)": [[362, "mlx.nn.Conv3d", false]], "conv3d() (in module mlx.core)": [[109, "mlx.core.conv3d", false]], "conv_general (c++ function)": [[0, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", false], [0, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", false]], "conv_general() (in module mlx.core)": [[110, "mlx.core.conv_general", false]], "conv_transpose1d (c++ function)": [[0, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", false]], "conv_transpose1d() (in module mlx.core)": [[111, "mlx.core.conv_transpose1d", false]], "conv_transpose2d (c++ function)": [[0, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", false]], "conv_transpose2d() (in module mlx.core)": [[112, "mlx.core.conv_transpose2d", false]], "conv_transpose3d (c++ function)": [[0, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", false]], "conv_transpose3d() (in module mlx.core)": [[113, "mlx.core.conv_transpose3d", false]], "convolve() (in module mlx.core)": 
[[114, "mlx.core.convolve", false]], "convtranspose1d (class in mlx.nn)": [[363, "mlx.nn.ConvTranspose1d", false]], "convtranspose2d (class in mlx.nn)": [[364, "mlx.nn.ConvTranspose2d", false]], "convtranspose3d (class in mlx.nn)": [[365, "mlx.nn.ConvTranspose3d", false]], "copy (c++ function)": [[0, "_CPPv44copy5array14StreamOrDevice", false]], "cos (c++ function)": [[0, "_CPPv43cosRK5array14StreamOrDevice", false]], "cos() (array method)": [[41, "mlx.core.array.cos", false]], "cos() (in module mlx.core)": [[115, "mlx.core.cos", false]], "cosh (c++ function)": [[0, "_CPPv44coshRK5array14StreamOrDevice", false]], "cosh() (in module mlx.core)": [[116, "mlx.core.cosh", false]], "cosine_decay() (in module mlx.optimizers)": [[501, "mlx.optimizers.cosine_decay", false]], "cosine_similarity_loss (class in mlx.nn.losses)": [[453, "mlx.nn.losses.cosine_similarity_loss", false]], "cross() (in module mlx.core.linalg)": [[204, "mlx.core.linalg.cross", false]], "cross_entropy (class in mlx.nn.losses)": [[454, "mlx.nn.losses.cross_entropy", false]], "cummax (c++ function)": [[0, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", false]], "cummax() (array method)": [[42, "mlx.core.array.cummax", false]], "cummax() (in module mlx.core)": [[117, "mlx.core.cummax", false]], "cummin (c++ function)": [[0, "_CPPv46cumminRK5arrayibb14StreamOrDevice", false]], "cummin() (array method)": [[43, "mlx.core.array.cummin", false]], "cummin() (in module mlx.core)": [[118, "mlx.core.cummin", false]], "cumprod (c++ function)": [[0, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", false]], "cumprod() (array method)": [[44, "mlx.core.array.cumprod", false]], "cumprod() (in module mlx.core)": [[119, "mlx.core.cumprod", false]], "cumsum (c++ function)": [[0, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", false]], "cumsum() (array method)": [[45, "mlx.core.array.cumsum", false]], "cumsum() (in module mlx.core)": [[120, "mlx.core.cumsum", false]], "custom_function (class in mlx.core)": [[121, "mlx.core.custom_function", false]], "default_device() (in module mlx.core)": [[122, "mlx.core.default_device", false]], "default_stream() (in module mlx.core)": [[123, "mlx.core.default_stream", false]], "degrees (c++ function)": [[0, "_CPPv47degreesRK5array14StreamOrDevice", false]], "degrees() (in module mlx.core)": [[124, "mlx.core.degrees", false]], "depends (c++ function)": [[0, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", false]], "dequantize (c++ function)": [[0, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", false]], "dequantize() (in module mlx.core)": [[125, "mlx.core.dequantize", false]], "device (class in mlx.core)": [[10, "mlx.core.Device", false]], "device_info() (in module mlx.core.metal)": [[236, "mlx.core.metal.device_info", false]], "diag (c++ function)": [[0, "_CPPv44diagRK5arrayi14StreamOrDevice", false]], "diag() (array method)": [[46, "mlx.core.array.diag", false]], "diag() (in module mlx.core)": [[126, "mlx.core.diag", false]], "diagonal (c++ function)": [[0, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", false]], "diagonal() (array method)": [[47, "mlx.core.array.diagonal", false]], "diagonal() (in module mlx.core)": [[127, "mlx.core.diagonal", false]], "disable_compile() (in module mlx.core)": [[128, "mlx.core.disable_compile", false]], "divide (c++ function)": [[0, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", false]], "divide() (in module mlx.core)": [[137, "mlx.core.divide", false]], "divmod (c++ function)": [[0, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", false]], "divmod() (in 
module mlx.core)": [[138, "mlx.core.divmod", false]], "dropout (class in mlx.nn)": [[366, "mlx.nn.Dropout", false]], "dropout2d (class in mlx.nn)": [[367, "mlx.nn.Dropout2d", false]], "dropout3d (class in mlx.nn)": [[368, "mlx.nn.Dropout3d", false]], "dtype (array property)": [[48, "mlx.core.array.dtype", false]], "dtype (class in mlx.core)": [[11, "mlx.core.Dtype", false]], "dtypecategory (class in mlx.core)": [[12, "mlx.core.DtypeCategory", false]], "eig() (in module mlx.core.linalg)": [[205, "mlx.core.linalg.eig", false]], "eigh() (in module mlx.core.linalg)": [[206, "mlx.core.linalg.eigh", false]], "eigvals() (in module mlx.core.linalg)": [[207, "mlx.core.linalg.eigvals", false]], "eigvalsh() (in module mlx.core.linalg)": [[208, "mlx.core.linalg.eigvalsh", false]], "einsum() (in module mlx.core)": [[139, "mlx.core.einsum", false]], "einsum_path() (in module mlx.core)": [[140, "mlx.core.einsum_path", false]], "elu (class in mlx.nn)": [[369, "mlx.nn.ELU", false], [441, "mlx.nn.elu", false]], "embedding (class in mlx.nn)": [[370, "mlx.nn.Embedding", false]], "enable_compile() (in module mlx.core)": [[141, "mlx.core.enable_compile", false]], "equal (c++ function)": [[0, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", false]], "equal() (in module mlx.core)": [[142, "mlx.core.equal", false]], "erf (c++ function)": [[0, "_CPPv43erfRK5array14StreamOrDevice", false]], "erf() (in module mlx.core)": [[143, "mlx.core.erf", false]], "erfinv (c++ function)": [[0, "_CPPv46erfinvRK5array14StreamOrDevice", false]], "erfinv() (in module mlx.core)": [[144, "mlx.core.erfinv", false]], "eval() (in module mlx.core)": [[145, "mlx.core.eval", false]], "eval() (module method)": [[392, "mlx.nn.Module.eval", false]], "exp (c++ function)": [[0, "_CPPv43expRK5array14StreamOrDevice", false]], "exp() (array method)": [[49, "mlx.core.array.exp", false]], "exp() (in module mlx.core)": [[146, "mlx.core.exp", false]], "expand_dims (c++ function)": [[0, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", false]], "expand_dims() (in module mlx.core)": [[147, "mlx.core.expand_dims", false]], "expm1 (c++ function)": [[0, "_CPPv45expm1RK5array14StreamOrDevice", false]], "expm1() (in module mlx.core)": [[148, "mlx.core.expm1", false]], "exponential_decay() (in module mlx.optimizers)": [[502, "mlx.optimizers.exponential_decay", false]], "export_function() (in module mlx.core)": [[149, "mlx.core.export_function", false]], "export_to_dot() (in module mlx.core)": [[150, "mlx.core.export_to_dot", false]], "exporter() (in module mlx.core)": [[151, "mlx.core.exporter", false]], "eye (c++ function)": [[0, "_CPPv43eyei14StreamOrDevice", false], [0, "_CPPv43eyei5Dtype14StreamOrDevice", false], [0, "_CPPv43eyeii14StreamOrDevice", false], [0, "_CPPv43eyeiii14StreamOrDevice", false], [0, "_CPPv43eyeiii5Dtype14StreamOrDevice", false]], "eye() (in module mlx.core)": [[152, "mlx.core.eye", false]], "fft() (in module mlx.core.fft)": [[158, "mlx.core.fft.fft", false]], "fft2() (in module mlx.core.fft)": [[159, "mlx.core.fft.fft2", false]], "fftn() (in module mlx.core.fft)": [[160, "mlx.core.fft.fftn", false]], "fftshift() (in module mlx.core.fft)": [[161, "mlx.core.fft.fftshift", false]], "filter_and_map() (module method)": [[393, "mlx.nn.Module.filter_and_map", false]], "finfo (class in mlx.core)": [[172, "mlx.core.finfo", false]], "flatten (c++ function)": [[0, "_CPPv47flattenRK5array14StreamOrDevice", false], [0, "_CPPv47flattenRK5arrayii14StreamOrDevice", false]], 
"flatten() (array method)": [[50, "mlx.core.array.flatten", false]], "flatten() (in module mlx.core)": [[173, "mlx.core.flatten", false]], "floor (c++ function)": [[0, "_CPPv45floorRK5array14StreamOrDevice", false]], "floor() (in module mlx.core)": [[174, "mlx.core.floor", false]], "floor_divide (c++ function)": [[0, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", false]], "floor_divide() (in module mlx.core)": [[175, "mlx.core.floor_divide", false]], "freeze() (module method)": [[394, "mlx.nn.Module.freeze", false]], "full (c++ function)": [[0, "_CPPv44full5Shape5array14StreamOrDevice", false], [0, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", false], [0, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", false], [0, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", false]], "full() (in module mlx.core)": [[176, "mlx.core.full", false]], "gather (c++ function)": [[0, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", false], [0, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", false]], "gather_mm (c++ function)": [[0, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", false]], "gather_mm() (in module mlx.core)": [[177, "mlx.core.gather_mm", false]], "gather_qmm (c++ function)": [[0, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", false]], "gather_qmm() (in module mlx.core)": [[178, "mlx.core.gather_qmm", false]], "gaussian_nll_loss (class in mlx.nn.losses)": [[455, "mlx.nn.losses.gaussian_nll_loss", false]], "gelu (class in mlx.nn)": [[371, "mlx.nn.GELU", false], [442, "mlx.nn.gelu", false]], "gelu_approx (class in mlx.nn)": [[443, "mlx.nn.gelu_approx", false]], "gelu_fast_approx (class in mlx.nn)": [[444, "mlx.nn.gelu_fast_approx", false]], "get_active_memory() (in module mlx.core)": [[179, "mlx.core.get_active_memory", false]], "get_cache_memory() (in module mlx.core)": [[180, "mlx.core.get_cache_memory", false]], "get_peak_memory() (in module mlx.core)": [[181, "mlx.core.get_peak_memory", false]], "glorot_normal() (in module mlx.nn.init)": [[433, "mlx.nn.init.glorot_normal", false]], "glorot_uniform() (in module mlx.nn.init)": [[434, "mlx.nn.init.glorot_uniform", false]], "glu (class in mlx.nn)": [[372, "mlx.nn.GLU", false], [445, "mlx.nn.glu", false]], "grad() (in module mlx.core)": [[182, "mlx.core.grad", false]], "greater (c++ function)": [[0, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", false]], "greater() (in module mlx.core)": [[183, "mlx.core.greater", false]], "greater_equal (c++ function)": [[0, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", false]], "greater_equal() (in module mlx.core)": [[184, "mlx.core.greater_equal", false]], "group (class in mlx.core.distributed)": [[129, "mlx.core.distributed.Group", false]], "groupnorm (class in mlx.nn)": [[374, "mlx.nn.GroupNorm", false]], "gru (class in mlx.nn)": [[373, "mlx.nn.GRU", false]], "gumbel() (in module mlx.core.random)": [[261, "mlx.core.random.gumbel", false]], "hadamard_transform (c++ function)": [[0, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", false]], "hadamard_transform() (in module mlx.core)": [[185, "mlx.core.hadamard_transform", false]], "hard_shrink (class in mlx.nn)": [[446, "mlx.nn.hard_shrink", false]], "hard_tanh (class in mlx.nn)": [[447, "mlx.nn.hard_tanh", false]], "hardshrink (class in mlx.nn)": [[375, "mlx.nn.HardShrink", false]], "hardswish (class in mlx.nn)": [[377, "mlx.nn.Hardswish", false], [448, 
"mlx.nn.hardswish", false]], "hardtanh (class in mlx.nn)": [[376, "mlx.nn.HardTanh", false]], "he_normal() (in module mlx.nn.init)": [[435, "mlx.nn.init.he_normal", false]], "he_uniform() (in module mlx.nn.init)": [[436, "mlx.nn.init.he_uniform", false]], "hinge_loss (class in mlx.nn.losses)": [[456, "mlx.nn.losses.hinge_loss", false]], "huber_loss (class in mlx.nn.losses)": [[457, "mlx.nn.losses.huber_loss", false]], "identity (c++ function)": [[0, "_CPPv48identityi14StreamOrDevice", false], [0, "_CPPv48identityi5Dtype14StreamOrDevice", false]], "identity() (in module mlx.core)": [[186, "mlx.core.identity", false]], "identity() (in module mlx.nn.init)": [[437, "mlx.nn.init.identity", false]], "ifft() (in module mlx.core.fft)": [[162, "mlx.core.fft.ifft", false]], "ifft2() (in module mlx.core.fft)": [[163, "mlx.core.fft.ifft2", false]], "ifftn() (in module mlx.core.fft)": [[164, "mlx.core.fft.ifftn", false]], "ifftshift() (in module mlx.core.fft)": [[165, "mlx.core.fft.ifftshift", false]], "imag (array property)": [[51, "mlx.core.array.imag", false]], "imag (c++ function)": [[0, "_CPPv44imagRK5array14StreamOrDevice", false]], "imag() (in module mlx.core)": [[187, "mlx.core.imag", false]], "import_function() (in module mlx.core)": [[188, "mlx.core.import_function", false]], "init() (in module mlx.core.distributed)": [[132, "mlx.core.distributed.init", false]], "init() (optimizer method)": [[496, "mlx.optimizers.Optimizer.init", false]], "inner (c++ function)": [[0, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", false]], "inner() (in module mlx.core)": [[189, "mlx.core.inner", false]], "instancenorm (class in mlx.nn)": [[378, "mlx.nn.InstanceNorm", false]], "inv() (in module mlx.core.linalg)": [[209, "mlx.core.linalg.inv", false]], "irfft() (in module mlx.core.fft)": [[166, "mlx.core.fft.irfft", false]], "irfft2() (in module mlx.core.fft)": [[167, "mlx.core.fft.irfft2", false]], "irfftn() (in module mlx.core.fft)": [[168, "mlx.core.fft.irfftn", false]], "is_available() (in module mlx.core.distributed)": [[133, "mlx.core.distributed.is_available", false]], "is_available() (in module mlx.core.metal)": [[237, "mlx.core.metal.is_available", false]], "isclose (c++ function)": [[0, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", false]], "isclose() (in module mlx.core)": [[190, "mlx.core.isclose", false]], "isfinite (c++ function)": [[0, "_CPPv48isfiniteRK5array14StreamOrDevice", false]], "isfinite() (in module mlx.core)": [[191, "mlx.core.isfinite", false]], "isinf (c++ function)": [[0, "_CPPv45isinfRK5array14StreamOrDevice", false]], "isinf() (in module mlx.core)": [[192, "mlx.core.isinf", false]], "isnan (c++ function)": [[0, "_CPPv45isnanRK5array14StreamOrDevice", false]], "isnan() (in module mlx.core)": [[193, "mlx.core.isnan", false]], "isneginf (c++ function)": [[0, "_CPPv48isneginfRK5array14StreamOrDevice", false]], "isneginf() (in module mlx.core)": [[194, "mlx.core.isneginf", false]], "isposinf (c++ function)": [[0, "_CPPv48isposinfRK5array14StreamOrDevice", false]], "isposinf() (in module mlx.core)": [[195, "mlx.core.isposinf", false]], "issubdtype() (in module mlx.core)": [[196, "mlx.core.issubdtype", false]], "item() (array method)": [[52, "mlx.core.array.item", false]], "itemsize (array property)": [[53, "mlx.core.array.itemsize", false]], "join_schedules() (in module mlx.optimizers)": [[503, "mlx.optimizers.join_schedules", false]], "jvp() (in module mlx.core)": [[197, "mlx.core.jvp", false]], "key() (in module mlx.core.random)": [[262, "mlx.core.random.key", false]], 
"kl_div_loss (class in mlx.nn.losses)": [[458, "mlx.nn.losses.kl_div_loss", false]], "kron (c++ function)": [[0, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", false]], "kron() (in module mlx.core)": [[198, "mlx.core.kron", false]], "l1_loss (class in mlx.nn.losses)": [[459, "mlx.nn.losses.l1_loss", false]], "laplace() (in module mlx.core.random)": [[263, "mlx.core.random.laplace", false]], "layer_norm() (in module mlx.core.fast)": [[153, "mlx.core.fast.layer_norm", false]], "layernorm (class in mlx.nn)": [[380, "mlx.nn.LayerNorm", false]], "leaf_modules() (module method)": [[395, "mlx.nn.Module.leaf_modules", false]], "leaky_relu (class in mlx.nn)": [[449, "mlx.nn.leaky_relu", false]], "leakyrelu (class in mlx.nn)": [[381, "mlx.nn.LeakyReLU", false]], "left_shift (c++ function)": [[0, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", false]], "left_shift() (in module mlx.core)": [[199, "mlx.core.left_shift", false]], "less (c++ function)": [[0, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", false]], "less() (in module mlx.core)": [[200, "mlx.core.less", false]], "less_equal (c++ function)": [[0, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", false]], "less_equal() (in module mlx.core)": [[201, "mlx.core.less_equal", false]], "linear (class in mlx.nn)": [[382, "mlx.nn.Linear", false]], "linear_schedule() (in module mlx.optimizers)": [[504, "mlx.optimizers.linear_schedule", false]], "linspace (c++ function)": [[0, "_CPPv48linspaceddi5Dtype14StreamOrDevice", false]], "linspace() (in module mlx.core)": [[219, "mlx.core.linspace", false]], "lion (class in mlx.optimizers)": [[492, "mlx.optimizers.Lion", false]], "load() (in module mlx.core)": [[220, "mlx.core.load", false]], "load_weights() (module method)": [[396, "mlx.nn.Module.load_weights", false]], "log (c++ function)": [[0, "_CPPv43logRK5array14StreamOrDevice", false]], "log() (array method)": [[54, "mlx.core.array.log", false]], "log() (in module mlx.core)": [[221, "mlx.core.log", false]], "log10 (c++ function)": [[0, "_CPPv45log10RK5array14StreamOrDevice", false]], "log10() (array method)": [[55, "mlx.core.array.log10", false]], "log10() (in module mlx.core)": [[222, "mlx.core.log10", false]], "log1p (c++ function)": [[0, "_CPPv45log1pRK5array14StreamOrDevice", false]], "log1p() (array method)": [[56, "mlx.core.array.log1p", false]], "log1p() (in module mlx.core)": [[223, "mlx.core.log1p", false]], "log2 (c++ function)": [[0, "_CPPv44log2RK5array14StreamOrDevice", false]], "log2() (array method)": [[57, "mlx.core.array.log2", false]], "log2() (in module mlx.core)": [[224, "mlx.core.log2", false]], "log_cosh_loss (class in mlx.nn.losses)": [[460, "mlx.nn.losses.log_cosh_loss", false]], "log_sigmoid (class in mlx.nn)": [[450, "mlx.nn.log_sigmoid", false]], "log_softmax (class in mlx.nn)": [[451, "mlx.nn.log_softmax", false]], "logaddexp (c++ function)": [[0, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", false]], "logaddexp() (in module mlx.core)": [[225, "mlx.core.logaddexp", false]], "logcumsumexp (c++ function)": [[0, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", false]], "logcumsumexp() (array method)": [[58, "mlx.core.array.logcumsumexp", false]], "logcumsumexp() (in module mlx.core)": [[226, "mlx.core.logcumsumexp", false]], "logical_and (c++ function)": [[0, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", false]], "logical_and() (in module mlx.core)": [[227, "mlx.core.logical_and", false]], "logical_not (c++ function)": [[0, "_CPPv411logical_notRK5array14StreamOrDevice", false]], "logical_not() (in 
module mlx.core)": [[228, "mlx.core.logical_not", false]], "logical_or (c++ function)": [[0, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", false]], "logical_or() (in module mlx.core)": [[229, "mlx.core.logical_or", false]], "logsigmoid (class in mlx.nn)": [[383, "mlx.nn.LogSigmoid", false]], "logsoftmax (class in mlx.nn)": [[384, "mlx.nn.LogSoftmax", false]], "logsumexp (c++ function)": [[0, "_CPPv49logsumexpRK5array14StreamOrDevice", false], [0, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", false], [0, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", false]], "logsumexp() (array method)": [[59, "mlx.core.array.logsumexp", false]], "logsumexp() (in module mlx.core)": [[230, "mlx.core.logsumexp", false]], "lstm (class in mlx.nn)": [[379, "mlx.nn.LSTM", false]], "lu() (in module mlx.core.linalg)": [[210, "mlx.core.linalg.lu", false]], "lu_factor() (in module mlx.core.linalg)": [[211, "mlx.core.linalg.lu_factor", false]], "margin_ranking_loss (class in mlx.nn.losses)": [[461, "mlx.nn.losses.margin_ranking_loss", false]], "matmul (c++ function)": [[0, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", false]], "matmul() (in module mlx.core)": [[231, "mlx.core.matmul", false]], "max (c++ function)": [[0, "_CPPv43maxRK5array14StreamOrDevice", false], [0, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43maxRK5arrayb14StreamOrDevice", false], [0, "_CPPv43maxRK5arrayib14StreamOrDevice", false]], "max() (array method)": [[60, "mlx.core.array.max", false]], "max() (in module mlx.core)": [[232, "mlx.core.max", false]], "maximum (c++ function)": [[0, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", false]], "maximum() (in module mlx.core)": [[233, "mlx.core.maximum", false]], "maxpool1d (class in mlx.nn)": [[385, "mlx.nn.MaxPool1d", false]], "maxpool2d (class in mlx.nn)": [[386, "mlx.nn.MaxPool2d", false]], "maxpool3d (class in mlx.nn)": [[387, "mlx.nn.MaxPool3d", false]], "mean (c++ function)": [[0, "_CPPv44meanRK5array14StreamOrDevice", false], [0, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv44meanRK5arrayb14StreamOrDevice", false], [0, "_CPPv44meanRK5arrayib14StreamOrDevice", false]], "mean() (array method)": [[61, "mlx.core.array.mean", false]], "mean() (in module mlx.core)": [[234, "mlx.core.mean", false]], "meshgrid (c++ function)": [[0, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", false]], "meshgrid() (in module mlx.core)": [[235, "mlx.core.meshgrid", false]], "metal_kernel() (in module mlx.core.fast)": [[154, "mlx.core.fast.metal_kernel", false]], "min (c++ function)": [[0, "_CPPv43minRK5array14StreamOrDevice", false], [0, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43minRK5arrayb14StreamOrDevice", false], [0, "_CPPv43minRK5arrayib14StreamOrDevice", false]], "min() (array method)": [[62, "mlx.core.array.min", false]], "min() (in module mlx.core)": [[240, "mlx.core.min", false]], "minimum (c++ function)": [[0, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", false]], "minimum() (in module mlx.core)": [[241, "mlx.core.minimum", false]], "mish (class in mlx.nn)": [[388, "mlx.nn.Mish", false], [466, "mlx.nn.mish", false]], "module (class in mlx.nn)": [[483, "mlx.nn.Module", false]], "modules() (module method)": [[397, "mlx.nn.Module.modules", false]], "moveaxis (c++ function)": [[0, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", false]], "moveaxis() (array method)": [[63, "mlx.core.array.moveaxis", false]], 
"moveaxis() (in module mlx.core)": [[242, "mlx.core.moveaxis", false]], "mse_loss (class in mlx.nn.losses)": [[462, "mlx.nn.losses.mse_loss", false]], "multiheadattention (class in mlx.nn)": [[409, "mlx.nn.MultiHeadAttention", false]], "multioptimizer (class in mlx.optimizers)": [[493, "mlx.optimizers.MultiOptimizer", false]], "multiply (c++ function)": [[0, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", false]], "multiply() (in module mlx.core)": [[243, "mlx.core.multiply", false]], "multivariate_normal() (in module mlx.core.random)": [[264, "mlx.core.random.multivariate_normal", false]], "muon (class in mlx.optimizers)": [[494, "mlx.optimizers.Muon", false]], "named_modules() (module method)": [[398, "mlx.nn.Module.named_modules", false]], "nan_to_num (c++ function)": [[0, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", false]], "nan_to_num() (in module mlx.core)": [[244, "mlx.core.nan_to_num", false]], "nbytes (array property)": [[64, "mlx.core.array.nbytes", false]], "ndim (array property)": [[65, "mlx.core.array.ndim", false]], "negative (c++ function)": [[0, "_CPPv48negativeRK5array14StreamOrDevice", false]], "negative() (in module mlx.core)": [[245, "mlx.core.negative", false]], "new_stream() (in module mlx.core)": [[246, "mlx.core.new_stream", false]], "nll_loss (class in mlx.nn.losses)": [[463, "mlx.nn.losses.nll_loss", false]], "norm() (in module mlx.core.linalg)": [[212, "mlx.core.linalg.norm", false]], "normal() (in module mlx.core.random)": [[265, "mlx.core.random.normal", false]], "normal() (in module mlx.nn.init)": [[438, "mlx.nn.init.normal", false]], "not_equal (c++ function)": [[0, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", false]], "not_equal() (in module mlx.core)": [[247, "mlx.core.not_equal", false]], "number_of_elements (c++ function)": [[0, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", false]], "ones (c++ function)": [[0, "_CPPv44onesRK5Shape14StreamOrDevice", false], [0, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", false]], "ones() (in module mlx.core)": [[248, "mlx.core.ones", false]], "ones_like (c++ function)": [[0, "_CPPv49ones_likeRK5array14StreamOrDevice", false]], "ones_like() (in module mlx.core)": [[249, "mlx.core.ones_like", false]], "operator!= (c++ function)": [[0, "_CPPv4I0Ene5array1TRK5array", false], [0, "_CPPv4I0Ene5arrayRK5array1T", false], [0, "_CPPv4neRK5arrayRK5array", false]], "operator% (c++ function)": [[0, "_CPPv4I0Erm5array1TRK5array", false], [0, "_CPPv4I0Erm5arrayRK5array1T", false], [0, "_CPPv4rmRK5arrayRK5array", false]], "operator& (c++ function)": [[0, "_CPPv4anRK5arrayRK5array", false]], "operator&& (c++ function)": [[0, "_CPPv4aaRK5arrayRK5array", false]], "operator* (c++ function)": [[0, "_CPPv4I0Eml5array1TRK5array", false], [0, "_CPPv4I0Eml5arrayRK5array1T", false], [0, "_CPPv4mlRK5arrayRK5array", false]], "operator+ (c++ function)": [[0, "_CPPv4I0Epl5array1TRK5array", false], [0, "_CPPv4I0Epl5arrayRK5array1T", false], [0, "_CPPv4plRK5arrayRK5array", false]], "operator- (c++ function)": [[0, "_CPPv4I0Emi5array1TRK5array", false], [0, "_CPPv4I0Emi5arrayRK5array1T", false], [0, "_CPPv4miRK5array", false], [0, "_CPPv4miRK5arrayRK5array", false]], "operator/ (c++ function)": [[0, "_CPPv4dvRK5arrayRK5array", false], [0, "_CPPv4dvRK5arrayd", false], [0, "_CPPv4dvdRK5array", false]], "operator< (c++ function)": [[0, "_CPPv4I0Elt5array1TRK5array", false], [0, "_CPPv4I0Elt5arrayRK5array1T", false], [0, "_CPPv4ltRK5arrayRK5array", false]], "operator<< (c++ 
function)": [[0, "_CPPv4lsRK5arrayRK5array", false]], "operator<= (c++ function)": [[0, "_CPPv4I0Ele5array1TRK5array", false], [0, "_CPPv4I0Ele5arrayRK5array1T", false], [0, "_CPPv4leRK5arrayRK5array", false]], "operator== (c++ function)": [[0, "_CPPv4I0Eeq5array1TRK5array", false], [0, "_CPPv4I0Eeq5arrayRK5array1T", false], [0, "_CPPv4eqRK5arrayRK5array", false]], "operator> (c++ function)": [[0, "_CPPv4I0Egt5array1TRK5array", false], [0, "_CPPv4I0Egt5arrayRK5array1T", false], [0, "_CPPv4gtRK5arrayRK5array", false]], "operator>= (c++ function)": [[0, "_CPPv4I0Ege5array1TRK5array", false], [0, "_CPPv4I0Ege5arrayRK5array1T", false], [0, "_CPPv4geRK5arrayRK5array", false]], "operator>> (c++ function)": [[0, "_CPPv4rsRK5arrayRK5array", false]], "operator^ (c++ function)": [[0, "_CPPv4eoRK5arrayRK5array", false]], "operator| (c++ function)": [[0, "_CPPv4orRK5arrayRK5array", false]], "operator|| (c++ function)": [[0, "_CPPv4ooRK5arrayRK5array", false]], "operator~ (c++ function)": [[0, "_CPPv4coRK5array", false]], "optimizer (class in mlx.optimizers)": [[507, "mlx.optimizers.Optimizer", false]], "outer (c++ function)": [[0, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", false]], "outer() (in module mlx.core)": [[250, "mlx.core.outer", false]], "pad (c++ function)": [[0, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", false], [0, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", false], [0, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", false], [0, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", false]], "pad() (in module mlx.core)": [[251, "mlx.core.pad", false]], "parameters() (module method)": [[399, "mlx.nn.Module.parameters", false]], "partition (c++ function)": [[0, "_CPPv49partitionRK5arrayi14StreamOrDevice", false], [0, "_CPPv49partitionRK5arrayii14StreamOrDevice", false]], "partition() (in module mlx.core)": [[252, "mlx.core.partition", false]], "permutation() (in module mlx.core.random)": [[266, "mlx.core.random.permutation", false]], "pinv() (in module mlx.core.linalg)": [[213, "mlx.core.linalg.pinv", false]], "power (c++ function)": [[0, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", false]], "power() (in module mlx.core)": [[253, "mlx.core.power", false]], "prelu (class in mlx.nn)": [[410, "mlx.nn.PReLU", false], [467, "mlx.nn.prelu", false]], "prod (c++ function)": [[0, "_CPPv44prodRK5array14StreamOrDevice", false], [0, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv44prodRK5arrayb14StreamOrDevice", false], [0, "_CPPv44prodRK5arrayib14StreamOrDevice", false]], "prod() (array method)": [[66, "mlx.core.array.prod", false]], "prod() (in module mlx.core)": [[254, "mlx.core.prod", false]], "put_along_axis (c++ function)": [[0, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false]], "put_along_axis() (in module mlx.core)": [[255, "mlx.core.put_along_axis", false]], "qr() (in module mlx.core.linalg)": [[214, "mlx.core.linalg.qr", false]], "quantize (c++ function)": [[0, "_CPPv48quantizeRK5arrayii14StreamOrDevice", false]], "quantize() (in module mlx.core)": [[256, "mlx.core.quantize", false]], "quantize() (in module mlx.nn)": [[334, "mlx.nn.quantize", false]], "quantized_matmul (c++ function)": [[0, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", false]], "quantized_matmul() (in module mlx.core)": [[257, "mlx.core.quantized_matmul", false]], "quantizedembedding (class in mlx.nn)": [[411, 
"mlx.nn.QuantizedEmbedding", false]], "quantizedlinear (class in mlx.nn)": [[412, "mlx.nn.QuantizedLinear", false]], "radians (c++ function)": [[0, "_CPPv47radiansRK5array14StreamOrDevice", false]], "radians() (in module mlx.core)": [[258, "mlx.core.radians", false]], "randint() (in module mlx.core.random)": [[267, "mlx.core.random.randint", false]], "real (array property)": [[67, "mlx.core.array.real", false]], "real (c++ function)": [[0, "_CPPv44realRK5array14StreamOrDevice", false]], "real() (in module mlx.core)": [[272, "mlx.core.real", false]], "reciprocal (c++ function)": [[0, "_CPPv410reciprocalRK5array14StreamOrDevice", false]], "reciprocal() (array method)": [[68, "mlx.core.array.reciprocal", false]], "reciprocal() (in module mlx.core)": [[273, "mlx.core.reciprocal", false]], "recv() (in module mlx.core.distributed)": [[134, "mlx.core.distributed.recv", false]], "recv_like() (in module mlx.core.distributed)": [[135, "mlx.core.distributed.recv_like", false]], "relu (class in mlx.nn)": [[415, "mlx.nn.ReLU", false], [468, "mlx.nn.relu", false]], "relu6 (class in mlx.nn)": [[416, "mlx.nn.ReLU6", false], [469, "mlx.nn.relu6", false]], "remainder (c++ function)": [[0, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", false]], "remainder() (in module mlx.core)": [[274, "mlx.core.remainder", false]], "repeat (c++ function)": [[0, "_CPPv46repeatRK5arrayi14StreamOrDevice", false], [0, "_CPPv46repeatRK5arrayii14StreamOrDevice", false]], "repeat() (in module mlx.core)": [[275, "mlx.core.repeat", false]], "reset_peak_memory() (in module mlx.core)": [[276, "mlx.core.reset_peak_memory", false]], "reshape (c++ function)": [[0, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", false]], "reshape() (array method)": [[69, "mlx.core.array.reshape", false]], "reshape() (in module mlx.core)": [[277, "mlx.core.reshape", false]], "rfft() (in module mlx.core.fft)": [[169, "mlx.core.fft.rfft", false]], "rfft2() (in module mlx.core.fft)": [[170, "mlx.core.fft.rfft2", false]], "rfftn() (in module mlx.core.fft)": [[171, "mlx.core.fft.rfftn", false]], "right_shift (c++ function)": [[0, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", false]], "right_shift() (in module mlx.core)": [[278, "mlx.core.right_shift", false]], "rms_norm() (in module mlx.core.fast)": [[155, "mlx.core.fast.rms_norm", false]], "rmsnorm (class in mlx.nn)": [[413, "mlx.nn.RMSNorm", false]], "rmsprop (class in mlx.optimizers)": [[499, "mlx.optimizers.RMSprop", false]], "rnn (class in mlx.nn)": [[414, "mlx.nn.RNN", false]], "roll (c++ function)": [[0, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayi14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayii14StreamOrDevice", false]], "roll() (in module mlx.core)": [[279, "mlx.core.roll", false]], "rope (class in mlx.nn)": [[417, "mlx.nn.RoPE", false]], "rope() (in module mlx.core.fast)": [[156, "mlx.core.fast.rope", false]], "round (c++ function)": [[0, "_CPPv45roundRK5array14StreamOrDevice", false], [0, "_CPPv45roundRK5arrayi14StreamOrDevice", false]], "round() (array method)": [[70, "mlx.core.array.round", false]], "round() (in module mlx.core)": [[280, "mlx.core.round", false]], "rsqrt (c++ function)": [[0, "_CPPv45rsqrtRK5array14StreamOrDevice", false]], "rsqrt() (array method)": [[71, "mlx.core.array.rsqrt", false]], "rsqrt() (in module mlx.core)": [[281, 
"mlx.core.rsqrt", false]], "save() (in module mlx.core)": [[282, "mlx.core.save", false]], "save_gguf() (in module mlx.core)": [[283, "mlx.core.save_gguf", false]], "save_safetensors() (in module mlx.core)": [[284, "mlx.core.save_safetensors", false]], "save_weights() (module method)": [[400, "mlx.nn.Module.save_weights", false]], "savez() (in module mlx.core)": [[285, "mlx.core.savez", false]], "savez_compressed() (in module mlx.core)": [[286, "mlx.core.savez_compressed", false]], "scaled_dot_product_attention() (in module mlx.core.fast)": [[157, "mlx.core.fast.scaled_dot_product_attention", false]], "scatter (c++ function)": [[0, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_add (c++ function)": [[0, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_add_axis (c++ function)": [[0, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false]], "scatter_max (c++ function)": [[0, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_min (c++ function)": [[0, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_prod (c++ function)": [[0, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "seed() (in module mlx.core.random)": [[268, "mlx.core.random.seed", false]], "segmented_mm (c++ function)": [[0, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", false]], "selu (class in mlx.nn)": [[418, "mlx.nn.SELU", false], [470, "mlx.nn.selu", false]], "send() (in module mlx.core.distributed)": [[136, "mlx.core.distributed.send", false]], "sequential (class in mlx.nn)": [[419, "mlx.nn.Sequential", false]], "set_cache_limit() (in module mlx.core)": [[287, "mlx.core.set_cache_limit", false]], "set_default_device() (in module mlx.core)": [[288, "mlx.core.set_default_device", false]], "set_default_stream() (in module mlx.core)": [[289, "mlx.core.set_default_stream", false]], "set_dtype() (module method)": [[401, "mlx.nn.Module.set_dtype", false]], "set_memory_limit() (in module mlx.core)": [[290, "mlx.core.set_memory_limit", false]], "set_wired_limit() (in module mlx.core)": [[291, "mlx.core.set_wired_limit", false]], "sgd (class in mlx.optimizers)": [[500, "mlx.optimizers.SGD", false]], "shape (array property)": [[72, "mlx.core.array.shape", false]], "sigmoid (c++ function)": [[0, "_CPPv47sigmoidRK5array14StreamOrDevice", false]], "sigmoid (class in mlx.nn)": [[421, "mlx.nn.Sigmoid", false], [471, "mlx.nn.sigmoid", false]], "sigmoid() (in module mlx.core)": [[292, "mlx.core.sigmoid", false]], "sign (c++ function)": [[0, "_CPPv44signRK5array14StreamOrDevice", false]], "sign() (in module mlx.core)": [[293, "mlx.core.sign", false]], "silu (class in mlx.nn)": [[420, "mlx.nn.SiLU", false], [472, "mlx.nn.silu", false]], "sin (c++ function)": [[0, "_CPPv43sinRK5array14StreamOrDevice", false]], "sin() (array method)": [[73, "mlx.core.array.sin", false]], "sin() (in module mlx.core)": [[294, "mlx.core.sin", false]], "sinh (c++ function)": [[0, 
"_CPPv44sinhRK5array14StreamOrDevice", false]], "sinh() (in module mlx.core)": [[295, "mlx.core.sinh", false]], "sinusoidalpositionalencoding (class in mlx.nn)": [[422, "mlx.nn.SinusoidalPositionalEncoding", false]], "size (array property)": [[74, "mlx.core.array.size", false]], "slice (c++ function)": [[0, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", false], [0, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", false], [0, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", false], [0, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", false]], "slice() (in module mlx.core)": [[296, "mlx.core.slice", false]], "slice_update (c++ function)": [[0, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", false], [0, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", false], [0, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", false]], "slice_update() (in module mlx.core)": [[297, "mlx.core.slice_update", false]], "smooth_l1_loss (class in mlx.nn.losses)": [[464, "mlx.nn.losses.smooth_l1_loss", false]], "softmax (c++ function)": [[0, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv47softmaxRK5arrayb14StreamOrDevice", false], [0, "_CPPv47softmaxRK5arrayib14StreamOrDevice", false]], "softmax (class in mlx.nn)": [[423, "mlx.nn.Softmax", false], [473, "mlx.nn.softmax", false]], "softmax() (in module mlx.core)": [[298, "mlx.core.softmax", false]], "softmin (class in mlx.nn)": [[424, "mlx.nn.Softmin", false], [474, "mlx.nn.softmin", false]], "softplus (class in mlx.nn)": [[425, "mlx.nn.Softplus", false], [475, "mlx.nn.softplus", false]], "softshrink (class in mlx.nn)": [[426, "mlx.nn.Softshrink", false], [476, "mlx.nn.softshrink", false]], "softsign (class in mlx.nn)": [[427, "mlx.nn.Softsign", false]], "solve() (in module mlx.core.linalg)": [[215, "mlx.core.linalg.solve", false]], "solve_triangular() (in module mlx.core.linalg)": [[216, "mlx.core.linalg.solve_triangular", false]], "sort (c++ function)": [[0, "_CPPv44sortRK5array14StreamOrDevice", false], [0, "_CPPv44sortRK5arrayi14StreamOrDevice", false]], "sort() (in module mlx.core)": [[299, "mlx.core.sort", false]], "split (c++ function)": [[0, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", false], [0, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", false], [0, "_CPPv45splitRK5arrayi14StreamOrDevice", false], [0, "_CPPv45splitRK5arrayii14StreamOrDevice", false]], "split() (array method)": [[75, "mlx.core.array.split", false]], "split() (in module mlx.core)": [[300, "mlx.core.split", false]], "split() (in module mlx.core.random)": [[269, "mlx.core.random.split", false]], "sqrt (c++ function)": [[0, "_CPPv44sqrtRK5array14StreamOrDevice", false]], "sqrt() (array method)": [[76, "mlx.core.array.sqrt", false]], "sqrt() (in module mlx.core)": [[301, "mlx.core.sqrt", false]], "square (c++ function)": [[0, "_CPPv46squareRK5array14StreamOrDevice", false]], "square() (array method)": [[77, "mlx.core.array.square", false]], "square() (in module mlx.core)": [[302, "mlx.core.square", false]], "squeeze (c++ function)": [[0, "_CPPv47squeezeRK5array14StreamOrDevice", false], [0, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv47squeezeRK5arrayi14StreamOrDevice", false]], "squeeze() (array method)": [[78, "mlx.core.array.squeeze", false]], "squeeze() (in module mlx.core)": [[303, "mlx.core.squeeze", false]], "stack (c++ function)": [[0, 
"_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", false], [0, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", false]], "stack() (in module mlx.core)": [[304, "mlx.core.stack", false]], "start_capture() (in module mlx.core.metal)": [[238, "mlx.core.metal.start_capture", false]], "state (module property)": [[402, "mlx.nn.Module.state", false]], "state (optimizer property)": [[497, "mlx.optimizers.Optimizer.state", false]], "std (c++ function)": [[0, "_CPPv4StRK5array14StreamOrDevice", false], [0, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", false], [0, "_CPPv4StRK5arraybi14StreamOrDevice", false], [0, "_CPPv4StRK5arrayibi14StreamOrDevice", false]], "std() (array method)": [[79, "mlx.core.array.std", false]], "std() (in module mlx.core)": [[305, "mlx.core.std", false]], "step (class in mlx.nn)": [[428, "mlx.nn.Step", false], [477, "mlx.nn.step", false]], "step_decay() (in module mlx.optimizers)": [[505, "mlx.optimizers.step_decay", false]], "stop_capture() (in module mlx.core.metal)": [[239, "mlx.core.metal.stop_capture", false]], "stop_gradient (c++ function)": [[0, "_CPPv413stop_gradientRK5array14StreamOrDevice", false]], "stop_gradient() (in module mlx.core)": [[306, "mlx.core.stop_gradient", false]], "stream (class in mlx.core)": [[342, "mlx.core.Stream", false]], "stream() (in module mlx.core)": [[307, "mlx.core.stream", false]], "subtract (c++ function)": [[0, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", false]], "subtract() (in module mlx.core)": [[308, "mlx.core.subtract", false]], "sum (c++ function)": [[0, "_CPPv43sumRK5array14StreamOrDevice", false], [0, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43sumRK5arrayb14StreamOrDevice", false], [0, "_CPPv43sumRK5arrayib14StreamOrDevice", false]], "sum() (array method)": [[80, "mlx.core.array.sum", false]], "sum() (in module mlx.core)": [[309, "mlx.core.sum", false]], "svd() (in module mlx.core.linalg)": [[217, "mlx.core.linalg.svd", false]], "swapaxes (c++ function)": [[0, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", false]], "swapaxes() (array method)": [[81, "mlx.core.array.swapaxes", false]], "swapaxes() (in module mlx.core)": [[310, "mlx.core.swapaxes", false]], "synchronize() (in module mlx.core)": [[311, "mlx.core.synchronize", false]], "t (array property)": [[32, "mlx.core.array.T", false]], "take (c++ function)": [[0, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", false], [0, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv44takeRK5arrayi14StreamOrDevice", false], [0, "_CPPv44takeRK5arrayii14StreamOrDevice", false]], "take() (in module mlx.core)": [[312, "mlx.core.take", false]], "take_along_axis (c++ function)": [[0, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", false]], "take_along_axis() (in module mlx.core)": [[313, "mlx.core.take_along_axis", false]], "tan (c++ function)": [[0, "_CPPv43tanRK5array14StreamOrDevice", false]], "tan() (in module mlx.core)": [[314, "mlx.core.tan", false]], "tanh (c++ function)": [[0, "_CPPv44tanhRK5array14StreamOrDevice", false]], "tanh (class in mlx.nn)": [[429, "mlx.nn.Tanh", false], [478, "mlx.nn.tanh", false]], "tanh() (in module mlx.core)": [[315, "mlx.core.tanh", false]], "tensordot (c++ function)": [[0, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", false], [0, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", false]], "tensordot() (in module mlx.core)": [[316, "mlx.core.tensordot", false]], "tile (c++ function)": [[0, 
"_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", false]], "tile() (in module mlx.core)": [[317, "mlx.core.tile", false]], "tolist() (array method)": [[82, "mlx.core.array.tolist", false]], "topk (c++ function)": [[0, "_CPPv44topkRK5arrayi14StreamOrDevice", false], [0, "_CPPv44topkRK5arrayii14StreamOrDevice", false]], "topk() (in module mlx.core)": [[318, "mlx.core.topk", false]], "trace (c++ function)": [[0, "_CPPv45traceRK5array14StreamOrDevice", false], [0, "_CPPv45traceRK5arrayiii14StreamOrDevice", false], [0, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", false]], "trace() (in module mlx.core)": [[319, "mlx.core.trace", false]], "train() (module method)": [[403, "mlx.nn.Module.train", false]], "trainable_parameters() (module method)": [[404, "mlx.nn.Module.trainable_parameters", false]], "training (module property)": [[405, "mlx.nn.Module.training", false]], "transformer (class in mlx.nn)": [[430, "mlx.nn.Transformer", false]], "transpose (c++ function)": [[0, "_CPPv49transposeRK5array14StreamOrDevice", false], [0, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", false], [0, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", false]], "transpose() (array method)": [[83, "mlx.core.array.transpose", false]], "transpose() (in module mlx.core)": [[320, "mlx.core.transpose", false]], "tree_flatten() (in module mlx.utils)": [[337, "mlx.utils.tree_flatten", false]], "tree_map() (in module mlx.utils)": [[338, "mlx.utils.tree_map", false]], "tree_map_with_path() (in module mlx.utils)": [[339, "mlx.utils.tree_map_with_path", false]], "tree_reduce() (in module mlx.utils)": [[340, "mlx.utils.tree_reduce", false]], "tree_unflatten() (in module mlx.utils)": [[341, "mlx.utils.tree_unflatten", false]], "tri (c++ function)": [[0, "_CPPv43trii5Dtype14StreamOrDevice", false], [0, "_CPPv43triiii5Dtype14StreamOrDevice", false]], "tri() (in module mlx.core)": [[321, "mlx.core.tri", false]], "tri_inv() (in module mlx.core.linalg)": [[218, "mlx.core.linalg.tri_inv", false]], "tril (c++ function)": [[0, "_CPPv44tril5arrayi14StreamOrDevice", false]], "tril() (in module mlx.core)": [[322, "mlx.core.tril", false]], "triplet_loss (class in mlx.nn.losses)": [[465, "mlx.nn.losses.triplet_loss", false]], "triu (c++ function)": [[0, "_CPPv44triu5arrayi14StreamOrDevice", false]], "triu() (in module mlx.core)": [[323, "mlx.core.triu", false]], "truncated_normal() (in module mlx.core.random)": [[270, "mlx.core.random.truncated_normal", false]], "unflatten (c++ function)": [[0, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", false]], "unflatten() (in module mlx.core)": [[324, "mlx.core.unflatten", false]], "unfreeze() (module method)": [[406, "mlx.nn.Module.unfreeze", false]], "uniform() (in module mlx.core.random)": [[271, "mlx.core.random.uniform", false]], "uniform() (in module mlx.nn.init)": [[439, "mlx.nn.init.uniform", false]], "update() (module method)": [[407, "mlx.nn.Module.update", false]], "update() (optimizer method)": [[498, "mlx.optimizers.Optimizer.update", false]], "update_modules() (module method)": [[408, "mlx.nn.Module.update_modules", false]], "upsample (class in mlx.nn)": [[431, "mlx.nn.Upsample", false]], "value_and_grad() (in module mlx.core)": [[325, "mlx.core.value_and_grad", false]], "value_and_grad() (in module mlx.nn)": [[335, "mlx.nn.value_and_grad", false]], "var (c++ function)": [[0, "_CPPv43varRK5array14StreamOrDevice", false], [0, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", false], [0, "_CPPv43varRK5arraybi14StreamOrDevice", false], [0, 
"_CPPv43varRK5arrayibi14StreamOrDevice", false]], "var() (array method)": [[84, "mlx.core.array.var", false]], "var() (in module mlx.core)": [[326, "mlx.core.var", false]], "view (c++ function)": [[0, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", false]], "view() (array method)": [[85, "mlx.core.array.view", false]], "view() (in module mlx.core)": [[327, "mlx.core.view", false]], "vjp() (in module mlx.core)": [[328, "mlx.core.vjp", false]], "vmap() (in module mlx.core)": [[329, "mlx.core.vmap", false]], "where (c++ function)": [[0, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", false]], "where() (in module mlx.core)": [[330, "mlx.core.where", false]], "zeros (c++ function)": [[0, "_CPPv45zerosRK5Shape14StreamOrDevice", false], [0, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", false]], "zeros() (in module mlx.core)": [[331, "mlx.core.zeros", false]], "zeros_like (c++ function)": [[0, "_CPPv410zeros_likeRK5array14StreamOrDevice", false]], "zeros_like() (in module mlx.core)": [[332, "mlx.core.zeros_like", false]]}, "objects": {"": [[0, 0, 1, "_CPPv43absRK5array14StreamOrDevice", "abs"], [0, 1, 1, "_CPPv43absRK5array14StreamOrDevice", "abs::a"], [0, 1, 1, "_CPPv43absRK5array14StreamOrDevice", "abs::s"], [0, 0, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add"], [0, 1, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add::a"], [0, 1, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add::b"], [0, 1, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add::s"], [0, 0, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::a"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::alpha"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::b"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::beta"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::c"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::s"], [0, 0, 1, "_CPPv43allRK5array14StreamOrDevice", "all"], [0, 0, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all"], [0, 0, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all"], [0, 0, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all"], [0, 1, 1, "_CPPv43allRK5array14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::axes"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::axis"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::keepdims"], [0, 1, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all::keepdims"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::keepdims"], [0, 1, 1, "_CPPv43allRK5array14StreamOrDevice", "all::s"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::s"], [0, 1, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all::s"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::s"], [0, 0, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::a"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::atol"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::b"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", 
"allclose::equal_nan"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::rtol"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::s"], [0, 0, 1, "_CPPv43anyRK5array14StreamOrDevice", "any"], [0, 0, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any"], [0, 0, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any"], [0, 0, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any"], [0, 1, 1, "_CPPv43anyRK5array14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::axes"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::axis"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::keepdims"], [0, 1, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any::keepdims"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::keepdims"], [0, 1, 1, "_CPPv43anyRK5array14StreamOrDevice", "any::s"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::s"], [0, 1, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any::s"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::s"], [0, 0, 1, "_CPPv46aranged14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangedd14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeddd14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangei14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeii14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeiii14StreamOrDevice", "arange"], [0, 1, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange::dtype"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::dtype"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::dtype"], [0, 1, 1, "_CPPv46aranged14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangedd14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangei14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeii14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangedd14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeii14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::step"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::step"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::step"], [0, 1, 1, "_CPPv46aranged14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangedd14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", 
"arange::stop"], [0, 1, 1, "_CPPv46arangei14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeii14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::stop"], [0, 0, 1, "_CPPv46arccosRK5array14StreamOrDevice", "arccos"], [0, 1, 1, "_CPPv46arccosRK5array14StreamOrDevice", "arccos::a"], [0, 1, 1, "_CPPv46arccosRK5array14StreamOrDevice", "arccos::s"], [0, 0, 1, "_CPPv47arccoshRK5array14StreamOrDevice", "arccosh"], [0, 1, 1, "_CPPv47arccoshRK5array14StreamOrDevice", "arccosh::a"], [0, 1, 1, "_CPPv47arccoshRK5array14StreamOrDevice", "arccosh::s"], [0, 0, 1, "_CPPv46arcsinRK5array14StreamOrDevice", "arcsin"], [0, 1, 1, "_CPPv46arcsinRK5array14StreamOrDevice", "arcsin::a"], [0, 1, 1, "_CPPv46arcsinRK5array14StreamOrDevice", "arcsin::s"], [0, 0, 1, "_CPPv47arcsinhRK5array14StreamOrDevice", "arcsinh"], [0, 1, 1, "_CPPv47arcsinhRK5array14StreamOrDevice", "arcsinh::a"], [0, 1, 1, "_CPPv47arcsinhRK5array14StreamOrDevice", "arcsinh::s"], [0, 0, 1, "_CPPv46arctanRK5array14StreamOrDevice", "arctan"], [0, 0, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2"], [0, 1, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2::a"], [0, 1, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2::b"], [0, 1, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2::s"], [0, 1, 1, "_CPPv46arctanRK5array14StreamOrDevice", "arctan::a"], [0, 1, 1, "_CPPv46arctanRK5array14StreamOrDevice", "arctan::s"], [0, 0, 1, "_CPPv47arctanhRK5array14StreamOrDevice", "arctanh"], [0, 1, 1, "_CPPv47arctanhRK5array14StreamOrDevice", "arctanh::a"], [0, 1, 1, "_CPPv47arctanhRK5array14StreamOrDevice", "arctanh::s"], [0, 0, 1, "_CPPv46argmaxRK5array14StreamOrDevice", "argmax"], [0, 0, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax"], [0, 0, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax"], [0, 1, 1, "_CPPv46argmaxRK5array14StreamOrDevice", "argmax::a"], [0, 1, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax::a"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::a"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::axis"], [0, 1, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax::keepdims"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::keepdims"], [0, 1, 1, "_CPPv46argmaxRK5array14StreamOrDevice", "argmax::s"], [0, 1, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax::s"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::s"], [0, 0, 1, "_CPPv46argminRK5array14StreamOrDevice", "argmin"], [0, 0, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin"], [0, 0, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin"], [0, 1, 1, "_CPPv46argminRK5array14StreamOrDevice", "argmin::a"], [0, 1, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin::a"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::a"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::axis"], [0, 1, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin::keepdims"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::keepdims"], [0, 1, 1, "_CPPv46argminRK5array14StreamOrDevice", "argmin::s"], [0, 1, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin::s"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::s"], [0, 0, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition"], [0, 0, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition"], [0, 1, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition::a"], [0, 1, 1, 
"_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::a"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::axis"], [0, 1, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition::kth"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::kth"], [0, 1, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition::s"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::s"], [0, 0, 1, "_CPPv47argsortRK5array14StreamOrDevice", "argsort"], [0, 0, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort"], [0, 1, 1, "_CPPv47argsortRK5array14StreamOrDevice", "argsort::a"], [0, 1, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort::a"], [0, 1, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort::axis"], [0, 1, 1, "_CPPv47argsortRK5array14StreamOrDevice", "argsort::s"], [0, 1, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort::s"], [0, 0, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal"], [0, 0, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal::a"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::a"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal::b"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::b"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::equal_nan"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal::s"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::s"], [0, 0, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::a"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::offset"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::s"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::shape"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::strides"], [0, 0, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype"], [0, 1, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype::a"], [0, 1, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype::dtype"], [0, 1, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype::s"], [0, 0, 1, "_CPPv410atleast_1dRK5array14StreamOrDevice", "atleast_1d"], [0, 0, 1, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_1d"], [0, 1, 1, "_CPPv410atleast_1dRK5array14StreamOrDevice", "atleast_1d::a"], [0, 1, 1, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_1d::a"], [0, 1, 1, "_CPPv410atleast_1dRK5array14StreamOrDevice", "atleast_1d::s"], [0, 1, 1, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_1d::s"], [0, 0, 1, "_CPPv410atleast_2dRK5array14StreamOrDevice", "atleast_2d"], [0, 0, 1, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_2d"], [0, 1, 1, "_CPPv410atleast_2dRK5array14StreamOrDevice", "atleast_2d::a"], [0, 1, 1, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_2d::a"], [0, 1, 1, "_CPPv410atleast_2dRK5array14StreamOrDevice", "atleast_2d::s"], [0, 1, 1, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_2d::s"], [0, 0, 1, 
"_CPPv410atleast_3dRK5array14StreamOrDevice", "atleast_3d"], [0, 0, 1, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_3d"], [0, 1, 1, "_CPPv410atleast_3dRK5array14StreamOrDevice", "atleast_3d::a"], [0, 1, 1, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_3d::a"], [0, 1, 1, "_CPPv410atleast_3dRK5array14StreamOrDevice", "atleast_3d::s"], [0, 1, 1, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_3d::s"], [0, 0, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and"], [0, 1, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and::a"], [0, 1, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and::b"], [0, 1, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and::s"], [0, 0, 1, "_CPPv414bitwise_invertRK5array14StreamOrDevice", "bitwise_invert"], [0, 1, 1, "_CPPv414bitwise_invertRK5array14StreamOrDevice", "bitwise_invert::a"], [0, 1, 1, "_CPPv414bitwise_invertRK5array14StreamOrDevice", "bitwise_invert::s"], [0, 0, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or"], [0, 1, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or::a"], [0, 1, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or::b"], [0, 1, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or::s"], [0, 0, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor"], [0, 1, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor::a"], [0, 1, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor::b"], [0, 1, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor::s"], [0, 0, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::a"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::b"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::block_size"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::mask_lhs"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::mask_out"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::mask_rhs"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::s"], [0, 0, 1, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", "broadcast_arrays"], [0, 1, 1, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", "broadcast_arrays::inputs"], [0, 1, 1, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", "broadcast_arrays::s"], [0, 0, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to"], [0, 1, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to::a"], [0, 1, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to::s"], [0, 1, 1, 
"_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to::shape"], [0, 0, 1, "_CPPv44ceilRK5array14StreamOrDevice", "ceil"], [0, 1, 1, "_CPPv44ceilRK5array14StreamOrDevice", "ceil::a"], [0, 1, 1, "_CPPv44ceilRK5array14StreamOrDevice", "ceil::s"], [0, 0, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::a"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::a_max"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::a_min"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::s"], [0, 0, 1, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", "concatenate"], [0, 0, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", "concatenate::arrays"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate::arrays"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate::axis"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", "concatenate::s"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate::s"], [0, 0, 1, "_CPPv49conjugateRK5array14StreamOrDevice", "conjugate"], [0, 1, 1, "_CPPv49conjugateRK5array14StreamOrDevice", "conjugate::a"], [0, 1, 1, "_CPPv49conjugateRK5array14StreamOrDevice", "conjugate::s"], [0, 0, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous"], [0, 1, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous::a"], [0, 1, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous::allow_col_major"], [0, 1, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous::s"], [0, 0, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::dilation"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::groups"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::input"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::padding"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::s"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::stride"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::weight"], [0, 0, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::dilation"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::groups"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::input"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::padding"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::s"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::stride"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", 
"conv2d::weight"], [0, 0, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::dilation"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::groups"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::input"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::padding"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::s"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::stride"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::weight"], [0, 0, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general"], [0, 0, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::flip"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::flip"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::groups"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::groups"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input_dilation"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input_dilation"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::kernel_dilation"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::kernel_dilation"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::padding"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::padding_hi"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::padding_lo"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::s"], [0, 1, 1, 
"_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::s"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::stride"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::stride"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::weight"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::weight"], [0, 0, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::dilation"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::groups"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::input"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::output_padding"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::padding"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::s"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::stride"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::weight"], [0, 0, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::dilation"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::groups"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::input"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::output_padding"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::padding"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::s"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::stride"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::weight"], [0, 0, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::dilation"], [0, 1, 1, 
"_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::groups"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::input"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::output_padding"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::padding"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::s"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::stride"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::weight"], [0, 0, 1, "_CPPv44copy5array14StreamOrDevice", "copy"], [0, 1, 1, "_CPPv44copy5array14StreamOrDevice", "copy::a"], [0, 1, 1, "_CPPv44copy5array14StreamOrDevice", "copy::s"], [0, 0, 1, "_CPPv43cosRK5array14StreamOrDevice", "cos"], [0, 1, 1, "_CPPv43cosRK5array14StreamOrDevice", "cos::a"], [0, 1, 1, "_CPPv43cosRK5array14StreamOrDevice", "cos::s"], [0, 0, 1, "_CPPv44coshRK5array14StreamOrDevice", "cosh"], [0, 1, 1, "_CPPv44coshRK5array14StreamOrDevice", "cosh::a"], [0, 1, 1, "_CPPv44coshRK5array14StreamOrDevice", "cosh::s"], [0, 0, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::a"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::axis"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::inclusive"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::reverse"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::s"], [0, 0, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::a"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::axis"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::inclusive"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::reverse"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::s"], [0, 0, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::a"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::axis"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::inclusive"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::reverse"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::s"], [0, 0, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::a"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::axis"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::inclusive"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::reverse"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::s"], [0, 0, 1, "_CPPv47degreesRK5array14StreamOrDevice", "degrees"], [0, 1, 1, "_CPPv47degreesRK5array14StreamOrDevice", "degrees::a"], [0, 1, 1, 
"_CPPv47degreesRK5array14StreamOrDevice", "degrees::s"], [0, 0, 1, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", "depends"], [0, 1, 1, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", "depends::dependencies"], [0, 1, 1, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", "depends::inputs"], [0, 0, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize::biases"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize::bits"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize::group_size"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize::s"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize::scales"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRK5arrayii14StreamOrDevice", "dequantize::w"], [0, 0, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag"], [0, 1, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag::a"], [0, 1, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag::k"], [0, 1, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag::s"], [0, 0, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::a"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::axis1"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::axis2"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::offset"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::s"], [0, 0, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide"], [0, 1, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide::a"], [0, 1, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide::b"], [0, 1, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide::s"], [0, 0, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod"], [0, 1, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod::a"], [0, 1, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod::b"], [0, 1, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod::s"], [0, 0, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal"], [0, 1, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal::a"], [0, 1, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal::b"], [0, 1, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal::s"], [0, 0, 1, "_CPPv43erfRK5array14StreamOrDevice", "erf"], [0, 1, 1, "_CPPv43erfRK5array14StreamOrDevice", "erf::a"], [0, 1, 1, "_CPPv43erfRK5array14StreamOrDevice", "erf::s"], [0, 0, 1, "_CPPv46erfinvRK5array14StreamOrDevice", "erfinv"], [0, 1, 1, "_CPPv46erfinvRK5array14StreamOrDevice", "erfinv::a"], [0, 1, 1, "_CPPv46erfinvRK5array14StreamOrDevice", "erfinv::s"], [0, 0, 1, "_CPPv43expRK5array14StreamOrDevice", "exp"], [0, 1, 1, "_CPPv43expRK5array14StreamOrDevice", "exp::a"], [0, 1, 1, "_CPPv43expRK5array14StreamOrDevice", "exp::s"], [0, 0, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims"], [0, 0, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims::a"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims::a"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims::axes"], [0, 1, 1, 
"_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims::axis"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims::s"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims::s"], [0, 0, 1, "_CPPv45expm1RK5array14StreamOrDevice", "expm1"], [0, 1, 1, "_CPPv45expm1RK5array14StreamOrDevice", "expm1::a"], [0, 1, 1, "_CPPv45expm1RK5array14StreamOrDevice", "expm1::s"], [0, 0, 1, "_CPPv43eyei14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyeii14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyeiii14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye"], [0, 1, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye::dtype"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::dtype"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::k"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::k"], [0, 1, 1, "_CPPv43eyeii14StreamOrDevice", "eye::m"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::m"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::m"], [0, 1, 1, "_CPPv43eyei14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyeii14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyei14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyeii14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::s"], [0, 0, 1, "_CPPv47flattenRK5array14StreamOrDevice", "flatten"], [0, 0, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten"], [0, 1, 1, "_CPPv47flattenRK5array14StreamOrDevice", "flatten::a"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::a"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::end_axis"], [0, 1, 1, "_CPPv47flattenRK5array14StreamOrDevice", "flatten::s"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::s"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::start_axis"], [0, 0, 1, "_CPPv45floorRK5array14StreamOrDevice", "floor"], [0, 1, 1, "_CPPv45floorRK5array14StreamOrDevice", "floor::a"], [0, 1, 1, "_CPPv45floorRK5array14StreamOrDevice", "floor::s"], [0, 0, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide"], [0, 1, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide::a"], [0, 1, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide::b"], [0, 1, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide::s"], [0, 0, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full"], [0, 0, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full"], [0, 0, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full"], [0, 0, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full"], [0, 2, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::T"], [0, 2, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::T"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::dtype"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::dtype"], [0, 1, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full::s"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::s"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::s"], [0, 1, 1, 
"_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::s"], [0, 1, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::val"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::val"], [0, 1, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full::vals"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::vals"], [0, 0, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather"], [0, 0, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::a"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::a"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::axes"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::axis"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::indices"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::indices"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::s"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::s"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::slice_sizes"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::slice_sizes"], [0, 0, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::a"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::b"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::lhs_indices"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::rhs_indices"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::s"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::sorted_indices"], [0, 0, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::biases"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::bits"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::group_size"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::lhs_indices"], [0, 1, 1, 
"_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::rhs_indices"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::s"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::scales"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::sorted_indices"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::transpose"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::w"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRK5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEbiib14StreamOrDevice", "gather_qmm::x"], [0, 0, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater"], [0, 1, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater::a"], [0, 1, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater::b"], [0, 1, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater::s"], [0, 0, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal"], [0, 1, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal::a"], [0, 1, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal::b"], [0, 1, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal::s"], [0, 0, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform"], [0, 1, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform::a"], [0, 1, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform::s"], [0, 1, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform::scale"], [0, 0, 1, "_CPPv48identityi14StreamOrDevice", "identity"], [0, 0, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity"], [0, 1, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity::dtype"], [0, 1, 1, "_CPPv48identityi14StreamOrDevice", "identity::n"], [0, 1, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity::n"], [0, 1, 1, "_CPPv48identityi14StreamOrDevice", "identity::s"], [0, 1, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity::s"], [0, 0, 1, "_CPPv44imagRK5array14StreamOrDevice", "imag"], [0, 1, 1, "_CPPv44imagRK5array14StreamOrDevice", "imag::a"], [0, 1, 1, "_CPPv44imagRK5array14StreamOrDevice", "imag::s"], [0, 0, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner"], [0, 1, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner::a"], [0, 1, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner::b"], [0, 1, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner::s"], [0, 0, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::a"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::atol"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::b"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::equal_nan"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::rtol"], [0, 1, 1, 
"_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::s"], [0, 0, 1, "_CPPv48isfiniteRK5array14StreamOrDevice", "isfinite"], [0, 1, 1, "_CPPv48isfiniteRK5array14StreamOrDevice", "isfinite::a"], [0, 1, 1, "_CPPv48isfiniteRK5array14StreamOrDevice", "isfinite::s"], [0, 0, 1, "_CPPv45isinfRK5array14StreamOrDevice", "isinf"], [0, 1, 1, "_CPPv45isinfRK5array14StreamOrDevice", "isinf::a"], [0, 1, 1, "_CPPv45isinfRK5array14StreamOrDevice", "isinf::s"], [0, 0, 1, "_CPPv45isnanRK5array14StreamOrDevice", "isnan"], [0, 1, 1, "_CPPv45isnanRK5array14StreamOrDevice", "isnan::a"], [0, 1, 1, "_CPPv45isnanRK5array14StreamOrDevice", "isnan::s"], [0, 0, 1, "_CPPv48isneginfRK5array14StreamOrDevice", "isneginf"], [0, 1, 1, "_CPPv48isneginfRK5array14StreamOrDevice", "isneginf::a"], [0, 1, 1, "_CPPv48isneginfRK5array14StreamOrDevice", "isneginf::s"], [0, 0, 1, "_CPPv48isposinfRK5array14StreamOrDevice", "isposinf"], [0, 1, 1, "_CPPv48isposinfRK5array14StreamOrDevice", "isposinf::a"], [0, 1, 1, "_CPPv48isposinfRK5array14StreamOrDevice", "isposinf::s"], [0, 0, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron"], [0, 1, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron::a"], [0, 1, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron::b"], [0, 1, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron::s"], [0, 0, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift"], [0, 1, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift::a"], [0, 1, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift::b"], [0, 1, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift::s"], [0, 0, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less"], [0, 1, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less::a"], [0, 1, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less::b"], [0, 1, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less::s"], [0, 0, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal"], [0, 1, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal::a"], [0, 1, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal::b"], [0, 1, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal::s"], [0, 0, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::dtype"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::num"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::s"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::start"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::stop"], [0, 0, 1, "_CPPv43logRK5array14StreamOrDevice", "log"], [0, 0, 1, "_CPPv45log10RK5array14StreamOrDevice", "log10"], [0, 1, 1, "_CPPv45log10RK5array14StreamOrDevice", "log10::a"], [0, 1, 1, "_CPPv45log10RK5array14StreamOrDevice", "log10::s"], [0, 0, 1, "_CPPv45log1pRK5array14StreamOrDevice", "log1p"], [0, 1, 1, "_CPPv45log1pRK5array14StreamOrDevice", "log1p::a"], [0, 1, 1, "_CPPv45log1pRK5array14StreamOrDevice", "log1p::s"], [0, 0, 1, "_CPPv44log2RK5array14StreamOrDevice", "log2"], [0, 1, 1, "_CPPv44log2RK5array14StreamOrDevice", "log2::a"], [0, 1, 1, "_CPPv44log2RK5array14StreamOrDevice", "log2::s"], [0, 1, 1, "_CPPv43logRK5array14StreamOrDevice", "log::a"], [0, 1, 1, "_CPPv43logRK5array14StreamOrDevice", "log::s"], [0, 0, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp"], [0, 1, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", 
"logaddexp::a"], [0, 1, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp::b"], [0, 1, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp::s"], [0, 0, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::a"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::axis"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::inclusive"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::reverse"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::s"], [0, 0, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and"], [0, 1, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and::a"], [0, 1, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and::b"], [0, 1, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and::s"], [0, 0, 1, "_CPPv411logical_notRK5array14StreamOrDevice", "logical_not"], [0, 1, 1, "_CPPv411logical_notRK5array14StreamOrDevice", "logical_not::a"], [0, 1, 1, "_CPPv411logical_notRK5array14StreamOrDevice", "logical_not::s"], [0, 0, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or"], [0, 1, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or::a"], [0, 1, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or::b"], [0, 1, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or::s"], [0, 0, 1, "_CPPv49logsumexpRK5array14StreamOrDevice", "logsumexp"], [0, 0, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp"], [0, 0, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp"], [0, 0, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp"], [0, 1, 1, "_CPPv49logsumexpRK5array14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::axes"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::axis"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::keepdims"], [0, 1, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp::keepdims"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::keepdims"], [0, 1, 1, "_CPPv49logsumexpRK5array14StreamOrDevice", "logsumexp::s"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::s"], [0, 1, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp::s"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::s"], [0, 0, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul"], [0, 1, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul::a"], [0, 1, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul::b"], [0, 1, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul::s"], [0, 0, 1, "_CPPv43maxRK5array14StreamOrDevice", "max"], [0, 0, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max"], [0, 0, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max"], [0, 0, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max"], [0, 1, 1, "_CPPv43maxRK5array14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::a"], [0, 1, 
1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::axes"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::axis"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::keepdims"], [0, 1, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max::keepdims"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::keepdims"], [0, 1, 1, "_CPPv43maxRK5array14StreamOrDevice", "max::s"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::s"], [0, 1, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max::s"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::s"], [0, 0, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum"], [0, 1, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum::a"], [0, 1, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum::b"], [0, 1, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum::s"], [0, 0, 1, "_CPPv44meanRK5array14StreamOrDevice", "mean"], [0, 0, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean"], [0, 0, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean"], [0, 0, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean"], [0, 1, 1, "_CPPv44meanRK5array14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::axes"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::axis"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::keepdims"], [0, 1, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean::keepdims"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::keepdims"], [0, 1, 1, "_CPPv44meanRK5array14StreamOrDevice", "mean::s"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::s"], [0, 1, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean::s"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::s"], [0, 0, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::arrays"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::indexing"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::s"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::sparse"], [0, 0, 1, "_CPPv43minRK5array14StreamOrDevice", "min"], [0, 0, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min"], [0, 0, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min"], [0, 0, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min"], [0, 1, 1, "_CPPv43minRK5array14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::axes"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::axis"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::keepdims"], [0, 1, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min::keepdims"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::keepdims"], [0, 1, 1, 
"_CPPv43minRK5array14StreamOrDevice", "min::s"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::s"], [0, 1, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min::s"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::s"], [0, 0, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum"], [0, 1, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum::a"], [0, 1, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum::b"], [0, 1, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum::s"], [0, 0, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::a"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::destination"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::s"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::source"], [0, 0, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply"], [0, 1, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply::a"], [0, 1, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply::b"], [0, 1, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply::s"], [0, 0, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::a"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::nan"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::neginf"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::posinf"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::s"], [0, 0, 1, "_CPPv48negativeRK5array14StreamOrDevice", "negative"], [0, 1, 1, "_CPPv48negativeRK5array14StreamOrDevice", "negative::a"], [0, 1, 1, "_CPPv48negativeRK5array14StreamOrDevice", "negative::s"], [0, 0, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal"], [0, 1, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal::a"], [0, 1, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal::b"], [0, 1, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal::s"], [0, 0, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::a"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::axes"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::dtype"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::inverted"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::s"], [0, 0, 1, "_CPPv44onesRK5Shape14StreamOrDevice", "ones"], [0, 0, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones"], [0, 1, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones::dtype"], [0, 1, 1, "_CPPv44onesRK5Shape14StreamOrDevice", "ones::s"], [0, 1, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones::s"], [0, 1, 1, "_CPPv44onesRK5Shape14StreamOrDevice", "ones::shape"], [0, 1, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones::shape"], [0, 0, 1, 
"_CPPv49ones_likeRK5array14StreamOrDevice", "ones_like"], [0, 1, 1, "_CPPv49ones_likeRK5array14StreamOrDevice", "ones_like::a"], [0, 1, 1, "_CPPv49ones_likeRK5array14StreamOrDevice", "ones_like::s"], [0, 0, 1, "_CPPv4I0Ene5array1TRK5array", "operator!="], [0, 0, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!="], [0, 0, 1, "_CPPv4neRK5arrayRK5array", "operator!="], [0, 2, 1, "_CPPv4I0Ene5array1TRK5array", "operator!=::T"], [0, 2, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!=::T"], [0, 1, 1, "_CPPv4I0Ene5array1TRK5array", "operator!=::a"], [0, 1, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!=::a"], [0, 1, 1, "_CPPv4neRK5arrayRK5array", "operator!=::a"], [0, 1, 1, "_CPPv4I0Ene5array1TRK5array", "operator!=::b"], [0, 1, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!=::b"], [0, 1, 1, "_CPPv4neRK5arrayRK5array", "operator!=::b"], [0, 0, 1, "_CPPv4I0Erm5array1TRK5array", "operator%"], [0, 0, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%"], [0, 0, 1, "_CPPv4rmRK5arrayRK5array", "operator%"], [0, 2, 1, "_CPPv4I0Erm5array1TRK5array", "operator%::T"], [0, 2, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%::T"], [0, 1, 1, "_CPPv4I0Erm5array1TRK5array", "operator%::a"], [0, 1, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%::a"], [0, 1, 1, "_CPPv4rmRK5arrayRK5array", "operator%::a"], [0, 1, 1, "_CPPv4I0Erm5array1TRK5array", "operator%::b"], [0, 1, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%::b"], [0, 1, 1, "_CPPv4rmRK5arrayRK5array", "operator%::b"], [0, 0, 1, "_CPPv4anRK5arrayRK5array", "operator&"], [0, 0, 1, "_CPPv4aaRK5arrayRK5array", "operator&&"], [0, 1, 1, "_CPPv4aaRK5arrayRK5array", "operator&&::a"], [0, 1, 1, "_CPPv4aaRK5arrayRK5array", "operator&&::b"], [0, 1, 1, "_CPPv4anRK5arrayRK5array", "operator&::a"], [0, 1, 1, "_CPPv4anRK5arrayRK5array", "operator&::b"], [0, 0, 1, "_CPPv4I0Eml5array1TRK5array", "operator*"], [0, 0, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*"], [0, 0, 1, "_CPPv4mlRK5arrayRK5array", "operator*"], [0, 2, 1, "_CPPv4I0Eml5array1TRK5array", "operator*::T"], [0, 2, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*::T"], [0, 1, 1, "_CPPv4I0Eml5array1TRK5array", "operator*::a"], [0, 1, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*::a"], [0, 1, 1, "_CPPv4mlRK5arrayRK5array", "operator*::a"], [0, 1, 1, "_CPPv4I0Eml5array1TRK5array", "operator*::b"], [0, 1, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*::b"], [0, 1, 1, "_CPPv4mlRK5arrayRK5array", "operator*::b"], [0, 0, 1, "_CPPv4I0Epl5array1TRK5array", "operator+"], [0, 0, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+"], [0, 0, 1, "_CPPv4plRK5arrayRK5array", "operator+"], [0, 2, 1, "_CPPv4I0Epl5array1TRK5array", "operator+::T"], [0, 2, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+::T"], [0, 1, 1, "_CPPv4I0Epl5array1TRK5array", "operator+::a"], [0, 1, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+::a"], [0, 1, 1, "_CPPv4plRK5arrayRK5array", "operator+::a"], [0, 1, 1, "_CPPv4I0Epl5array1TRK5array", "operator+::b"], [0, 1, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+::b"], [0, 1, 1, "_CPPv4plRK5arrayRK5array", "operator+::b"], [0, 0, 1, "_CPPv4I0Emi5array1TRK5array", "operator-"], [0, 0, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-"], [0, 0, 1, "_CPPv4miRK5array", "operator-"], [0, 0, 1, "_CPPv4miRK5arrayRK5array", "operator-"], [0, 2, 1, "_CPPv4I0Emi5array1TRK5array", "operator-::T"], [0, 2, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-::T"], [0, 1, 1, "_CPPv4I0Emi5array1TRK5array", "operator-::a"], [0, 1, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-::a"], [0, 1, 1, "_CPPv4miRK5array", "operator-::a"], [0, 1, 1, "_CPPv4miRK5arrayRK5array", 
"operator-::a"], [0, 1, 1, "_CPPv4I0Emi5array1TRK5array", "operator-::b"], [0, 1, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-::b"], [0, 1, 1, "_CPPv4miRK5arrayRK5array", "operator-::b"], [0, 0, 1, "_CPPv4dvRK5arrayRK5array", "operator/"], [0, 0, 1, "_CPPv4dvRK5arrayd", "operator/"], [0, 0, 1, "_CPPv4dvdRK5array", "operator/"], [0, 1, 1, "_CPPv4dvRK5arrayRK5array", "operator/::a"], [0, 1, 1, "_CPPv4dvRK5arrayd", "operator/::a"], [0, 1, 1, "_CPPv4dvdRK5array", "operator/::a"], [0, 1, 1, "_CPPv4dvRK5arrayRK5array", "operator/::b"], [0, 1, 1, "_CPPv4dvRK5arrayd", "operator/::b"], [0, 1, 1, "_CPPv4dvdRK5array", "operator/::b"], [0, 0, 1, "_CPPv4I0Elt5array1TRK5array", "operator<"], [0, 0, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<"], [0, 0, 1, "_CPPv4ltRK5arrayRK5array", "operator<"], [0, 2, 1, "_CPPv4I0Elt5array1TRK5array", "operator<::T"], [0, 2, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<::T"], [0, 1, 1, "_CPPv4I0Elt5array1TRK5array", "operator<::a"], [0, 1, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<::a"], [0, 1, 1, "_CPPv4ltRK5arrayRK5array", "operator<::a"], [0, 1, 1, "_CPPv4I0Elt5array1TRK5array", "operator<::b"], [0, 1, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<::b"], [0, 1, 1, "_CPPv4ltRK5arrayRK5array", "operator<::b"], [0, 0, 1, "_CPPv4lsRK5arrayRK5array", "operator<<"], [0, 1, 1, "_CPPv4lsRK5arrayRK5array", "operator<<::a"], [0, 1, 1, "_CPPv4lsRK5arrayRK5array", "operator<<::b"], [0, 0, 1, "_CPPv4I0Ele5array1TRK5array", "operator<="], [0, 0, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<="], [0, 0, 1, "_CPPv4leRK5arrayRK5array", "operator<="], [0, 2, 1, "_CPPv4I0Ele5array1TRK5array", "operator<=::T"], [0, 2, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<=::T"], [0, 1, 1, "_CPPv4I0Ele5array1TRK5array", "operator<=::a"], [0, 1, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<=::a"], [0, 1, 1, "_CPPv4leRK5arrayRK5array", "operator<=::a"], [0, 1, 1, "_CPPv4I0Ele5array1TRK5array", "operator<=::b"], [0, 1, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<=::b"], [0, 1, 1, "_CPPv4leRK5arrayRK5array", "operator<=::b"], [0, 0, 1, "_CPPv4I0Eeq5array1TRK5array", "operator=="], [0, 0, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator=="], [0, 0, 1, "_CPPv4eqRK5arrayRK5array", "operator=="], [0, 2, 1, "_CPPv4I0Eeq5array1TRK5array", "operator==::T"], [0, 2, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator==::T"], [0, 1, 1, "_CPPv4I0Eeq5array1TRK5array", "operator==::a"], [0, 1, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator==::a"], [0, 1, 1, "_CPPv4eqRK5arrayRK5array", "operator==::a"], [0, 1, 1, "_CPPv4I0Eeq5array1TRK5array", "operator==::b"], [0, 1, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator==::b"], [0, 1, 1, "_CPPv4eqRK5arrayRK5array", "operator==::b"], [0, 0, 1, "_CPPv4I0Egt5array1TRK5array", "operator>"], [0, 0, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>"], [0, 0, 1, "_CPPv4gtRK5arrayRK5array", "operator>"], [0, 2, 1, "_CPPv4I0Egt5array1TRK5array", "operator>::T"], [0, 2, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>::T"], [0, 1, 1, "_CPPv4I0Egt5array1TRK5array", "operator>::a"], [0, 1, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>::a"], [0, 1, 1, "_CPPv4gtRK5arrayRK5array", "operator>::a"], [0, 1, 1, "_CPPv4I0Egt5array1TRK5array", "operator>::b"], [0, 1, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>::b"], [0, 1, 1, "_CPPv4gtRK5arrayRK5array", "operator>::b"], [0, 0, 1, "_CPPv4I0Ege5array1TRK5array", "operator>="], [0, 0, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>="], [0, 0, 1, "_CPPv4geRK5arrayRK5array", "operator>="], [0, 2, 1, "_CPPv4I0Ege5array1TRK5array", "operator>=::T"], [0, 2, 1, 
"_CPPv4I0Ege5arrayRK5array1T", "operator>=::T"], [0, 1, 1, "_CPPv4I0Ege5array1TRK5array", "operator>=::a"], [0, 1, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>=::a"], [0, 1, 1, "_CPPv4geRK5arrayRK5array", "operator>=::a"], [0, 1, 1, "_CPPv4I0Ege5array1TRK5array", "operator>=::b"], [0, 1, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>=::b"], [0, 1, 1, "_CPPv4geRK5arrayRK5array", "operator>=::b"], [0, 0, 1, "_CPPv4rsRK5arrayRK5array", "operator>>"], [0, 1, 1, "_CPPv4rsRK5arrayRK5array", "operator>>::a"], [0, 1, 1, "_CPPv4rsRK5arrayRK5array", "operator>>::b"], [0, 0, 1, "_CPPv4eoRK5arrayRK5array", "operator^"], [0, 1, 1, "_CPPv4eoRK5arrayRK5array", "operator^::a"], [0, 1, 1, "_CPPv4eoRK5arrayRK5array", "operator^::b"], [0, 0, 1, "_CPPv4orRK5arrayRK5array", "operator|"], [0, 1, 1, "_CPPv4orRK5arrayRK5array", "operator|::a"], [0, 1, 1, "_CPPv4orRK5arrayRK5array", "operator|::b"], [0, 0, 1, "_CPPv4ooRK5arrayRK5array", "operator||"], [0, 1, 1, "_CPPv4ooRK5arrayRK5array", "operator||::a"], [0, 1, 1, "_CPPv4ooRK5arrayRK5array", "operator||::b"], [0, 0, 1, "_CPPv4coRK5array", "operator~"], [0, 1, 1, "_CPPv4coRK5array", "operator~::a"], [0, 0, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer"], [0, 1, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer::a"], [0, 1, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer::b"], [0, 1, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer::s"], [0, 0, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 0, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 0, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 0, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::axes"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::high_pad_size"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::low_pad_size"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_width"], [0, 1, 1, 
"_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_width"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_width"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 0, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition"], [0, 0, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition"], [0, 1, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition::a"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::a"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::axis"], [0, 1, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition::kth"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::kth"], [0, 1, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition::s"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::s"], [0, 0, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power"], [0, 1, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power::a"], [0, 1, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power::b"], [0, 1, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power::s"], [0, 0, 1, "_CPPv44prodRK5array14StreamOrDevice", "prod"], [0, 0, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod"], [0, 0, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod"], [0, 0, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod"], [0, 1, 1, "_CPPv44prodRK5array14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::axes"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::axis"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::keepdims"], [0, 1, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod::keepdims"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::keepdims"], [0, 1, 1, "_CPPv44prodRK5array14StreamOrDevice", "prod::s"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::s"], [0, 1, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod::s"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::s"], [0, 0, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::a"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::axis"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::indices"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::s"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::values"], [0, 0, 1, "_CPPv48quantizeRK5arrayii14StreamOrDevice", "quantize"], [0, 1, 1, "_CPPv48quantizeRK5arrayii14StreamOrDevice", "quantize::bits"], [0, 1, 1, "_CPPv48quantizeRK5arrayii14StreamOrDevice", "quantize::group_size"], [0, 1, 1, 
"_CPPv48quantizeRK5arrayii14StreamOrDevice", "quantize::s"], [0, 1, 1, "_CPPv48quantizeRK5arrayii14StreamOrDevice", "quantize::w"], [0, 0, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::biases"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::bits"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::group_size"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::s"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::scales"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::transpose"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::w"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5array5arraybii14StreamOrDevice", "quantized_matmul::x"], [0, 0, 1, "_CPPv47radiansRK5array14StreamOrDevice", "radians"], [0, 1, 1, "_CPPv47radiansRK5array14StreamOrDevice", "radians::a"], [0, 1, 1, "_CPPv47radiansRK5array14StreamOrDevice", "radians::s"], [0, 0, 1, "_CPPv44realRK5array14StreamOrDevice", "real"], [0, 1, 1, "_CPPv44realRK5array14StreamOrDevice", "real::a"], [0, 1, 1, "_CPPv44realRK5array14StreamOrDevice", "real::s"], [0, 0, 1, "_CPPv410reciprocalRK5array14StreamOrDevice", "reciprocal"], [0, 1, 1, "_CPPv410reciprocalRK5array14StreamOrDevice", "reciprocal::a"], [0, 1, 1, "_CPPv410reciprocalRK5array14StreamOrDevice", "reciprocal::s"], [0, 0, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder"], [0, 1, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder::a"], [0, 1, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder::b"], [0, 1, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder::s"], [0, 0, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat"], [0, 0, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat"], [0, 1, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat::arr"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::arr"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::axis"], [0, 1, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat::repeats"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::repeats"], [0, 1, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat::s"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::s"], [0, 0, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape"], [0, 1, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape::a"], [0, 1, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape::s"], [0, 1, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape::shape"], [0, 0, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift"], [0, 1, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift::a"], [0, 1, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift::b"], [0, 1, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift::s"], [0, 0, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll"], [0, 0, 1, 
"_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::axes"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::axes"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::axis"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::axis"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::shift"], [0, 0, 1, "_CPPv45roundRK5array14StreamOrDevice", "round"], [0, 0, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round"], [0, 1, 1, "_CPPv45roundRK5array14StreamOrDevice", "round::a"], [0, 1, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round::a"], [0, 1, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round::decimals"], [0, 1, 1, "_CPPv45roundRK5array14StreamOrDevice", "round::s"], [0, 1, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round::s"], [0, 0, 1, "_CPPv45rsqrtRK5array14StreamOrDevice", "rsqrt"], [0, 1, 1, "_CPPv45rsqrtRK5array14StreamOrDevice", "rsqrt::a"], [0, 1, 1, "_CPPv45rsqrtRK5array14StreamOrDevice", "rsqrt::s"], [0, 0, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter"], [0, 0, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::a"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::a"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::axes"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::axis"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::indices"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::indices"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::s"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::s"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::updates"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", 
"scatter::updates"], [0, 0, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add"], [0, 0, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::a"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::a"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::axes"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::axis"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::indices"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::indices"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::s"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::s"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::updates"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::updates"], [0, 0, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::a"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::axis"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::indices"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::s"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::values"], [0, 0, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max"], [0, 0, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::a"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::a"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::axes"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::axis"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::indices"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::indices"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::s"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::s"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::updates"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::updates"], [0, 0, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min"], [0, 0, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", 
"scatter_min::a"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::a"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::axes"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::axis"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::indices"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::indices"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::s"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::s"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::updates"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::updates"], [0, 0, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod"], [0, 0, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::a"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::a"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::axes"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::axis"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::indices"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::indices"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::s"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::s"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::updates"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::updates"], [0, 0, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::a"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::b"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::s"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::segments"], [0, 0, 1, "_CPPv47sigmoidRK5array14StreamOrDevice", "sigmoid"], [0, 1, 1, "_CPPv47sigmoidRK5array14StreamOrDevice", "sigmoid::a"], [0, 1, 1, "_CPPv47sigmoidRK5array14StreamOrDevice", "sigmoid::s"], [0, 0, 1, "_CPPv44signRK5array14StreamOrDevice", "sign"], [0, 1, 1, "_CPPv44signRK5array14StreamOrDevice", "sign::a"], [0, 1, 1, "_CPPv44signRK5array14StreamOrDevice", "sign::s"], [0, 0, 1, "_CPPv43sinRK5array14StreamOrDevice", "sin"], [0, 1, 1, "_CPPv43sinRK5array14StreamOrDevice", "sin::a"], [0, 1, 1, "_CPPv43sinRK5array14StreamOrDevice", "sin::s"], [0, 0, 1, "_CPPv44sinhRK5array14StreamOrDevice", "sinh"], [0, 1, 1, "_CPPv44sinhRK5array14StreamOrDevice", "sinh::a"], [0, 1, 1, "_CPPv44sinhRK5array14StreamOrDevice", "sinh::s"], [0, 0, 1, 
"_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice"], [0, 0, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice"], [0, 0, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice"], [0, 0, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::axes"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::slice_size"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::stop"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::stop"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::stop"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::strides"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::strides"], [0, 0, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update"], [0, 0, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update"], [0, 0, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::axes"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::s"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::s"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::s"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::src"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::src"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::src"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::start"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::start"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::start"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::stop"], [0, 1, 1, 
"_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::stop"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::strides"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::update"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::update"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::update"], [0, 0, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax"], [0, 0, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax"], [0, 0, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::a"], [0, 1, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax::a"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::a"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::axes"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::axis"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::precise"], [0, 1, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax::precise"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::precise"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::s"], [0, 1, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax::s"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::s"], [0, 0, 1, "_CPPv44sortRK5array14StreamOrDevice", "sort"], [0, 0, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort"], [0, 1, 1, "_CPPv44sortRK5array14StreamOrDevice", "sort::a"], [0, 1, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort::a"], [0, 1, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort::axis"], [0, 1, 1, "_CPPv44sortRK5array14StreamOrDevice", "sort::s"], [0, 1, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort::s"], [0, 0, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split"], [0, 0, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split"], [0, 0, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split"], [0, 0, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::axis"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::axis"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split::indices"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::indices"], [0, 1, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split::num_splits"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::num_splits"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split::s"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::s"], [0, 1, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split::s"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::s"], [0, 0, 1, "_CPPv44sqrtRK5array14StreamOrDevice", "sqrt"], [0, 1, 1, "_CPPv44sqrtRK5array14StreamOrDevice", "sqrt::a"], [0, 1, 1, "_CPPv44sqrtRK5array14StreamOrDevice", "sqrt::s"], [0, 0, 1, "_CPPv46squareRK5array14StreamOrDevice", "square"], [0, 1, 1, "_CPPv46squareRK5array14StreamOrDevice", 
"square::a"], [0, 1, 1, "_CPPv46squareRK5array14StreamOrDevice", "square::s"], [0, 0, 1, "_CPPv47squeezeRK5array14StreamOrDevice", "squeeze"], [0, 0, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze"], [0, 0, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze"], [0, 1, 1, "_CPPv47squeezeRK5array14StreamOrDevice", "squeeze::a"], [0, 1, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze::a"], [0, 1, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze::a"], [0, 1, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze::axes"], [0, 1, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze::axis"], [0, 1, 1, "_CPPv47squeezeRK5array14StreamOrDevice", "squeeze::s"], [0, 1, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze::s"], [0, 1, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze::s"], [0, 0, 1, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", "stack"], [0, 0, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", "stack::arrays"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack::arrays"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack::axis"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", "stack::s"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack::s"], [0, 0, 1, "_CPPv4StRK5array14StreamOrDevice", "std"], [0, 0, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std"], [0, 0, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std"], [0, 0, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std"], [0, 1, 1, "_CPPv4StRK5array14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::axes"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::axis"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::ddof"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::ddof"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::ddof"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::keepdims"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::keepdims"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::keepdims"], [0, 1, 1, "_CPPv4StRK5array14StreamOrDevice", "std::s"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::s"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::s"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::s"], [0, 0, 1, "_CPPv413stop_gradientRK5array14StreamOrDevice", "stop_gradient"], [0, 1, 1, "_CPPv413stop_gradientRK5array14StreamOrDevice", "stop_gradient::a"], [0, 1, 1, "_CPPv413stop_gradientRK5array14StreamOrDevice", "stop_gradient::s"], [0, 0, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract"], [0, 1, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract::a"], [0, 1, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract::b"], [0, 1, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract::s"], [0, 0, 1, "_CPPv43sumRK5array14StreamOrDevice", "sum"], [0, 0, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum"], [0, 0, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum"], [0, 0, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum"], [0, 1, 1, 
"_CPPv43sumRK5array14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::axes"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::axis"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::keepdims"], [0, 1, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum::keepdims"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::keepdims"], [0, 1, 1, "_CPPv43sumRK5array14StreamOrDevice", "sum::s"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::s"], [0, 1, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum::s"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::s"], [0, 0, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::a"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::axis1"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::axis2"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::s"], [0, 0, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take"], [0, 0, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take"], [0, 0, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take"], [0, 0, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take"], [0, 1, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::axis"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::axis"], [0, 1, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take::index"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::index"], [0, 1, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take::indices"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::indices"], [0, 1, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take::s"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::s"], [0, 1, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take::s"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::s"], [0, 0, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::a"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::axis"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::indices"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::s"], [0, 0, 1, "_CPPv43tanRK5array14StreamOrDevice", "tan"], [0, 1, 1, "_CPPv43tanRK5array14StreamOrDevice", "tan::a"], [0, 1, 1, "_CPPv43tanRK5array14StreamOrDevice", "tan::s"], [0, 0, 1, "_CPPv44tanhRK5array14StreamOrDevice", "tanh"], [0, 1, 1, "_CPPv44tanhRK5array14StreamOrDevice", "tanh::a"], [0, 1, 1, "_CPPv44tanhRK5array14StreamOrDevice", "tanh::s"], [0, 0, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot"], [0, 0, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::a"], [0, 1, 1, 
"_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::a"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::axes_a"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::axes_b"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::axis"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::b"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::b"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::s"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::s"], [0, 0, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile"], [0, 1, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile::arr"], [0, 1, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile::reps"], [0, 1, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile::s"], [0, 0, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk"], [0, 0, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk"], [0, 1, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk::a"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::a"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::axis"], [0, 1, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk::k"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::k"], [0, 1, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk::s"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::s"], [0, 0, 1, "_CPPv45traceRK5array14StreamOrDevice", "trace"], [0, 0, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace"], [0, 0, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace"], [0, 1, 1, "_CPPv45traceRK5array14StreamOrDevice", "trace::a"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::a"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::a"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::axis1"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::axis1"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::axis2"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::axis2"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::dtype"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::offset"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::offset"], [0, 1, 1, "_CPPv45traceRK5array14StreamOrDevice", "trace::s"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::s"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::s"], [0, 0, 1, "_CPPv49transposeRK5array14StreamOrDevice", "transpose"], [0, 0, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose"], [0, 0, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose"], [0, 1, 1, "_CPPv49transposeRK5array14StreamOrDevice", "transpose::a"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose::a"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose::a"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose::axes"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose::axes"], [0, 1, 1, "_CPPv49transposeRK5array14StreamOrDevice", "transpose::s"], [0, 1, 1, 
"_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose::s"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose::s"], [0, 0, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri"], [0, 0, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::k"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::m"], [0, 1, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri::n"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::n"], [0, 1, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri::s"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::s"], [0, 1, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri::type"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::type"], [0, 0, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril"], [0, 1, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril::k"], [0, 1, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril::s"], [0, 1, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril::x"], [0, 0, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu"], [0, 1, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu::k"], [0, 1, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu::s"], [0, 1, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu::x"], [0, 0, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::a"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::axis"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::s"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::shape"], [0, 0, 1, "_CPPv43varRK5array14StreamOrDevice", "var"], [0, 0, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var"], [0, 0, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var"], [0, 0, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var"], [0, 1, 1, "_CPPv43varRK5array14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::axes"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::axis"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::ddof"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::ddof"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::ddof"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::keepdims"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::keepdims"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::keepdims"], [0, 1, 1, "_CPPv43varRK5array14StreamOrDevice", "var::s"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::s"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::s"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::s"], [0, 0, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view"], [0, 1, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view::a"], [0, 1, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view::dtype"], [0, 1, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view::s"], [0, 0, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::condition"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::s"], [0, 1, 1, 
"_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::x"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::y"], [0, 0, 1, "_CPPv45zerosRK5Shape14StreamOrDevice", "zeros"], [0, 0, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros"], [0, 1, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros::dtype"], [0, 1, 1, "_CPPv45zerosRK5Shape14StreamOrDevice", "zeros::s"], [0, 1, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros::s"], [0, 1, 1, "_CPPv45zerosRK5Shape14StreamOrDevice", "zeros::shape"], [0, 1, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros::shape"], [0, 0, 1, "_CPPv410zeros_likeRK5array14StreamOrDevice", "zeros_like"], [0, 1, 1, "_CPPv410zeros_likeRK5array14StreamOrDevice", "zeros_like::a"], [0, 1, 1, "_CPPv410zeros_likeRK5array14StreamOrDevice", "zeros_like::s"]], "mlx.core": [[10, 3, 1, "", "Device"], [11, 3, 1, "", "Dtype"], [12, 3, 1, "", "DtypeCategory"], [342, 3, 1, "", "Stream"], [13, 5, 1, "", "abs"], [14, 5, 1, "", "add"], [15, 5, 1, "", "addmm"], [16, 5, 1, "", "all"], [17, 5, 1, "", "allclose"], [18, 5, 1, "", "any"], [19, 5, 1, "", "arange"], [20, 5, 1, "", "arccos"], [21, 5, 1, "", "arccosh"], [22, 5, 1, "", "arcsin"], [23, 5, 1, "", "arcsinh"], [24, 5, 1, "", "arctan"], [25, 5, 1, "", "arctan2"], [26, 5, 1, "", "arctanh"], [27, 5, 1, "", "argmax"], [28, 5, 1, "", "argmin"], [29, 5, 1, "", "argpartition"], [30, 5, 1, "", "argsort"], [31, 3, 1, "", "array"], [86, 5, 1, "", "array_equal"], [87, 5, 1, "", "as_strided"], [88, 5, 1, "", "async_eval"], [89, 5, 1, "", "atleast_1d"], [90, 5, 1, "", "atleast_2d"], [91, 5, 1, "", "atleast_3d"], [92, 5, 1, "", "bitwise_and"], [93, 5, 1, "", "bitwise_invert"], [94, 5, 1, "", "bitwise_or"], [95, 5, 1, "", "bitwise_xor"], [96, 5, 1, "", "block_masked_mm"], [97, 5, 1, "", "broadcast_arrays"], [98, 5, 1, "", "broadcast_to"], [99, 5, 1, "", "ceil"], [100, 5, 1, "", "clear_cache"], [101, 5, 1, "", "clip"], [102, 5, 1, "", "compile"], [103, 5, 1, "", "concatenate"], [104, 5, 1, "", "conj"], [105, 5, 1, "", "conjugate"], [106, 5, 1, "", "contiguous"], [107, 5, 1, "", "conv1d"], [108, 5, 1, "", "conv2d"], [109, 5, 1, "", "conv3d"], [110, 5, 1, "", "conv_general"], [111, 5, 1, "", "conv_transpose1d"], [112, 5, 1, "", "conv_transpose2d"], [113, 5, 1, "", "conv_transpose3d"], [114, 5, 1, "", "convolve"], [115, 5, 1, "", "cos"], [116, 5, 1, "", "cosh"], [117, 5, 1, "", "cummax"], [118, 5, 1, "", "cummin"], [119, 5, 1, "", "cumprod"], [120, 5, 1, "", "cumsum"], [121, 3, 1, "", "custom_function"], [122, 5, 1, "", "default_device"], [123, 5, 1, "", "default_stream"], [124, 5, 1, "", "degrees"], [125, 5, 1, "", "dequantize"], [126, 5, 1, "", "diag"], [127, 5, 1, "", "diagonal"], [128, 5, 1, "", "disable_compile"], [137, 5, 1, "", "divide"], [138, 5, 1, "", "divmod"], [139, 5, 1, "", "einsum"], [140, 5, 1, "", "einsum_path"], [141, 5, 1, "", "enable_compile"], [142, 5, 1, "", "equal"], [143, 5, 1, "", "erf"], [144, 5, 1, "", "erfinv"], [145, 5, 1, "", "eval"], [146, 5, 1, "", "exp"], [147, 5, 1, "", "expand_dims"], [148, 5, 1, "", "expm1"], [149, 5, 1, "", "export_function"], [150, 5, 1, "", "export_to_dot"], [151, 5, 1, "", "exporter"], [152, 5, 1, "", "eye"], [172, 3, 1, "", "finfo"], [173, 5, 1, "", "flatten"], [174, 5, 1, "", "floor"], [175, 5, 1, "", "floor_divide"], [176, 5, 1, "", "full"], [177, 5, 1, "", "gather_mm"], [178, 5, 1, "", "gather_qmm"], [179, 5, 1, "", "get_active_memory"], [180, 5, 1, "", "get_cache_memory"], [181, 5, 1, "", "get_peak_memory"], [182, 5, 1, "", "grad"], 
[183, 5, 1, "", "greater"], [184, 5, 1, "", "greater_equal"], [185, 5, 1, "", "hadamard_transform"], [186, 5, 1, "", "identity"], [187, 5, 1, "", "imag"], [188, 5, 1, "", "import_function"], [189, 5, 1, "", "inner"], [190, 5, 1, "", "isclose"], [191, 5, 1, "", "isfinite"], [192, 5, 1, "", "isinf"], [193, 5, 1, "", "isnan"], [194, 5, 1, "", "isneginf"], [195, 5, 1, "", "isposinf"], [196, 5, 1, "", "issubdtype"], [197, 5, 1, "", "jvp"], [198, 5, 1, "", "kron"], [199, 5, 1, "", "left_shift"], [200, 5, 1, "", "less"], [201, 5, 1, "", "less_equal"], [219, 5, 1, "", "linspace"], [220, 5, 1, "", "load"], [221, 5, 1, "", "log"], [222, 5, 1, "", "log10"], [223, 5, 1, "", "log1p"], [224, 5, 1, "", "log2"], [225, 5, 1, "", "logaddexp"], [226, 5, 1, "", "logcumsumexp"], [227, 5, 1, "", "logical_and"], [228, 5, 1, "", "logical_not"], [229, 5, 1, "", "logical_or"], [230, 5, 1, "", "logsumexp"], [231, 5, 1, "", "matmul"], [232, 5, 1, "", "max"], [233, 5, 1, "", "maximum"], [234, 5, 1, "", "mean"], [235, 5, 1, "", "meshgrid"], [240, 5, 1, "", "min"], [241, 5, 1, "", "minimum"], [242, 5, 1, "", "moveaxis"], [243, 5, 1, "", "multiply"], [244, 5, 1, "", "nan_to_num"], [245, 5, 1, "", "negative"], [246, 5, 1, "", "new_stream"], [247, 5, 1, "", "not_equal"], [248, 5, 1, "", "ones"], [249, 5, 1, "", "ones_like"], [250, 5, 1, "", "outer"], [251, 5, 1, "", "pad"], [252, 5, 1, "", "partition"], [253, 5, 1, "", "power"], [254, 5, 1, "", "prod"], [255, 5, 1, "", "put_along_axis"], [256, 5, 1, "", "quantize"], [257, 5, 1, "", "quantized_matmul"], [258, 5, 1, "", "radians"], [272, 5, 1, "", "real"], [273, 5, 1, "", "reciprocal"], [274, 5, 1, "", "remainder"], [275, 5, 1, "", "repeat"], [276, 5, 1, "", "reset_peak_memory"], [277, 5, 1, "", "reshape"], [278, 5, 1, "", "right_shift"], [279, 5, 1, "", "roll"], [280, 5, 1, "", "round"], [281, 5, 1, "", "rsqrt"], [282, 5, 1, "", "save"], [283, 5, 1, "", "save_gguf"], [284, 5, 1, "", "save_safetensors"], [285, 5, 1, "", "savez"], [286, 5, 1, "", "savez_compressed"], [287, 5, 1, "", "set_cache_limit"], [288, 5, 1, "", "set_default_device"], [289, 5, 1, "", "set_default_stream"], [290, 5, 1, "", "set_memory_limit"], [291, 5, 1, "", "set_wired_limit"], [292, 5, 1, "", "sigmoid"], [293, 5, 1, "", "sign"], [294, 5, 1, "", "sin"], [295, 5, 1, "", "sinh"], [296, 5, 1, "", "slice"], [297, 5, 1, "", "slice_update"], [298, 5, 1, "", "softmax"], [299, 5, 1, "", "sort"], [300, 5, 1, "", "split"], [301, 5, 1, "", "sqrt"], [302, 5, 1, "", "square"], [303, 5, 1, "", "squeeze"], [304, 5, 1, "", "stack"], [305, 5, 1, "", "std"], [306, 5, 1, "", "stop_gradient"], [307, 5, 1, "", "stream"], [308, 5, 1, "", "subtract"], [309, 5, 1, "", "sum"], [310, 5, 1, "", "swapaxes"], [311, 5, 1, "", "synchronize"], [312, 5, 1, "", "take"], [313, 5, 1, "", "take_along_axis"], [314, 5, 1, "", "tan"], [315, 5, 1, "", "tanh"], [316, 5, 1, "", "tensordot"], [317, 5, 1, "", "tile"], [318, 5, 1, "", "topk"], [319, 5, 1, "", "trace"], [320, 5, 1, "", "transpose"], [321, 5, 1, "", "tri"], [322, 5, 1, "", "tril"], [323, 5, 1, "", "triu"], [324, 5, 1, "", "unflatten"], [325, 5, 1, "", "value_and_grad"], [326, 5, 1, "", "var"], [327, 5, 1, "", "view"], [328, 5, 1, "", "vjp"], [329, 5, 1, "", "vmap"], [330, 5, 1, "", "where"], [331, 5, 1, "", "zeros"], [332, 5, 1, "", "zeros_like"]], "mlx.core.Device": [[10, 4, 1, "", "__init__"]], "mlx.core.Dtype": [[11, 4, 1, "", "__init__"]], "mlx.core.DtypeCategory": [[12, 4, 1, "", "__init__"]], "mlx.core.Stream": [[342, 4, 1, "", "__init__"]], "mlx.core.array": [[32, 6, 1, "", 
"T"], [31, 4, 1, "", "__init__"], [33, 4, 1, "", "abs"], [34, 4, 1, "", "all"], [35, 4, 1, "", "any"], [36, 4, 1, "", "argmax"], [37, 4, 1, "", "argmin"], [38, 4, 1, "", "astype"], [39, 6, 1, "", "at"], [40, 4, 1, "", "conj"], [41, 4, 1, "", "cos"], [42, 4, 1, "", "cummax"], [43, 4, 1, "", "cummin"], [44, 4, 1, "", "cumprod"], [45, 4, 1, "", "cumsum"], [46, 4, 1, "", "diag"], [47, 4, 1, "", "diagonal"], [48, 6, 1, "", "dtype"], [49, 4, 1, "", "exp"], [50, 4, 1, "", "flatten"], [51, 6, 1, "", "imag"], [52, 4, 1, "", "item"], [53, 6, 1, "", "itemsize"], [54, 4, 1, "", "log"], [55, 4, 1, "", "log10"], [56, 4, 1, "", "log1p"], [57, 4, 1, "", "log2"], [58, 4, 1, "", "logcumsumexp"], [59, 4, 1, "", "logsumexp"], [60, 4, 1, "", "max"], [61, 4, 1, "", "mean"], [62, 4, 1, "", "min"], [63, 4, 1, "", "moveaxis"], [64, 6, 1, "", "nbytes"], [65, 6, 1, "", "ndim"], [66, 4, 1, "", "prod"], [67, 6, 1, "", "real"], [68, 4, 1, "", "reciprocal"], [69, 4, 1, "", "reshape"], [70, 4, 1, "", "round"], [71, 4, 1, "", "rsqrt"], [72, 6, 1, "", "shape"], [73, 4, 1, "", "sin"], [74, 6, 1, "", "size"], [75, 4, 1, "", "split"], [76, 4, 1, "", "sqrt"], [77, 4, 1, "", "square"], [78, 4, 1, "", "squeeze"], [79, 4, 1, "", "std"], [80, 4, 1, "", "sum"], [81, 4, 1, "", "swapaxes"], [82, 4, 1, "", "tolist"], [83, 4, 1, "", "transpose"], [84, 4, 1, "", "var"], [85, 4, 1, "", "view"]], "mlx.core.custom_function": [[121, 4, 1, "", "__init__"]], "mlx.core.distributed": [[129, 3, 1, "", "Group"], [130, 5, 1, "", "all_gather"], [131, 5, 1, "", "all_sum"], [132, 5, 1, "", "init"], [133, 5, 1, "", "is_available"], [134, 5, 1, "", "recv"], [135, 5, 1, "", "recv_like"], [136, 5, 1, "", "send"]], "mlx.core.distributed.Group": [[129, 4, 1, "", "__init__"]], "mlx.core.fast": [[153, 5, 1, "", "layer_norm"], [154, 5, 1, "", "metal_kernel"], [155, 5, 1, "", "rms_norm"], [156, 5, 1, "", "rope"], [157, 5, 1, "", "scaled_dot_product_attention"]], "mlx.core.fft": [[158, 5, 1, "", "fft"], [159, 5, 1, "", "fft2"], [160, 5, 1, "", "fftn"], [161, 5, 1, "", "fftshift"], [162, 5, 1, "", "ifft"], [163, 5, 1, "", "ifft2"], [164, 5, 1, "", "ifftn"], [165, 5, 1, "", "ifftshift"], [166, 5, 1, "", "irfft"], [167, 5, 1, "", "irfft2"], [168, 5, 1, "", "irfftn"], [169, 5, 1, "", "rfft"], [170, 5, 1, "", "rfft2"], [171, 5, 1, "", "rfftn"]], "mlx.core.finfo": [[172, 4, 1, "", "__init__"]], "mlx.core.linalg": [[202, 5, 1, "", "cholesky"], [203, 5, 1, "", "cholesky_inv"], [204, 5, 1, "", "cross"], [205, 5, 1, "", "eig"], [206, 5, 1, "", "eigh"], [207, 5, 1, "", "eigvals"], [208, 5, 1, "", "eigvalsh"], [209, 5, 1, "", "inv"], [210, 5, 1, "", "lu"], [211, 5, 1, "", "lu_factor"], [212, 5, 1, "", "norm"], [213, 5, 1, "", "pinv"], [214, 5, 1, "", "qr"], [215, 5, 1, "", "solve"], [216, 5, 1, "", "solve_triangular"], [217, 5, 1, "", "svd"], [218, 5, 1, "", "tri_inv"]], "mlx.core.metal": [[236, 5, 1, "", "device_info"], [237, 5, 1, "", "is_available"], [238, 5, 1, "", "start_capture"], [239, 5, 1, "", "stop_capture"]], "mlx.core.random": [[259, 5, 1, "", "bernoulli"], [260, 5, 1, "", "categorical"], [261, 5, 1, "", "gumbel"], [262, 5, 1, "", "key"], [263, 5, 1, "", "laplace"], [264, 5, 1, "", "multivariate_normal"], [265, 5, 1, "", "normal"], [266, 5, 1, "", "permutation"], [267, 5, 1, "", "randint"], [268, 5, 1, "", "seed"], [269, 5, 1, "", "split"], [270, 5, 1, "", "truncated_normal"], [271, 5, 1, "", "uniform"]], "mlx.nn": [[354, 3, 1, "", "ALiBi"], [355, 3, 1, "", "AvgPool1d"], [356, 3, 1, "", "AvgPool2d"], [357, 3, 1, "", "AvgPool3d"], [358, 3, 1, "", "BatchNorm"], 
[359, 3, 1, "", "CELU"], [360, 3, 1, "", "Conv1d"], [361, 3, 1, "", "Conv2d"], [362, 3, 1, "", "Conv3d"], [363, 3, 1, "", "ConvTranspose1d"], [364, 3, 1, "", "ConvTranspose2d"], [365, 3, 1, "", "ConvTranspose3d"], [366, 3, 1, "", "Dropout"], [367, 3, 1, "", "Dropout2d"], [368, 3, 1, "", "Dropout3d"], [369, 3, 1, "", "ELU"], [370, 3, 1, "", "Embedding"], [371, 3, 1, "", "GELU"], [372, 3, 1, "", "GLU"], [373, 3, 1, "", "GRU"], [374, 3, 1, "", "GroupNorm"], [375, 3, 1, "", "HardShrink"], [376, 3, 1, "", "HardTanh"], [377, 3, 1, "", "Hardswish"], [378, 3, 1, "", "InstanceNorm"], [379, 3, 1, "", "LSTM"], [380, 3, 1, "", "LayerNorm"], [381, 3, 1, "", "LeakyReLU"], [382, 3, 1, "", "Linear"], [383, 3, 1, "", "LogSigmoid"], [384, 3, 1, "", "LogSoftmax"], [385, 3, 1, "", "MaxPool1d"], [386, 3, 1, "", "MaxPool2d"], [387, 3, 1, "", "MaxPool3d"], [388, 3, 1, "", "Mish"], [483, 3, 1, "", "Module"], [409, 3, 1, "", "MultiHeadAttention"], [410, 3, 1, "", "PReLU"], [411, 3, 1, "", "QuantizedEmbedding"], [412, 3, 1, "", "QuantizedLinear"], [413, 3, 1, "", "RMSNorm"], [414, 3, 1, "", "RNN"], [415, 3, 1, "", "ReLU"], [416, 3, 1, "", "ReLU6"], [417, 3, 1, "", "RoPE"], [418, 3, 1, "", "SELU"], [419, 3, 1, "", "Sequential"], [420, 3, 1, "", "SiLU"], [421, 3, 1, "", "Sigmoid"], [422, 3, 1, "", "SinusoidalPositionalEncoding"], [423, 3, 1, "", "Softmax"], [424, 3, 1, "", "Softmin"], [425, 3, 1, "", "Softplus"], [426, 3, 1, "", "Softshrink"], [427, 3, 1, "", "Softsign"], [428, 3, 1, "", "Step"], [429, 3, 1, "", "Tanh"], [430, 3, 1, "", "Transformer"], [431, 3, 1, "", "Upsample"], [333, 5, 1, "", "average_gradients"], [440, 3, 1, "", "celu"], [441, 3, 1, "", "elu"], [442, 3, 1, "", "gelu"], [443, 3, 1, "", "gelu_approx"], [444, 3, 1, "", "gelu_fast_approx"], [445, 3, 1, "", "glu"], [446, 3, 1, "", "hard_shrink"], [447, 3, 1, "", "hard_tanh"], [448, 3, 1, "", "hardswish"], [449, 3, 1, "", "leaky_relu"], [450, 3, 1, "", "log_sigmoid"], [451, 3, 1, "", "log_softmax"], [466, 3, 1, "", "mish"], [467, 3, 1, "", "prelu"], [334, 5, 1, "", "quantize"], [468, 3, 1, "", "relu"], [469, 3, 1, "", "relu6"], [470, 3, 1, "", "selu"], [471, 3, 1, "", "sigmoid"], [472, 3, 1, "", "silu"], [473, 3, 1, "", "softmax"], [474, 3, 1, "", "softmin"], [475, 3, 1, "", "softplus"], [476, 3, 1, "", "softshrink"], [477, 3, 1, "", "step"], [478, 3, 1, "", "tanh"], [335, 5, 1, "", "value_and_grad"]], "mlx.nn.Module": [[389, 4, 1, "", "apply"], [390, 4, 1, "", "apply_to_modules"], [391, 4, 1, "", "children"], [392, 4, 1, "", "eval"], [393, 4, 1, "", "filter_and_map"], [394, 4, 1, "", "freeze"], [395, 4, 1, "", "leaf_modules"], [396, 4, 1, "", "load_weights"], [397, 4, 1, "", "modules"], [398, 4, 1, "", "named_modules"], [399, 4, 1, "", "parameters"], [400, 4, 1, "", "save_weights"], [401, 4, 1, "", "set_dtype"], [402, 6, 1, "", "state"], [403, 4, 1, "", "train"], [404, 4, 1, "", "trainable_parameters"], [405, 6, 1, "", "training"], [406, 4, 1, "", "unfreeze"], [407, 4, 1, "", "update"], [408, 4, 1, "", "update_modules"]], "mlx.nn.init": [[432, 5, 1, "", "constant"], [433, 5, 1, "", "glorot_normal"], [434, 5, 1, "", "glorot_uniform"], [435, 5, 1, "", "he_normal"], [436, 5, 1, "", "he_uniform"], [437, 5, 1, "", "identity"], [438, 5, 1, "", "normal"], [439, 5, 1, "", "uniform"]], "mlx.nn.losses": [[452, 3, 1, "", "binary_cross_entropy"], [453, 3, 1, "", "cosine_similarity_loss"], [454, 3, 1, "", "cross_entropy"], [455, 3, 1, "", "gaussian_nll_loss"], [456, 3, 1, "", "hinge_loss"], [457, 3, 1, "", "huber_loss"], [458, 3, 1, "", "kl_div_loss"], [459, 3, 
1, "", "l1_loss"], [460, 3, 1, "", "log_cosh_loss"], [461, 3, 1, "", "margin_ranking_loss"], [462, 3, 1, "", "mse_loss"], [463, 3, 1, "", "nll_loss"], [464, 3, 1, "", "smooth_l1_loss"], [465, 3, 1, "", "triplet_loss"]], "mlx.optimizers": [[486, 3, 1, "", "AdaDelta"], [487, 3, 1, "", "Adafactor"], [488, 3, 1, "", "Adagrad"], [489, 3, 1, "", "Adam"], [490, 3, 1, "", "AdamW"], [491, 3, 1, "", "Adamax"], [492, 3, 1, "", "Lion"], [493, 3, 1, "", "MultiOptimizer"], [494, 3, 1, "", "Muon"], [507, 3, 1, "", "Optimizer"], [499, 3, 1, "", "RMSprop"], [500, 3, 1, "", "SGD"], [336, 5, 1, "", "clip_grad_norm"], [501, 5, 1, "", "cosine_decay"], [502, 5, 1, "", "exponential_decay"], [503, 5, 1, "", "join_schedules"], [504, 5, 1, "", "linear_schedule"], [505, 5, 1, "", "step_decay"]], "mlx.optimizers.Optimizer": [[495, 4, 1, "", "apply_gradients"], [496, 4, 1, "", "init"], [497, 6, 1, "", "state"], [498, 4, 1, "", "update"]], "mlx.utils": [[337, 5, 1, "", "tree_flatten"], [338, 5, 1, "", "tree_map"], [339, 5, 1, "", "tree_map_with_path"], [340, 5, 1, "", "tree_reduce"], [341, 5, 1, "", "tree_unflatten"]]}, "objnames": {"0": ["cpp", "function", "C++ function"], "1": ["cpp", "functionParam", "C++ function parameter"], "2": ["cpp", "templateParam", "C++ template parameter"], "3": ["py", "class", "Python class"], "4": ["py", "method", "Python method"], "5": ["py", "function", "Python function"], "6": ["py", "property", "Python property"]}, "objtypes": {"0": "cpp:function", "1": "cpp:functionParam", "2": "cpp:templateParam", "3": "py:class", "4": "py:method", "5": "py:function", "6": "py:property"}, "terms": {"": [0, 1, 2, 5, 6, 7, 48, 53, 65, 102, 123, 125, 159, 160, 163, 164, 167, 168, 170, 171, 182, 203, 212, 217, 220, 234, 250, 256, 260, 280, 283, 284, 305, 307, 325, 326, 327, 329, 335, 353, 356, 357, 373, 379, 386, 387, 393, 394, 396, 400, 401, 402, 406, 407, 408, 414, 485, 496, 497, 509, 512, 514, 515, 519, 520, 521, 522], "0": [0, 1, 2, 4, 5, 6, 7, 9, 10, 15, 19, 39, 46, 47, 50, 70, 75, 79, 84, 87, 88, 100, 103, 107, 108, 109, 110, 111, 112, 113, 121, 126, 127, 152, 154, 157, 173, 177, 182, 188, 198, 205, 206, 209, 210, 212, 214, 218, 244, 251, 259, 263, 265, 266, 271, 275, 280, 287, 291, 296, 297, 300, 304, 305, 319, 321, 322, 323, 324, 325, 326, 329, 333, 336, 337, 339, 340, 353, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 371, 374, 375, 378, 380, 381, 385, 386, 387, 410, 415, 417, 422, 426, 428, 430, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 443, 444, 446, 447, 448, 449, 452, 454, 456, 457, 461, 464, 465, 467, 468, 469, 470, 476, 477, 480, 483, 486, 487, 489, 490, 491, 492, 494, 496, 499, 500, 501, 502, 503, 504, 505, 509, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521], "00005": 5, "0001": 422, "0005": 443, "001": 487, "00364": 5, "01": [5, 381, 449, 490, 494], "0137595": 435, "015": 444, "0184009": 436, "02264": 434, "024": 515, "02765": 435, "0300242": 436, "044715": [371, 443], "0485873": 454, "05": [17, 190, 358, 374, 378, 380, 413], "0507": 470, "05202": 6, "06": [455, 465, 486], "0638": 461, "06450": 380, "0645099": 438, "06561": 502, "06675": 492, "07467": 413, "08": [17, 190, 453, 488, 489, 490, 491, 499], "08022": 378, "081": 505, "08415": 444, "08494": 374, "08619": 436, "08681": [388, 466], "09864": 6, "0999938": 503, "0999961": 501, "0d": 494, "0f": 0, "0j": [205, 207], "1": [0, 1, 2, 3, 4, 6, 7, 9, 15, 19, 29, 30, 39, 47, 50, 88, 107, 108, 109, 110, 111, 112, 113, 121, 126, 127, 148, 149, 150, 151, 154, 157, 158, 159, 162, 163, 166, 167, 168, 
169, 170, 171, 173, 185, 189, 196, 198, 203, 204, 205, 206, 207, 208, 210, 212, 214, 231, 235, 250, 252, 256, 260, 263, 264, 265, 271, 290, 292, 296, 297, 299, 312, 318, 319, 324, 325, 336, 337, 339, 340, 344, 353, 355, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 371, 372, 373, 374, 378, 379, 380, 382, 385, 410, 413, 414, 417, 421, 422, 428, 431, 433, 434, 435, 436, 437, 438, 439, 440, 441, 443, 444, 445, 447, 450, 451, 452, 453, 454, 455, 456, 457, 458, 460, 461, 463, 464, 465, 470, 471, 473, 474, 475, 477, 480, 483, 485, 486, 487, 488, 489, 490, 491, 492, 496, 499, 500, 501, 502, 503, 504, 505, 512, 513, 514, 515, 516, 517, 519, 520, 521, 522], "10": [0, 3, 6, 7, 198, 222, 280, 285, 338, 353, 396, 480, 503, 505, 512, 513, 516], "100": [2, 5, 6, 452, 504, 512, 515, 518, 522], "1000": [2, 157, 501, 512], "10000": 417, "101": 504, "1024": [1, 6], "105361": 452, "10_000": 5, "10x": 492, "11": 212, "12": [6, 9, 185, 198, 503], "1212": 486, "123": [513, 517], "12451": 434, "128": [157, 285, 353], "13": 9, "14": [9, 198], "15": [1, 9, 198, 212, 291, 340, 512], "150594": 433, "15268": 435, "16": [1, 154, 344, 355, 357, 378, 385, 387, 389, 483], "1606": 444, "1607": [378, 380], "16384": 185, "16506": 436, "168": 513, "17": [4, 9], "177208": 435, "18": 198, "1803": 374, "1908": [388, 466], "1910": 413, "191107": 433, "192": 513, "1985": 212, "1_000": 5, "1_all": 9, "1d": [0, 107, 111, 114, 283, 313, 494], "1e": [0, 5, 7, 17, 190, 358, 374, 378, 380, 381, 413, 453, 455, 465, 485, 486, 487, 488, 489, 490, 491, 496, 499, 501, 502, 503, 504, 505], "1e3": 512, "1st": 256, "2": [0, 1, 2, 4, 5, 6, 7, 9, 39, 108, 112, 121, 126, 127, 143, 149, 150, 151, 157, 159, 163, 166, 167, 168, 169, 170, 171, 173, 185, 196, 198, 202, 203, 204, 205, 206, 207, 208, 209, 210, 212, 213, 214, 217, 218, 224, 231, 256, 264, 265, 269, 296, 297, 316, 319, 321, 322, 323, 324, 336, 340, 344, 353, 355, 356, 357, 361, 364, 371, 381, 385, 386, 387, 413, 422, 431, 432, 433, 434, 435, 436, 437, 438, 439, 443, 454, 455, 457, 464, 465, 480, 483, 485, 486, 488, 489, 490, 496, 499, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522], "20": [185, 198, 212], "200": [6, 503, 515], "2002": 6, "2011": 488, "2012": [486, 499], "2015": [367, 489, 491], "2019": [6, 490], "2020": 6, "2021": 6, "20397": 452, "20_000": 6, "21": [6, 198, 505], "2104": 6, "223144": 452, "223404": 434, "225": 212, "225763": 461, "2302": 492, "23607": [212, 214], "24": 198, "24264": 212, "247": 6, "25": [9, 410, 431], "25211": 435, "256": [1, 7, 154], "256995": 461, "27": 4, "28": [185, 198], "2d": [0, 108, 112, 127, 256, 358, 367], "2nd": 256, "2x": 519, "3": [0, 1, 2, 4, 6, 9, 88, 109, 113, 121, 149, 151, 173, 196, 198, 204, 205, 206, 207, 208, 212, 214, 296, 297, 324, 336, 340, 357, 362, 365, 371, 387, 431, 434, 436, 443, 448, 487, 492, 509, 512, 513, 514, 516, 519, 520], "30": 487, "3118": 519, "32": [1, 6, 7, 96, 157, 256, 257, 344, 356, 357, 386, 387, 413, 512], "32mib": 333, "330": 6, "33333": 431, "33554432": 333, "348587": 454, "35": 9, "363207": 433, "36788": 512, "379159": 434, "380709": 438, "39": 6, "3d": [0, 2, 109, 113, 358, 368, 431], "3f": [2, 7, 512], "3x": 2, "4": [0, 1, 2, 6, 125, 154, 157, 173, 178, 198, 212, 256, 257, 285, 296, 324, 334, 340, 344, 355, 356, 357, 358, 378, 385, 386, 387, 411, 412, 430, 431, 433, 434, 435, 452, 512, 513, 514, 516, 520, 522], "4096": [2, 512, 515, 522], "40x": 1, "41421": 212, "417497": 439, "42": 341, "437": 6, "44": 6, "447214": 214, "458835": 435, "475": 6, "48095": 433, "4d": [1, 431, 494], 
"4m": 1, "5": [0, 1, 2, 5, 6, 9, 157, 198, 212, 259, 290, 296, 340, 355, 358, 366, 367, 368, 371, 375, 378, 385, 426, 431, 432, 435, 436, 443, 446, 464, 476, 480, 485, 494, 499, 501, 502, 512, 515, 516], "50": [0, 219], "500": [6, 522], "510826": 452, "512": [3, 6, 430, 522], "534422": 438, "539245": 452, "53947": 433, "54": 9, "55": 1, "550": 9, "559": 2, "5701": 486, "573409": 461, "57771": 214, "579": 6, "5f": 5, "6": [1, 2, 6, 121, 198, 212, 285, 296, 416, 430, 434, 443, 444, 448, 455, 465, 469, 499, 512, 516, 520], "61278": 433, "617261": 439, "628": 6, "633": 6, "639": 515, "64": [0, 1, 96, 125, 178, 256, 257, 334, 344, 411, 412], "64331": 436, "666329": 436, "66667": 431, "67326": 470, "676": 1, "690": 6, "6967": 435, "7": [2, 6, 9, 198, 212, 256, 516], "702": [371, 444], "707107": [205, 206], "71828": 512, "74166": 212, "74597": 212, "75": 431, "75596": 461, "75787": 435, "765166": 461, "773433": 461, "774": 2, "776856": 434, "793615": 436, "79854": 436, "7b": 6, "7m": 1, "8": [0, 1, 2, 6, 212, 256, 344, 356, 357, 378, 386, 387, 430, 453, 486, 487, 488, 489, 490, 491, 499, 512, 516, 520, 522], "8192": [6, 185], "84804": 212, "863726": 439, "883935": 439, "890597": 434, "894427": 214, "89613": 433, "8gb": 6, "8x": 1, "9": [4, 9, 212, 454, 486, 489, 490, 491, 492, 496, 502, 505, 519], "90041": 434, "912766": 434, "916291": 452, "95": [7, 494], "982273": 438, "99": [492, 499], "995016": 433, "999": [489, 490, 491], "A": [0, 2, 6, 8, 9, 10, 72, 86, 102, 149, 150, 153, 154, 155, 157, 182, 196, 197, 203, 205, 206, 207, 208, 210, 212, 214, 217, 220, 230, 231, 232, 236, 240, 256, 259, 260, 261, 263, 264, 265, 266, 267, 270, 271, 300, 304, 307, 325, 328, 329, 334, 335, 336, 337, 338, 339, 340, 341, 342, 353, 358, 367, 373, 374, 378, 380, 393, 397, 398, 401, 407, 408, 413, 419, 422, 430, 433, 434, 436, 444, 465, 466, 483, 485, 489, 491, 493, 495, 496, 498, 503, 512, 513, 514, 515, 517, 518, 519], "AS": 177, "And": [4, 6, 431], "As": [7, 39, 312, 353, 513], "At": [101, 324, 513], "But": [514, 522], "By": [6, 334, 401, 452, 513, 515, 519], "For": [0, 1, 2, 4, 6, 9, 39, 121, 157, 177, 196, 212, 256, 341, 353, 358, 367, 371, 389, 394, 403, 406, 412, 417, 422, 431, 433, 434, 435, 436, 452, 480, 485, 494, 509, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522], "If": [0, 1, 2, 4, 6, 9, 16, 17, 18, 19, 27, 28, 29, 30, 82, 86, 87, 88, 101, 103, 114, 117, 118, 119, 120, 126, 127, 130, 131, 132, 134, 135, 136, 145, 153, 155, 156, 157, 161, 165, 169, 170, 171, 175, 176, 177, 182, 190, 202, 203, 204, 212, 217, 220, 226, 230, 231, 232, 234, 235, 240, 244, 248, 251, 252, 254, 255, 260, 264, 265, 266, 275, 279, 287, 290, 298, 299, 300, 305, 309, 311, 312, 313, 316, 318, 319, 325, 326, 329, 331, 333, 334, 337, 338, 340, 358, 360, 361, 362, 363, 364, 365, 374, 380, 382, 394, 396, 406, 407, 408, 412, 414, 417, 419, 422, 431, 452, 454, 465, 487, 489, 490, 512, 513, 514, 515, 517, 518, 521, 522, 523], "In": [0, 1, 2, 6, 7, 39, 157, 231, 256, 338, 353, 367, 374, 483, 486, 488, 489, 491, 492, 495, 511, 512, 513, 514, 515, 517, 518, 521, 522], "It": [2, 6, 9, 135, 182, 289, 325, 336, 340, 353, 408, 412, 495, 507, 513, 517, 519, 521], "Its": [353, 514], "No": [2, 6, 206, 208, 513], "Not": [102, 247, 512], "ON": [3, 4, 9], "Of": 515, "On": [1, 512, 515, 518], "One": [158, 162, 169, 251, 281, 512, 514, 515, 517], "THE": 9, "That": [6, 265], "The": [0, 1, 2, 3, 4, 6, 7, 8, 9, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 48, 51, 53, 64, 65, 67, 72, 82, 86, 87, 89, 90, 91, 92, 93, 94, 95, 
96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 180, 181, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 197, 198, 199, 200, 201, 204, 205, 206, 207, 208, 210, 211, 212, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 238, 240, 241, 242, 243, 245, 247, 248, 249, 250, 251, 252, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 283, 284, 287, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 344, 346, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 366, 367, 368, 370, 372, 373, 374, 378, 379, 380, 382, 385, 386, 387, 389, 390, 394, 396, 400, 401, 402, 403, 406, 407, 408, 409, 411, 412, 413, 414, 417, 419, 422, 428, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 445, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 477, 480, 483, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 497, 499, 500, 501, 504, 507, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523], "Then": [5, 9], "There": [1, 353, 431, 512], "These": [1, 2, 102, 255, 313, 454, 522], "To": [0, 1, 2, 3, 5, 6, 7, 9, 210, 287, 353, 480, 485, 512, 513, 514, 515, 520], "With": 514, "_": [1, 3, 5, 6, 339, 353, 501, 502, 503, 504, 505, 509, 512, 518, 522], "__call__": [1, 6, 7, 353, 483, 514], "__init__": [2, 6, 7, 10, 11, 12, 31, 121, 129, 172, 342, 353, 483], "__main__": [2, 6], "__name__": [2, 6], "_a": 2, "_ext": 2, "_f": 212, "_in": [433, 434], "_out": [433, 434], "_p": 465, "_val": 447, "a1": 177, "a2": 177, "a_": 212, "a_max": [0, 101], "a_min": [0, 101], "a_ndim": 1, "a_shap": 1, "a_strid": 1, "a_view": 519, "ab": [0, 17, 190, 212, 325, 374, 378, 380, 388, 413, 444, 466, 512, 514], "abil": 513, "abl": [2, 4, 256, 517], "abort": 121, "about": [1, 2, 6, 7, 140, 236, 518, 522], "abov": [1, 2, 6, 256, 322, 353, 431, 513, 514, 515, 516, 517, 518, 522], "absolut": [0, 13, 17, 190, 443, 444, 464, 513], "acc": 340, "acceler": [4, 358], "accept": [513, 517], "access": [0, 6, 52, 353, 483, 496, 513, 518, 522], "accord": [0, 261, 330, 334, 409, 433, 434, 435, 436], "accordingli": 2, "accumul": [340, 413], "accuraci": 7, "accustom": 6, "achiev": [353, 513], "across": [1, 2, 9, 333, 374, 513], "act": [2, 460], "action": 353, "activ": [2, 9, 179, 367, 428, 430, 446, 466, 476, 477, 479, 512], "actual": [6, 19, 396, 483, 518], "ad": [0, 1, 2, 5, 9, 153, 339, 363, 364, 365, 378, 483, 486, 487, 488, 489, 490, 491, 499, 513, 518, 521], "adadelta": 485, "adafactor": 485, "adagrad": 485, "adam": [485, 491, 492, 503, 504], "adamax": 485, "adamw": [485, 492, 494], "adapt": [486, 487, 488, 513], "add": [0, 1, 2, 3, 4, 6, 15, 39, 147, 225, 251, 256, 360, 361, 362, 363, 364, 365, 514, 515, 517, 522], "add_argu": 6, "add_depend": 2, "add_execut": 4, "add_fun": 514, "add_librari": 2, "addit": [0, 2, 4, 6, 9, 14, 15, 149, 153, 155, 157, 220, 358, 363, 364, 365, 374, 380, 409, 413, 483, 515], "addmm": 0, "address": 2, "adjac": 367, "advanc": [6, 512], "advantag": 522, "advis": 
519, "affin": [358, 374, 378, 380, 382, 412], "after": [2, 6, 7, 29, 100, 173, 175, 178, 252, 256, 358, 374, 380, 389, 390, 394, 396, 403, 406, 407, 408, 409, 430, 464, 512, 513, 522], "after_1": 251, "after_2": 251, "after_i": 251, "after_n": 251, "afternoon": 6, "again": [6, 9, 353, 512], "against": [0, 4, 97], "aggreg": [409, 513], "ago": 6, "ai": 121, "aim": 513, "ainv": [209, 218], "albeit": 522, "algebra": 8, "algorithm": [431, 492], "alia": [104, 105, 371], "alibi": 353, "align": [203, 256, 373, 379], "align_corn": 431, "all": [0, 1, 2, 3, 7, 9, 17, 29, 39, 89, 90, 91, 102, 108, 109, 110, 112, 113, 121, 130, 131, 132, 150, 152, 160, 161, 164, 165, 168, 171, 177, 178, 205, 207, 217, 231, 251, 252, 279, 303, 333, 334, 353, 389, 390, 394, 397, 398, 399, 404, 406, 409, 422, 430, 431, 480, 483, 507, 509, 512, 516, 517, 518, 520, 523], "all_avg": 513, "all_reduce_grad": 513, "all_reduce_s": 333, "all_sum": 513, "allclos": [0, 1, 154], "alloc": [2, 180, 287, 290, 483], "allow": [0, 1, 2, 149, 151, 177, 178, 196, 336, 353, 408, 483, 507, 513, 516, 517, 520], "allow_col_major": [0, 106], "almost": [6, 513], "alon": [2, 519], "along": [0, 2, 27, 28, 102, 103, 117, 118, 119, 120, 130, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 177, 178, 185, 204, 212, 226, 255, 266, 275, 279, 298, 300, 304, 312, 313, 316, 317, 318, 319, 327, 353, 372, 414, 445], "alpha": [0, 2, 15, 256, 359, 369, 440, 441, 465, 467, 470, 490, 499], "alpha_": 2, "alreadi": [2, 3, 6, 513], "also": [0, 1, 2, 4, 6, 7, 8, 9, 12, 14, 92, 94, 95, 128, 137, 138, 142, 160, 164, 168, 171, 183, 184, 199, 200, 201, 225, 233, 241, 243, 247, 253, 256, 274, 278, 308, 334, 335, 346, 353, 393, 407, 409, 411, 412, 420, 442, 470, 472, 479, 485, 512, 513, 514, 515, 516, 517, 518, 519, 520, 523], "altern": 509, "although": 513, "alwai": [1, 87, 179, 188, 205, 207, 337, 513, 514, 515], "am": 6, "among": 2, "amount": [6, 181, 290, 355, 385, 514], "amus": 6, "an": [0, 1, 2, 3, 4, 6, 7, 9, 11, 16, 18, 31, 88, 89, 90, 91, 98, 106, 107, 108, 109, 110, 111, 112, 113, 129, 134, 135, 136, 145, 149, 151, 152, 153, 157, 173, 176, 186, 188, 191, 202, 212, 220, 242, 248, 249, 251, 254, 255, 256, 257, 266, 275, 277, 279, 280, 290, 291, 300, 303, 310, 312, 313, 316, 317, 321, 324, 329, 331, 332, 337, 338, 339, 340, 344, 353, 366, 371, 374, 379, 380, 382, 389, 409, 410, 412, 414, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 443, 467, 480, 485, 486, 494, 498, 502, 507, 509, 511, 512, 513, 514, 515, 516, 518, 519, 520, 521, 522, 523], "anaconda": 513, "anchor": 465, "angl": [124, 258, 381], "angular": [156, 417], "ani": [0, 1, 2, 6, 8, 19, 102, 121, 132, 333, 337, 338, 339, 340, 341, 353, 371, 389, 390, 393, 402, 412, 430, 431, 480, 494, 504, 511, 512, 513, 515, 518, 520, 521, 522], "anonym": 512, "anoth": [0, 97, 101, 196, 231, 308, 330, 344, 353, 389, 512, 514, 515, 516, 522], "anwywher": 9, "anyhow": 6, "anymor": 6, "anyth": [6, 325, 513, 518], "anytim": 518, "api": [1, 2, 88, 149, 151, 188, 371, 513, 514, 515], "aplu": 213, "app": 9, "append": [6, 231, 512, 518], "appl": [2, 6, 8, 9, 522], "appli": [0, 39, 156, 157, 177, 217, 338, 339, 340, 353, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 367, 368, 369, 371, 372, 374, 375, 376, 377, 378, 380, 381, 382, 383, 384, 385, 386, 387, 388, 390, 403, 410, 412, 413, 414, 415, 416, 418, 420, 421, 423, 424, 425, 426, 427, 428, 429, 431, 440, 441, 442, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 
473, 474, 475, 476, 477, 478, 480, 489, 490, 495, 498, 504, 507, 512, 513], "applic": [3, 9], "apply_fn": 390, "apply_gradi": 485, "apply_to_modul": [353, 394], "approach": [460, 515], "appropri": 512, "approx": 371, "approxim": [17, 371, 442, 443, 444], "apt": 9, "ar": [0, 1, 2, 5, 6, 7, 8, 9, 17, 19, 86, 88, 96, 97, 98, 101, 102, 110, 114, 121, 127, 132, 134, 135, 145, 152, 154, 157, 159, 160, 163, 164, 167, 168, 170, 171, 173, 177, 178, 182, 190, 191, 192, 193, 194, 195, 196, 197, 205, 206, 207, 208, 210, 212, 214, 220, 231, 250, 251, 252, 256, 257, 259, 260, 261, 265, 266, 267, 270, 271, 279, 285, 286, 303, 304, 312, 325, 328, 329, 334, 337, 338, 344, 358, 360, 361, 362, 363, 364, 365, 366, 367, 368, 374, 378, 380, 382, 396, 409, 412, 431, 452, 454, 455, 479, 483, 485, 492, 496, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522], "arang": [0, 1, 212, 266, 344, 431, 516, 519], "arbitrari": [337, 483, 513], "arbitrarili": [1, 102, 353, 511, 515, 520], "arc": 0, "arcco": 0, "arccosh": 0, "architectur": [6, 9, 236, 353, 408, 522], "archiv": 521, "arcsin": 0, "arcsinh": 0, "arctan": 0, "arctan2": 0, "arctanh": 0, "arg": [2, 6, 11, 19, 88, 129, 145, 149, 150, 151, 172, 188, 285, 286, 342, 517], "arg1": 196, "arg2": 196, "argmax": [0, 7], "argmin": 0, "argnam": [182, 325], "argnum": [2, 121, 182, 325, 515], "argpars": 6, "argpartit": 0, "argsort": 0, "argument": [1, 32, 69, 83, 88, 102, 145, 182, 325, 338, 339, 340, 353, 431, 509, 513, 514, 515, 517, 521, 522, 523], "argumentpars": 6, "ari": [89, 90, 91], "aris": 519, "arm": 9, "arm64": 9, "around": 6, "arr": [0, 282, 516], "arr_0": 521, "arrai": [0, 1, 2, 4, 6, 7, 8, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 124, 125, 126, 127, 130, 131, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 240, 241, 242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 308, 309, 310, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 336, 344, 353, 358, 379, 389, 396, 399, 404, 410, 431, 432, 433, 434, 435, 436, 437, 438, 439, 445, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 477, 480, 483, 486, 487, 488, 489, 490, 491, 492, 493, 494, 499, 500, 501, 502, 503, 504, 505, 512, 513, 514, 515, 518, 519, 520, 522], "array_equ": [0, 17, 190], "arrayfir": 8, "arxiv": [6, 374, 378, 380, 388, 413, 444, 466, 486, 492], "as_strid": 0, "ascend": [206, 208], "ask": [6, 513, 517], "assert": [1, 2, 154], "assign": [0, 2, 39, 483, 513], "associ": [2, 285, 286, 518], "assum": [0, 6, 96, 204, 206, 208, 214, 338, 353, 374, 513], "ast": 1, "astyp": [0, 1, 2, 6, 154, 389, 519], "asynchron": 88, "atleast": 0, "atleast_1d": 0, 
"atleast_2d": 0, "atleast_3d": 0, "atol": [0, 17, 190], "atom": [1, 154], "atomic_fetch_add_explicit": 1, "atomic_output": [1, 154], "attach": 2, "attempt": [102, 513], "attend": 409, "attent": [157, 394, 409, 422, 430], "attention_norm": 6, "attribut": [1, 10, 11, 12, 31, 172, 342, 402, 483, 507], "audio": 431, "auto": [0, 2, 4, 9, 513, 514], "autom": 515, "automat": [1, 2, 8, 154, 220, 513, 520, 521, 522], "autoregress": 6, "avail": [2, 5, 6, 7, 9, 11, 132, 133, 237, 290, 346, 513, 517, 522], "averag": [333, 355, 356, 357, 486, 487, 489, 490, 491, 513], "avgpool1d": 353, "avgpool2d": 353, "avgpool3d": 353, "avoid": [1, 2, 401, 512, 513], "awai": [2, 6], "awar": [512, 518], "ax": [0, 2, 16, 18, 27, 28, 83, 121, 147, 159, 160, 161, 163, 164, 165, 167, 168, 170, 171, 173, 189, 212, 215, 216, 230, 232, 234, 240, 251, 254, 279, 296, 297, 298, 303, 305, 309, 310, 316, 320, 326, 515], "axes_a": 0, "axes_b": 0, "axi": [0, 2, 6, 7, 16, 18, 27, 28, 29, 30, 34, 35, 36, 37, 42, 43, 44, 45, 58, 59, 60, 61, 62, 66, 75, 78, 79, 80, 84, 103, 117, 118, 119, 120, 127, 130, 147, 153, 155, 158, 162, 166, 167, 168, 169, 170, 171, 173, 185, 204, 210, 212, 226, 230, 232, 234, 240, 242, 251, 252, 254, 255, 260, 266, 275, 279, 298, 299, 300, 303, 304, 305, 309, 310, 312, 313, 317, 318, 319, 320, 324, 326, 327, 329, 355, 356, 357, 372, 385, 386, 387, 414, 445, 451, 453, 454, 458, 463, 465, 473, 474, 516], "axis1": [0, 47, 81, 127, 310, 319], "axis2": [0, 47, 81, 127, 310, 319], "axpbi": 2, "axpby_gener": 2, "axpby_general_": 2, "axpby_general_bfloat16": 2, "axpby_general_complex64": 2, "axpby_general_float16": 2, "axpby_general_float32": 2, "axpby_impl": 2, "b": [0, 1, 2, 3, 4, 6, 14, 15, 17, 25, 86, 92, 94, 95, 96, 137, 138, 142, 154, 157, 175, 177, 183, 184, 188, 189, 190, 198, 199, 200, 201, 204, 212, 215, 216, 225, 227, 229, 231, 233, 241, 243, 247, 250, 253, 256, 263, 274, 278, 308, 316, 325, 337, 339, 340, 372, 382, 414, 431, 445, 515, 516, 518, 519, 520, 521, 522], "b1": 177, "b2": 177, "b_": [373, 379], "b_stride": 1, "ba": [489, 491], "back": [6, 121, 237, 519], "backend": [1, 9, 132, 133, 517], "backward": [1, 512, 515], "bad": 518, "balanc": 460, "baltimor": 212, "bandwidth": [512, 513], "base": [0, 2, 4, 156, 222, 224, 253, 417, 430, 483, 485, 491, 507, 509, 512, 516], "base_idx": 1, "basi": 507, "basic": [5, 280, 515], "batch": [6, 15, 96, 157, 177, 178, 231, 264, 358, 360, 361, 362, 363, 364, 365, 367, 368, 373, 379, 409, 414, 431, 518], "batch_idx": 1, "batch_iter": [7, 485], "batch_siz": [7, 485], "batchnorm": 353, "becaus": [6, 179, 353, 512, 513, 514, 518], "becom": 132, "been": [0, 2, 6, 180, 518], "befor": [1, 2, 6, 9, 29, 154, 252, 333, 393, 430, 496, 513, 516, 518], "before_1": 251, "before_2": 251, "before_i": 251, "before_n": 251, "beforehand": 250, "beggin": 279, "begin": [87, 181, 203, 256, 373, 379, 428, 446, 457, 464, 470, 476, 477, 513], "behav": 121, "behavior": [165, 210, 264, 460, 516, 518], "behaviour": [121, 202, 203], "behind": 515, "being": [306, 353], "bell": 2, "below": [2, 9, 212, 321, 323, 344, 431, 513, 518], "bench": 2, "benchmark": [2, 512], "benefici": [367, 368, 518], "benefit": 513, "best": 513, "beta": [0, 2, 15, 125, 256, 358, 374, 378, 380, 464, 485, 489, 490, 491, 492], "beta_": 2, "beta_1": [487, 489, 490, 491, 492], "beta_2": [489, 490, 491, 492], "better": [333, 494, 515, 522], "between": [0, 2, 8, 101, 173, 430, 453, 456, 457, 460, 503, 513, 517, 518, 519, 522], "beyond": [279, 501, 504], "bfloat16": [2, 12, 185, 344, 519], "bfloat16_t": 2, "bia": [6, 125, 
153, 178, 256, 257, 338, 353, 360, 361, 362, 363, 364, 365, 373, 379, 380, 382, 394, 396, 406, 409, 412, 414, 489, 490, 491, 496, 515], "bias": [0, 125, 178, 256, 257, 373, 379, 394, 406, 409], "bias_correct": [489, 490], "bicub": 431, "big": [1, 333, 512], "bigger": [6, 487], "bilinear": [1, 431], "binari": [220, 282, 283, 284, 285, 286, 327, 428, 452, 477, 512, 517], "binary_cross_entropi": [353, 512], "bind": 517, "bit": [0, 125, 178, 199, 256, 257, 278, 334, 344, 389, 411, 412, 413], "bitwis": [0, 92, 93, 94, 95, 199, 278], "bitwise_and": 0, "bitwise_invert": 0, "bitwise_or": 0, "bitwise_xor": 0, "bla": 9, "block": [0, 2, 6, 96, 430], "block_masked_mm": 0, "block_siz": [0, 96], "bn": 358, "bodi": [1, 154], "bool": [0, 1, 2, 16, 17, 18, 27, 28, 34, 35, 36, 37, 42, 43, 44, 45, 58, 59, 60, 61, 62, 66, 79, 80, 82, 84, 86, 102, 106, 110, 117, 118, 119, 120, 132, 133, 149, 151, 154, 156, 177, 178, 190, 196, 202, 203, 212, 216, 217, 218, 220, 226, 230, 232, 234, 235, 237, 240, 254, 257, 305, 309, 326, 334, 358, 360, 361, 362, 363, 364, 365, 373, 374, 378, 379, 380, 382, 389, 393, 394, 396, 401, 403, 406, 407, 408, 409, 412, 414, 417, 422, 430, 431, 452, 455, 487, 489, 490, 493, 494, 500], "bool_": [12, 344], "boolean": [0, 17, 86, 157, 190, 191, 192, 193, 194, 195, 196, 227, 228, 229, 344, 405, 516], "both": [1, 2, 14, 92, 94, 95, 137, 138, 142, 183, 184, 196, 199, 200, 201, 212, 225, 233, 241, 243, 247, 253, 260, 274, 278, 308, 334, 355, 356, 357, 378, 379, 385, 386, 387, 485, 512, 513, 514, 515, 520, 522], "bottom": 431, "bound": [0, 267, 270, 271, 371, 439, 512, 516, 522], "boundari": 503, "bracket": 6, "brain": 344, "break": 519, "bregler": 367, "bridg": 513, "broadcast": [0, 2, 14, 17, 92, 94, 95, 97, 98, 101, 137, 138, 142, 157, 176, 183, 184, 190, 199, 200, 201, 225, 231, 233, 241, 243, 247, 253, 255, 259, 260, 264, 267, 270, 271, 274, 278, 308, 313, 330, 409], "broadcast_arrai": [0, 2], "broadcast_to": 0, "broadcasted_input": 2, "brought": 8, "btl_tcp_if_includ": [513, 517], "btl_tcp_link": [513, 517], "buffer": [1, 2, 179, 519], "bui": 6, "build": [1, 3, 4, 6, 8, 435, 483, 512, 514], "build_ext": [2, 9], "build_shared_lib": [2, 9], "built": [1, 2, 4, 9, 518], "bundl": 6, "byte": [53, 64, 179, 180, 181, 287, 290, 291, 333, 344, 517], "c": [0, 1, 2, 6, 15, 212, 358, 360, 361, 362, 363, 364, 365, 367, 368, 378, 379, 519, 520, 522], "c_": [379, 492], "c_in": [107, 108, 109, 110, 111, 112, 113], "c_out": [107, 108, 109, 110, 111, 112, 113], "c_pad": 1, "c_t": [379, 492], "cabl": 513, "cach": [6, 9, 100, 179, 180, 287, 512], "calcul": [212, 213, 452, 455, 461, 487], "call": [2, 3, 6, 7, 32, 100, 132, 135, 175, 181, 188, 333, 353, 370, 394, 406, 411, 419, 483, 485, 496, 512, 513, 514, 515, 517, 518], "callabl": [102, 121, 149, 151, 154, 182, 188, 197, 325, 328, 329, 334, 335, 337, 338, 339, 340, 389, 390, 393, 401, 414, 419, 430, 432, 433, 434, 435, 436, 437, 438, 439, 486, 487, 488, 489, 490, 491, 492, 493, 494, 499, 500, 501, 502, 503, 504, 505], "can": [1, 2, 3, 4, 6, 8, 9, 14, 19, 69, 83, 87, 88, 92, 94, 95, 102, 127, 128, 129, 137, 138, 142, 145, 149, 150, 157, 177, 183, 184, 188, 199, 200, 201, 212, 225, 233, 241, 243, 247, 253, 259, 260, 267, 270, 271, 274, 278, 283, 291, 308, 319, 324, 325, 340, 353, 356, 357, 370, 371, 386, 387, 393, 406, 411, 419, 431, 454, 480, 483, 485, 495, 496, 509, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523], "cannot": [6, 101, 516, 519], "captur": [2, 3, 102, 121, 238, 239, 353, 512], "care": [6, 513, 514, 517, 518], "carefulli": [512, 
514], "carri": 2, "cartesian": 235, "case": [2, 6, 130, 131, 132, 134, 135, 136, 160, 164, 166, 168, 169, 170, 171, 173, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 213, 214, 215, 216, 217, 218, 231, 277, 303, 324, 356, 357, 367, 386, 387, 428, 446, 464, 470, 476, 477, 495, 496, 512, 513, 514, 515, 517, 520, 521, 522, 523], "cast": [2, 38, 169, 170, 171, 220, 333, 389, 401, 519], "caster": 2, "categor": 6, "categori": [12, 196, 344], "caus": [353, 512, 518], "causal": [6, 157], "caution": 87, "cd": [3, 9], "cdf": [261, 371, 442], "cdot": [444, 453, 456, 472], "ceil": 0, "ceildiv": 1, "cell": 379, "celu": 353, "center": 161, "certain": [403, 512], "chang": [87, 88, 102, 149, 151, 188, 289, 327, 407, 412, 431, 457, 464, 512, 519], "channel": [1, 107, 108, 109, 110, 111, 112, 113, 358, 360, 361, 362, 363, 364, 365, 367, 368], "channel_idx": 1, "char": 2, "charact": 337, "check": [0, 2, 9, 86, 133, 196, 206, 208, 237, 396, 407, 408, 513, 514, 515, 516], "checklist": [513, 517], "checkout": [3, 512], "checkpoint": [430, 485], "chen": 492, "child": 408, "children": 353, "chip": 9, "choleski": 203, "choos": [6, 156, 417, 517], "chosen": 140, "clamp": 173, "clang": 9, "clarifi": 513, "clariti": 515, "class": [2, 6, 7, 10, 11, 12, 31, 121, 129, 172, 342, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 483, 486, 487, 488, 489, 490, 491, 492, 493, 494, 499, 500, 507], "class_pred": 334, "classif": [435, 436], "classifi": 7, "classmethod": [411, 412], "clear": 100, "click": 9, "clip": [0, 336, 452, 487], "clip_threshold": 487, "clipped_grad": 336, "clone": 9, "close": [5, 8, 9, 17, 190], "closer": 338, "cmake": [3, 4, 9], "cmake_arg": [3, 9], "cmake_build_typ": 9, "cmake_current_list_dir": 2, "cmake_cxx_standard": 4, "cmake_cxx_standard_requir": 4, "cmake_host_system_processor": 9, "cmake_library_output_directori": 2, "cmake_minimum_requir": 4, "cmakebuild": 2, "cmakeextens": 2, "cmakelist": [2, 4], "cmdclass": 2, "co": [0, 2, 121, 422, 515], "code": [1, 154, 512, 513, 514, 518], "coeffici": [2, 486, 487, 489, 490, 491, 492], "col": [106, 321], "cold": 9, "collect": [338, 339, 511], "column": [106, 152, 186, 205, 206, 256], "com": 9, "combin": [6, 217, 340], "come": [2, 6, 513, 515], "command": [2, 3, 4, 9, 513, 517], "command_buff": 2, "common": [485, 512, 518], "commonli": [7, 407, 480, 512], "commun": [8, 129, 132, 133, 333, 517], "communication_typ": 333, "compact": 211, "compar": [2, 86, 512], "comparison": [17, 142, 183, 184, 200, 201, 247], "compat": [6, 149, 151, 157, 188, 260, 264, 371, 521], "compil": [0, 1, 3, 4, 8, 9, 128, 141, 154, 513, 514, 515, 518], "compiled_fun": [512, 514], "compiled_grad_fn": 512, "complement": 93, "complet": [5, 6, 9, 407, 408, 514, 515, 522], "complex": [2, 51, 67, 104, 105, 167, 168, 169, 170, 171, 187, 205, 206, 207, 208, 265, 272, 337, 344, 353, 408, 512, 514, 515], "complex64": [2, 12, 205, 207, 344], "complex64_t": 2, "complexflo": 12, "compon": [2, 4, 6, 161, 217], "compos": [8, 353, 512, 515, 520], "composit": 520, "compress": 286, "compromis": 6, "comput": [0, 1, 2, 5, 6, 7, 8, 9, 117, 118, 119, 120, 121, 125, 140, 148, 
156, 182, 197, 198, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 215, 216, 218, 225, 226, 234, 250, 256, 274, 298, 305, 306, 316, 325, 326, 328, 335, 353, 358, 373, 374, 378, 379, 380, 394, 407, 412, 413, 417, 430, 433, 434, 435, 436, 443, 444, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 485, 486, 487, 489, 490, 491, 492, 498, 512, 513, 514, 515, 520, 522], "computation": 518, "compute_encod": 2, "compute_uv": 217, "concaten": [0, 6, 130, 333], "concept": 483, "concis": 6, "concret": [2, 373, 379, 382, 414, 518, 522], "conda": [9, 513], "condit": [0, 330, 512, 522], "config": [2, 4, 513], "configu": 485, "configur": [125, 513], "confirm": [513, 517], "confus": 7, "conj": 105, "conjug": [0, 104], "connect": [494, 513, 517], "consecut": [156, 256, 417], "consequ": 6, "consid": [6, 17, 86, 106, 190, 337, 338, 339, 374, 493, 511, 513], "consider": 512, "const": [0, 1, 2, 455], "constant": [0, 2, 6, 9, 121, 153, 155, 251, 353, 358, 374, 380, 413, 455, 465, 499, 501, 512, 514, 519], "constant_valu": 251, "constitut": 338, "construct": [0, 2, 7, 46, 126, 176, 210, 248, 317, 331], "consult": 513, "consum": 518, "contain": [2, 6, 9, 29, 30, 72, 102, 127, 140, 166, 167, 168, 177, 178, 205, 206, 212, 227, 228, 229, 256, 300, 330, 333, 336, 353, 393, 395, 396, 402, 430, 461, 480, 483, 512, 515], "content": [9, 393, 512], "context": [307, 514], "contigu": [0, 1, 2, 87, 154], "continu": [359, 440, 513, 515], "contract": [0, 140], "contribut": 2, "contriv": [515, 522], "control": [0, 381, 509, 518], "conv": 114, "conv1d": [0, 353], "conv2d": [0, 353], "conv3d": [0, 353], "conv_gener": 0, "conv_transpose1d": 0, "conv_transpose2d": 0, "conv_transpose3d": 0, "conveni": [1, 2, 7, 196], "convent": [19, 114, 139, 140, 431], "convers": 8, "convert": [0, 1, 2, 82, 89, 90, 91, 124, 173, 258, 411, 412, 518, 519, 520], "convolut": [0, 107, 108, 109, 110, 111, 112, 113, 114, 360, 361, 362, 363, 364, 365, 367, 368, 494], "convolv": [107, 108, 109, 110, 111, 112, 113], "convtranspose1d": 353, "convtranspose2d": 353, "convtranspose3d": 353, "coordin": [0, 235], "copi": [0, 1, 2, 6, 8, 106, 252, 299, 519], "core": [1, 2, 3, 4, 5, 6, 7, 334, 353, 355, 356, 357, 358, 378, 385, 386, 387, 396, 399, 401, 404, 431, 432, 433, 434, 435, 436, 437, 438, 439, 452, 454, 461, 480, 483, 485, 512, 513, 519, 520], "corner": 431, "correct": [2, 9, 489, 490, 491, 516, 518], "correctli": [39, 513], "correl": [110, 367], "correspond": [0, 1, 2, 16, 18, 82, 101, 125, 127, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 205, 206, 230, 232, 240, 254, 296, 297, 309, 316, 324, 329, 338, 493, 513, 515, 517], "cos_first": 422, "cosh": [0, 460], "cosin": [0, 20, 21, 115, 116, 453, 501, 503, 515], "cosine_decai": [485, 503], "cosine_similarity_loss": 353, "cost": [9, 487, 513, 518], "costli": 518, "cot": 1, "cot_index": 1, "cotan": 121, "cotang": [1, 2, 121, 328], "could": [6, 353, 516], "count": [353, 503], "counter": 509, "cours": 515, "coursera": 499, "cout": [4, 514], "cov": 264, "covari": [264, 358], "cover": 2, "cpp": [2, 4], "cpu": [8, 205, 206, 207, 208, 214, 344, 522], "cpython": 2, "crash": [87, 512], "creat": [0, 1, 2, 6, 9, 87, 132, 152, 186, 307, 353, 483, 485, 503, 512, 513, 514, 516, 517, 519], "create_additive_causal_mask": 6, "cross": [7, 110, 452, 454], "cross_entropi": [7, 353], "crowd": 6, "cry": 6, "cubic": 431, "cummax": 0, "cummin": 0, "cumprod": 0, "cumsum": 0, "cumul": [0, 87, 117, 118, 119, 120, 226], "current": [6, 8, 9, 87, 96, 109, 112, 113, 136, 180, 236, 256, 340, 
353, 487, 513, 518], "current_binary_dir": 2, "custom": [8, 121, 154, 430], "custom_decod": 430, "custom_encod": 430, "custom_funct": 1, "custom_kernel_myexp_float": 1, "custom_tim": 2, "cvpr": 367, "cxx": 4, "cycl": 511, "d": [0, 1, 2, 6, 109, 113, 126, 127, 157, 189, 212, 231, 235, 250, 312, 319, 321, 322, 323, 341, 362, 365, 368, 373, 379, 414, 486, 489, 491, 522], "d1": 522, "d2": 522, "d2fdx2": 515, "d_i": 382, "dampen": 500, "darwin": 2, "data": [0, 2, 7, 8, 11, 19, 134, 152, 169, 170, 176, 186, 219, 244, 248, 261, 270, 319, 321, 327, 331, 368, 432, 433, 434, 435, 436, 437, 438, 439, 512, 513, 514, 516, 519], "dataset": [5, 513, 518], "datatyp": 53, "dbuild_shared_lib": 9, "dcmake_build_typ": [4, 9], "ddof": [0, 79, 84, 305, 326], "deal": 512, "deb": 9, "debug": [1, 3, 513, 517], "debugg": 8, "decai": [487, 490, 492, 494, 500, 501, 502, 505], "decay_r": [487, 502, 505], "decay_step": 501, "decent": 7, "decid": [338, 393], "decim": [0, 70, 280], "declar": 2, "decltyp": 1, "decod": 430, "decomposit": [202, 203, 213, 217], "decor": [1, 121], "decoupl": 490, "dedic": 513, "deep": [358, 433, 434, 435, 436], "def": [1, 2, 5, 6, 7, 121, 149, 151, 154, 325, 353, 483, 512, 513, 514, 515, 516, 518, 519, 522], "default": [1, 2, 9, 15, 16, 17, 18, 19, 27, 28, 29, 30, 86, 87, 96, 102, 103, 107, 108, 109, 110, 111, 112, 113, 121, 122, 123, 125, 126, 127, 130, 131, 132, 134, 135, 136, 149, 151, 152, 154, 156, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 173, 177, 178, 182, 185, 186, 190, 198, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 230, 232, 234, 235, 240, 244, 248, 251, 252, 254, 256, 257, 259, 260, 261, 263, 264, 265, 266, 267, 269, 270, 271, 275, 277, 280, 287, 288, 289, 290, 291, 299, 300, 303, 304, 305, 307, 309, 311, 316, 318, 319, 320, 321, 322, 323, 324, 325, 326, 329, 331, 333, 334, 337, 344, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 369, 372, 373, 375, 378, 379, 381, 382, 385, 386, 387, 389, 394, 396, 401, 403, 406, 407, 408, 409, 410, 411, 412, 414, 417, 422, 426, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 445, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 483, 486, 487, 488, 489, 490, 491, 492, 494, 499, 500, 501, 509, 511, 512, 513, 514, 515, 517, 519, 521, 523], "default_devic": 523, "default_stream": 523, "defin": [1, 2, 5, 6, 7, 9, 121, 135, 154, 178, 204, 212, 257, 334, 337, 517, 519], "definit": [121, 202, 203, 264], "degre": [0, 258, 465], "deleg": 493, "delta": [457, 486], "delv": [435, 436], "demonstr": 519, "denomin": [378, 453, 486, 488, 489, 490, 491, 499], "dens": [235, 522], "depend": [0, 2, 3, 4, 5, 9, 82, 212, 373, 379, 414, 512, 513, 516, 521, 522], "depth": [337, 357, 362, 365, 368, 387, 515], "dequant": [0, 256], "deriv": [2, 514, 515, 518], "descend": 391, "descent": [500, 512, 518], "describ": [2, 518], "descript": [2, 4, 6, 344], "design": [1, 5, 8, 509, 522], "destin": [0, 2, 63, 136, 242, 255, 337, 485, 514], "destroi": 512, "detach": 515, "detail": [1, 2, 11, 287, 353, 367, 417, 422, 431, 433, 434, 435, 436, 486, 488, 489, 491, 492, 513, 516, 520], "detect": 512, "determin": [0, 2, 127, 264, 340, 344, 400, 521], "dev": [2, 9], "develop": [2, 4, 9], "developer_dir": 9, "deviat": [0, 265, 305, 433, 435, 438], "devic": [1, 2, 8, 9, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 
81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 122, 123, 124, 125, 126, 127, 130, 131, 134, 135, 136, 137, 138, 139, 142, 143, 144, 146, 147, 148, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 183, 184, 185, 186, 187, 189, 190, 191, 192, 193, 194, 195, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 280, 281, 288, 289, 290, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 326, 327, 330, 331, 332, 342, 522, 523], "device_info": 291, "devicetyp": 10, "df": 519, "dfdx": [514, 515, 516], "dft": [158, 159, 160, 162, 163, 164, 169, 170, 171], "dhwc": 368, "diag": [0, 217], "diagon": [0, 46, 126, 152, 319, 321, 322, 323], "dict": [88, 102, 145, 150, 220, 236, 283, 284, 285, 334, 336, 337, 341, 399, 404, 407, 408, 483, 495, 496, 498, 511, 515, 521], "dict_kei": [338, 496], "dictionari": [6, 102, 149, 188, 220, 236, 283, 284, 336, 337, 340, 353, 393, 402, 407, 408, 497, 511, 521], "did": 6, "diff": 2, "differ": [8, 165, 196, 205, 207, 308, 327, 464, 493, 494, 512, 513, 514, 515, 517], "differenti": [1, 8, 359, 440], "difficult": 515, "difficulti": [433, 434], "dilat": [0, 107, 108, 109, 110, 111, 112, 113, 360, 361, 362, 363, 364, 365], "dim": [1, 6, 156, 157, 370, 374, 378, 380, 409, 411, 413, 417, 422, 430], "dimens": [0, 1, 2, 6, 16, 18, 27, 28, 65, 72, 82, 89, 90, 91, 102, 108, 109, 110, 112, 113, 127, 147, 156, 157, 167, 168, 170, 171, 173, 177, 178, 189, 202, 203, 205, 206, 207, 208, 209, 210, 212, 213, 214, 217, 218, 230, 231, 232, 234, 240, 254, 255, 256, 260, 269, 305, 309, 313, 316, 320, 326, 358, 360, 361, 362, 363, 364, 365, 367, 368, 372, 373, 374, 378, 379, 380, 409, 413, 414, 417, 430, 431, 445, 454, 494, 512, 515], "dimension": [31, 153, 155, 158, 159, 160, 162, 163, 164, 169, 170, 171, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 370, 382, 385, 386, 387, 411, 412, 422, 516, 519], "dir": 4, "direct": [6, 391, 492, 522], "directli": [2, 6, 87], "directori": [2, 4, 6, 9], "disabl": [128, 287, 333, 512, 513], "disable_compil": 512, "disappoint": 6, "discard": [6, 337], "discov": [9, 513], "discoveri": 492, "discret": [114, 158, 159, 160, 162, 163, 164, 169, 170, 171, 370, 411], "discuss": 2, "disk": 6, "dispatch": 2, "dispatch_thread": 2, "dispatchthread": 1, "displai": 353, "distanc": [6, 465], "distribut": [8, 9, 259, 260, 261, 263, 264, 265, 270, 271, 333, 382, 433, 434, 435, 436, 438, 439, 455, 458, 463, 465, 480], "distributed_config": [513, 517], "diverg": 458, "divid": [0, 2, 39, 175, 256, 274, 513], "divis": [0, 137, 175, 256, 274], "divisor": [305, 326], "divmod": 0, "dloss_dw": 515, "dloss_dx": 515, "dlpack": 519, "dlvalu": 325, "dmlx_build_cpu": 9, "dmlx_build_cuda": 9, "dmlx_build_gguf": 9, "dmlx_build_safetensor": 9, "dmlx_metal_debug": 3, "dmlx_metal_jit": 9, "do": [0, 2, 6, 9, 210, 327, 353, 395, 406, 480, 483, 512, 513, 514, 515, 518], "doc": [2, 7, 513, 517], "document": [2, 3, 4, 69, 83, 154, 283, 284, 344, 512, 513, 514, 515, 
516], "doe": [0, 2, 3, 6, 9, 179, 327, 336, 353, 512, 513, 516, 517, 518, 519], "doesn": [2, 353, 514], "domain": 270, "don": [1, 9, 106, 512, 522], "done": [353, 366, 413, 512, 513, 518, 519], "dot": [150, 209, 218, 316, 337, 398, 409, 513], "doubl": [0, 6, 344], "doubt": 6, "down": [6, 336], "download": 9, "downsampl": [355, 356, 357, 385, 386, 387], "dparam": 325, "dpkg": 9, "draw": 260, "driver": 9, "drop": 393, "dropout": [353, 367, 368, 403, 430, 512], "dropout2d": 353, "dropout3d": 353, "dst": 136, "dt": 143, "dtype": [0, 1, 2, 6, 12, 19, 31, 38, 39, 82, 85, 134, 135, 152, 154, 172, 173, 176, 186, 196, 198, 205, 206, 207, 208, 212, 214, 219, 248, 261, 263, 264, 265, 267, 270, 271, 296, 297, 319, 321, 324, 327, 331, 333, 344, 401, 431, 432, 433, 434, 435, 436, 437, 438, 439, 452, 454, 461, 501, 502, 503, 504, 505, 512, 513, 514, 515, 516, 519, 520, 521], "dtypecategori": [196, 344], "dual": 460, "duchi": 488, "duplic": 514, "dure": [3, 102, 290, 366, 367, 368, 431, 519], "dx": 121, "dy": 121, "dyld": 513, "dyld_library_path": 513, "dylib": 2, "dynam": [0, 514, 518], "e": [2, 7, 9, 121, 143, 154, 177, 178, 197, 292, 358, 360, 361, 362, 363, 364, 365, 367, 368, 374, 378, 380, 394, 413, 450, 451, 473, 474, 479, 485, 488, 494, 512, 514, 518, 523], "e5": 344, "e8": 344, "each": [0, 1, 2, 72, 88, 125, 145, 156, 178, 196, 202, 203, 205, 206, 207, 208, 209, 213, 217, 218, 231, 235, 251, 256, 257, 260, 275, 285, 286, 300, 317, 320, 327, 329, 330, 367, 368, 370, 373, 374, 379, 414, 417, 430, 452, 454, 509, 512, 513, 514, 517, 518], "eager": 518, "earli": 367, "eas": 6, "easi": [2, 353, 493], "easier": [1, 150, 518], "easiest": 513, "edg": [101, 251, 431, 512], "edit": [9, 408], "effect": [367, 512, 518], "effici": [6, 8, 177, 367, 417, 513, 518, 520], "eigenvalu": [205, 206, 207, 208], "eigenvector": [205, 206], "einstein": [139, 140], "einsum": 140, "either": [9, 14, 69, 82, 83, 92, 94, 95, 101, 137, 138, 142, 175, 183, 184, 188, 199, 200, 201, 212, 225, 231, 233, 241, 243, 247, 253, 274, 278, 308, 325, 356, 357, 386, 387, 419, 431, 435, 436, 513, 517, 519], "elem": [1, 154], "elem_to_loc": [1, 2], "element": [0, 1, 2, 13, 14, 20, 21, 22, 23, 24, 25, 26, 29, 74, 87, 92, 93, 94, 95, 99, 115, 116, 117, 118, 119, 120, 125, 137, 138, 142, 143, 144, 146, 148, 152, 174, 175, 178, 183, 184, 190, 191, 192, 193, 194, 195, 199, 200, 201, 221, 222, 223, 224, 225, 226, 227, 228, 229, 233, 235, 241, 243, 245, 247, 252, 253, 256, 257, 273, 274, 275, 278, 279, 281, 292, 293, 294, 295, 301, 302, 308, 312, 314, 315, 318, 325, 327, 330, 359, 366, 367, 368, 373, 377, 379, 388, 410, 414, 417, 421, 440, 447, 448, 450, 451, 466, 467, 469, 472, 473, 474, 475, 512, 515, 516], "elementwis": [1, 104, 105], "elif": 6, "ellipsi": 516, "elman": 414, "els": [0, 2, 6, 353, 394, 513, 518], "elsewher": [321, 516], "elu": [353, 470], "emb": [6, 370, 411, 422], "embed": [6, 334, 353, 411, 417, 422, 453, 494], "empti": [264, 337], "en0": 517, "en2": 513, "enabl": [3, 6, 9, 102, 141, 333, 494, 500], "enclos": 514, "encod": [2, 156, 417, 422, 430, 454], "encount": [2, 515], "end": [127, 203, 237, 256, 279, 373, 379, 428, 446, 457, 464, 470, 476, 477, 501, 504, 514], "end_axi": [0, 50, 173], "end_encod": 2, "endif": 2, "endl": [4, 514], "endswith": 394, "enhanc": [6, 417, 518], "enough": [2, 518], "ensur": [0, 1, 2, 9, 154, 336, 460, 513, 514], "ensure_row_contigu": [1, 154], "enter": 6, "entir": [16, 18, 27, 28, 230, 232, 234, 240, 254, 305, 309, 326, 367, 368], "entri": [0, 266, 324, 367, 368], "entropi": [7, 452, 454], 
"enumer": 353, "environ": [9, 128, 141, 513], "ep": [5, 153, 155, 358, 374, 378, 380, 413, 453, 455, 465, 485, 486, 487, 488, 489, 490, 491, 499], "epoch": 7, "epsilon": [358, 374, 378, 380, 413, 453, 455, 486, 488, 489, 490, 491, 499], "epsilon_1": 487, "epsilon_2": 487, "equal": [0, 1, 17, 29, 86, 152, 184, 190, 201, 247, 252, 267, 300, 333, 378, 382], "equal_nan": [0, 17, 86, 190], "equat": [139, 140, 215, 216], "equival": [0, 2, 32, 69, 83, 135, 138, 175, 178, 185, 312, 359, 369, 371, 375, 376, 377, 383, 384, 408, 410, 412, 415, 416, 418, 420, 423, 424, 425, 426, 427, 429, 513], "erf": [0, 144, 512], "erfinv": 0, "error": [0, 2, 9, 132, 143, 144, 291, 300, 371, 442, 443, 444, 460, 462, 512, 515, 517, 519], "error_norm": 5, "estim": 491, "eta": 492, "etc": [2, 256, 353, 431, 513], "ethernet": [513, 517], "eval": [2, 3, 5, 6, 7, 353, 483, 485, 512, 513, 514, 515, 518, 520], "eval_cpu": 2, "eval_fn": 7, "eval_gpu": 2, "evalu": [2, 6, 7, 8, 88, 136, 145, 197, 290, 328, 353, 392, 403, 483, 485, 512, 514, 520], "even": [1, 2, 6, 102, 165, 205, 207, 512, 513, 514, 518, 519], "evenli": [0, 219], "everi": [1, 256, 338, 485, 505, 515, 517], "everyth": [6, 513], "everywher": 0, "exact": [443, 444], "exactli": [2, 6, 156, 396, 515], "exampl": [0, 3, 4, 5, 6, 7, 9, 19, 39, 88, 121, 132, 149, 150, 151, 154, 157, 173, 188, 196, 198, 205, 206, 207, 208, 212, 214, 296, 297, 307, 312, 324, 336, 339, 340, 353, 355, 356, 357, 358, 378, 385, 386, 387, 394, 396, 403, 406, 431, 432, 433, 434, 435, 436, 437, 438, 439, 452, 454, 461, 480, 485, 496, 501, 502, 503, 504, 505, 509, 515, 516, 517, 518, 519, 520, 521], "exce": [333, 336], "exceed": 290, "except": [8, 121, 152, 166, 167, 169, 170, 171, 290, 344, 374, 396, 514, 516, 519], "exclud": [255, 313], "exclus": [0, 87, 95], "execut": [2, 4, 9, 89, 90, 91, 181, 198, 513, 519, 522], "execute_process": 4, "exist": [2, 3, 6, 394, 406, 513], "exp": [0, 1, 88, 148, 154, 225, 230, 261, 298, 359, 369, 421, 440, 441, 458, 470, 471, 475, 512, 514, 522], "exp_elementwis": [1, 154], "expand_dim": 0, "expect": [6, 360, 361, 362, 363, 364, 365, 366, 367, 368, 422, 430, 455, 512, 513, 516], "expens": 430, "expensive_fun": 518, "experiment": [88, 149, 151, 188, 519], "explain": 2, "explicit": [2, 496, 509, 519], "explicitli": [177, 353, 509, 517], "explor": 9, "expm1": 0, "exponenti": [0, 146, 148, 359, 369, 418, 440, 441, 470, 502], "exponential_decai": 485, "export": [8, 9, 149, 150, 188], "export_funct": 514, "ext_modul": 2, "extend": [2, 251], "extens": [8, 220, 238, 400, 521], "extern": 519, "extra": [1, 338, 339, 514], "extract": [0, 6, 46, 126, 127, 296, 353, 393, 483], "extras_requir": 2, "extrem": [516, 518], "ey": [0, 6, 209, 218], "f": [0, 2, 5, 7, 121, 212, 353, 379, 490, 512, 519], "f_jvp": 121, "f_t": 379, "f_vjp": 121, "f_vmap": 121, "face": 6, "factor": [2, 15, 185, 202, 203, 210, 211, 214, 431, 454, 502, 505], "fail": [512, 513, 517], "fall": [2, 121], "fallback": 493, "fals": [0, 1, 2, 6, 16, 17, 18, 27, 28, 34, 35, 36, 37, 42, 43, 44, 45, 58, 59, 60, 61, 62, 66, 79, 80, 84, 86, 102, 106, 110, 117, 118, 119, 120, 132, 149, 151, 154, 177, 178, 190, 196, 202, 203, 212, 216, 217, 218, 220, 226, 230, 232, 234, 235, 240, 254, 305, 309, 326, 330, 334, 337, 338, 339, 340, 344, 374, 378, 380, 382, 394, 396, 406, 409, 412, 417, 422, 430, 431, 452, 455, 487, 489, 490, 500, 514, 519], "famili": 6, "fan": [433, 434, 435, 436], "fan_in": [433, 434, 435, 436], "fan_out": [433, 434, 435, 436], "far": 485, "fast": [1, 8, 371, 444, 513, 522], "faster": [1, 2, 9, 138, 177, 
178, 442, 452, 512, 513, 515], "featur": [1, 8, 107, 108, 109, 110, 111, 112, 113, 156, 358, 373, 374, 378, 379, 380, 382, 412, 413, 414, 417, 430, 431, 512, 513, 518], "feed": 6, "feed_forward": 6, "feedforward": [433, 434], "feel": 6, "fetch": 1, "few": [1, 2, 6, 7, 8, 9, 514, 518, 520], "fewer": 513, "ffn": 6, "ffn_norm": 6, "fft": 8, "fftshift": 165, "fi": 513, "figur": 513, "file": [4, 6, 9, 149, 150, 151, 188, 220, 282, 283, 284, 285, 286, 396, 400, 513, 514, 515, 521], "file_or_weight": 396, "fill": [0, 176, 249, 321, 332, 432, 433, 434, 435, 436, 438, 439], "filter": [0, 114, 360, 361, 362, 363, 364, 365, 389, 393, 493, 494], "filter_and_map": 353, "filter_fn": [389, 393], "final": [2, 4, 5, 6, 7, 185, 494, 501, 504, 513, 517], "find": [2, 4, 5, 9, 513], "find_packag": [2, 4], "finder": 9, "fine": [509, 514, 518], "finetun": 353, "finit": [0, 191, 244], "first": [0, 1, 2, 3, 4, 5, 6, 7, 9, 127, 130, 132, 173, 182, 196, 198, 199, 217, 227, 229, 231, 252, 269, 278, 310, 316, 319, 325, 337, 339, 340, 353, 356, 357, 374, 386, 387, 431, 453, 461, 487, 491, 496, 512, 513, 514, 515, 516, 517, 519, 522], "first_lay": 518, "firt": 512, "fit": [256, 522], "five": 512, "fix": [2, 6, 9, 512, 518], "flag": [4, 9, 177, 512, 519], "flat": [177, 178, 337, 341], "flat_param": 285, "flatten": [0, 29, 30, 117, 118, 119, 120, 212, 226, 250, 252, 255, 275, 279, 299, 312, 313, 318, 337, 494, 512], "flexibl": 8, "flexibli": 408, "flip": [0, 110, 114], "float": [0, 1, 2, 12, 15, 17, 19, 82, 153, 154, 155, 156, 157, 172, 175, 176, 185, 190, 196, 212, 244, 257, 259, 263, 333, 336, 344, 358, 366, 367, 368, 374, 378, 380, 389, 401, 413, 417, 422, 428, 430, 431, 432, 433, 434, 435, 436, 438, 439, 453, 454, 455, 457, 461, 464, 465, 476, 477, 486, 487, 488, 489, 490, 491, 492, 494, 499, 500, 501, 502, 504, 505], "float16": [1, 2, 12, 154, 185, 220, 344, 389, 518, 519], "float16_t": [1, 2], "float32": [0, 1, 2, 12, 19, 152, 154, 157, 185, 186, 196, 206, 208, 212, 214, 219, 248, 261, 263, 264, 265, 270, 271, 297, 321, 331, 344, 431, 432, 433, 434, 435, 436, 437, 438, 439, 452, 454, 461, 501, 502, 503, 504, 505, 512, 513, 514, 515, 516, 518, 519, 520, 521], "float64": [12, 196, 344, 519], "floor": [0, 1, 175], "floor_divid": 0, "flow": [0, 306, 518], "flush": 2, "fn": [188, 335, 338, 339, 340, 520], "follow": [1, 2, 4, 6, 7, 8, 9, 19, 114, 125, 157, 177, 212, 251, 256, 339, 353, 443, 444, 458, 486, 487, 488, 491, 492, 494, 500, 509, 512, 513, 514, 515, 517, 522], "food": 6, "forc": [6, 7, 106, 353, 513, 520], "forg": 513, "formal": [125, 256], "format": [6, 150, 220, 282, 283, 284, 285, 286, 513, 519], "formul": [359, 369], "formula": 464, "forth": [431, 513], "forward": [1, 2, 325, 512, 517, 518], "found": [4, 393], "four": 358, "fourier": [158, 159, 160, 162, 163, 164, 169, 170, 171], "fourth": 514, "frac": [143, 256, 265, 292, 358, 366, 367, 368, 374, 378, 380, 382, 413, 421, 433, 434, 435, 436, 453, 455, 457, 460, 471, 473, 474, 486, 488, 489, 490, 491, 499], "fraction": 19, "framework": 8, "free": 287, "freez": [353, 406, 483], "freq": 156, "frequenc": [156, 161, 417, 422], "frequent": [512, 518], "friend": 6, "fro": 212, "frobeniu": 212, "from": [0, 1, 2, 4, 6, 7, 8, 87, 124, 125, 127, 130, 131, 134, 135, 136, 154, 167, 168, 170, 171, 176, 177, 181, 185, 188, 205, 207, 212, 220, 231, 235, 249, 256, 258, 259, 260, 261, 262, 263, 267, 270, 285, 287, 296, 303, 306, 308, 312, 313, 318, 319, 330, 332, 337, 338, 339, 340, 341, 353, 382, 394, 396, 409, 433, 434, 435, 436, 438, 439, 455, 464, 480, 485, 511, 512, 513, 
514, 515, 518, 519, 520, 521, 522], "from_embed": 411, "from_linear": 412, "front": [2, 514], "frozen": [353, 394, 404, 406, 412, 483], "fuction": 138, "full": [0, 1, 2, 7, 69, 83, 114, 154, 210, 298, 407, 408, 455, 493, 512, 513, 514, 518], "full_turn": 422, "fulli": [2, 8, 494, 517, 519, 522], "fun": [102, 149, 151, 182, 197, 325, 328, 329, 512, 514, 516, 518, 522], "fun1": 518, "func": 414, "function": [0, 1, 2, 3, 5, 6, 7, 8, 17, 19, 87, 102, 121, 138, 143, 144, 149, 151, 154, 182, 188, 190, 197, 202, 203, 205, 206, 207, 208, 209, 212, 213, 214, 217, 218, 231, 291, 292, 325, 328, 329, 335, 336, 338, 339, 340, 353, 359, 369, 371, 372, 375, 376, 377, 383, 384, 388, 390, 394, 401, 406, 410, 414, 415, 416, 418, 419, 420, 421, 423, 424, 425, 426, 427, 428, 429, 430, 442, 443, 444, 445, 446, 447, 448, 450, 451, 452, 466, 471, 473, 474, 475, 476, 477, 478, 480, 485, 496, 509, 511, 513, 516, 518, 519, 521], "functionexport": 151, "functool": 512, "further": [2, 9, 515], "fuse": [1, 512], "fusibl": 512, "futur": [6, 88, 149, 151, 188, 412, 516, 518], "fx": 121, "g": [3, 9, 121, 154, 212, 256, 379, 479, 494, 499, 500, 514, 518, 523], "g_t": [379, 486, 488, 489, 490, 491, 492, 499, 500], "gain": [433, 434, 435, 436], "gamma": [358, 374, 378, 380, 413, 433, 434, 435, 436], "gap": 1, "gate": [372, 373, 445], "gather": [0, 130, 177, 178], "gather_mm": [0, 178], "gather_qmm": 0, "gaurante": 327, "gaussian": [5, 371, 442, 443, 444, 455], "gaussian_nll_loss": 353, "gc_func": 430, "gelu": [353, 443, 444, 512], "gelu_approx": [353, 371, 442], "gelu_fast_approx": [353, 371, 442], "geluapprox": 371, "gelufast": 371, "gener": [0, 1, 3, 5, 12, 19, 110, 152, 154, 167, 168, 213, 219, 235, 259, 264, 265, 266, 267, 270, 271, 430, 509, 512, 516, 518, 523], "generate_stub": 9, "geq": [428, 477], "get": [2, 5, 7, 9, 108, 109, 110, 112, 113, 122, 123, 172, 179, 180, 181, 236, 262, 353, 512, 514, 515, 518, 522], "get_cache_memori": 100, "get_command_encod": 2, "get_kernel": 2, "get_librari": 2, "gguf": [9, 220, 283, 521], "gh": 1, "gii": 1, "git": 9, "github": [5, 7, 9, 512], "give": [2, 6, 7, 29, 512], "given": [0, 2, 9, 16, 18, 29, 39, 87, 88, 98, 101, 103, 117, 118, 119, 120, 125, 127, 140, 145, 147, 157, 158, 159, 160, 162, 163, 164, 169, 170, 171, 176, 177, 210, 212, 226, 230, 232, 234, 240, 244, 246, 254, 264, 266, 267, 279, 280, 287, 289, 298, 300, 305, 309, 311, 317, 318, 319, 321, 322, 323, 326, 342, 366, 393, 409, 453, 455, 461, 493], "gix": 1, "gix_mult": 1, "giy_mult": 1, "glibc": 9, "global": [128, 130, 131, 132, 134, 135, 136, 141, 268, 333, 336, 509, 512], "glorot": [433, 434], "glorot_norm": 353, "glorot_uniform": 353, "glu": [6, 353], "gm": 1, "gn": 1, "go": [2, 6, 513, 515], "golub": 212, "good": [2, 9, 485, 512, 513, 517, 522], "goroshin": 367, "gower": 6, "gpu": [1, 3, 8, 9, 236, 344, 516, 522], "gputrac": [3, 238], "grad": [2, 5, 7, 121, 325, 336, 485, 495, 512, 513, 514, 515, 516, 518, 520], "grad_fn": [5, 512, 515], "gradient": [0, 5, 7, 121, 182, 306, 325, 333, 335, 336, 353, 394, 407, 412, 430, 460, 483, 485, 486, 487, 489, 490, 491, 492, 495, 498, 500, 512, 513, 515, 516, 518, 519, 520], "grain": 509, "graph": [2, 6, 7, 8, 150, 290, 514, 515], "great": 3, "greater": [0, 6, 29, 148, 184, 252, 336, 428, 477], "greater_equ": 0, "grep": 9, "grid": [2, 154, 235], "grid_dim": 2, "grid_grad": 1, "grid_idx": 1, "grid_sampl": 1, "grid_sample_grad": 1, "grid_sample_ref": 1, "grid_sample_vjp": 1, "grid_shap": 1, "grid_siz": 1, "ground": [5, 6, 454, 464], "group": [0, 1, 107, 108, 109, 110, 111, 112, 
113, 125, 130, 131, 132, 134, 135, 136, 157, 178, 256, 257, 327, 333, 334, 360, 361, 374, 411, 412, 513], "group_dim": 2, "group_siz": [0, 125, 178, 256, 257, 334, 411, 412], "groupnorm": 353, "grow": 518, "gru": 353, "guid": [2, 4, 8, 513, 514], "guidelin": 290, "gw": 1, "h": [1, 2, 4, 108, 109, 112, 113, 212, 358, 361, 362, 364, 365, 367, 368, 373, 379, 414, 515, 518], "h_": [373, 379, 414], "h_in": 1, "h_stride": 1, "h_t": [373, 379, 414], "ha": [2, 3, 6, 7, 8, 9, 82, 102, 127, 136, 166, 167, 169, 170, 171, 180, 182, 202, 203, 205, 206, 207, 208, 209, 213, 217, 218, 235, 260, 358, 373, 379, 382, 414, 483, 485, 512, 513, 514, 516, 517, 518, 520, 522], "had": 6, "hadamard": [0, 185], "hadamard_transform": 0, "half": [19, 267, 271, 417, 518], "halv": [372, 445], "hand": [6, 515, 518], "handi": 515, "handl": [2, 353, 512], "happen": [2, 6, 153, 155, 430, 485, 512, 518], "happi": 6, "hard": 6, "hard_shrink": [353, 375], "hard_tanh": [353, 376], "hardcod": 512, "hardshrink": [353, 446], "hardswish": 353, "hardtanh": [353, 447], "hat": [125, 256], "have": [0, 1, 2, 6, 9, 17, 86, 89, 90, 91, 96, 121, 130, 157, 167, 168, 170, 171, 178, 190, 231, 238, 260, 327, 333, 337, 379, 409, 419, 492, 496, 511, 512, 513, 514, 516, 517, 518, 522], "haven": 6, "hazan": 488, "he": [6, 435, 436], "he_norm": 353, "he_uniform": 353, "head": [157, 409, 430], "header": [2, 9, 154], "heart": 6, "heavi": 6, "height": [356, 357, 358, 361, 362, 364, 365, 367, 368, 386, 387], "hello": [337, 341], "help": [2, 6, 512, 522], "helper": [6, 154, 333, 512, 513, 517], "henc": [0, 2, 256, 512], "hendryck": 444, "here": [2, 6, 9, 485, 512, 514, 515, 518, 521, 522], "hermitian": [206, 208], "hf": 379, "hg": 379, "hh": 414, "hi": [6, 379], "hidden": [373, 379, 414, 430, 494], "hidden_dim": [7, 483, 485], "hidden_s": [373, 379, 414], "hierarchi": 344, "high": [267, 271, 353, 370, 439, 480], "high_pad_s": 0, "higher": [2, 189, 291, 461, 513, 515], "highli": 9, "him": 6, "hing": 456, "hinge_loss": 353, "hinton": 499, "hit": 2, "hn": 373, "ho": 379, "hold": [2, 6, 11, 12, 212, 512], "homebrew": 513, "hopkin": 212, "host": 2, "host1": 513, "host2": 513, "host3": 513, "host4": 513, "host_nam": 1, "hostfil": [513, 517], "hostnam": [513, 517], "hostname1": [513, 517], "hostname2": [513, 517], "hostname3": 513, "hostname4": 513, "hot": 454, "hour": 6, "how": [2, 4, 6, 7, 353, 355, 356, 357, 360, 361, 362, 363, 364, 365, 370, 385, 386, 387, 411, 431, 495, 512, 516, 522], "howev": [2, 121, 353, 371, 374, 496, 509, 512, 513, 518, 519], "hr": 373, "http": [9, 374, 378, 380, 388, 413, 444, 466], "huber": 457, "huber_loss": 353, "human": [435, 436], "hundr": 9, "hurri": 6, "hutter": 490, "hyperbol": [0, 21, 23, 26, 116, 295, 315, 429, 478], "hz": 373, "i": [0, 1, 2, 3, 4, 6, 7, 8, 9, 17, 19, 29, 38, 82, 87, 88, 101, 108, 109, 110, 112, 113, 114, 117, 118, 119, 120, 121, 126, 127, 130, 131, 133, 134, 135, 136, 138, 145, 149, 151, 153, 154, 155, 156, 157, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 173, 175, 176, 177, 178, 185, 188, 190, 191, 196, 197, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 220, 225, 226, 230, 231, 235, 237, 251, 252, 255, 256, 257, 264, 265, 266, 277, 279, 282, 283, 284, 290, 291, 292, 298, 300, 305, 306, 311, 312, 313, 316, 319, 320, 324, 325, 326, 327, 328, 329, 330, 333, 334, 336, 337, 338, 339, 340, 344, 346, 353, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 366, 367, 368, 371, 373, 374, 378, 379, 380, 382, 385, 386, 387, 393, 394, 400, 402, 403, 405, 406, 
407, 408, 409, 410, 412, 413, 414, 417, 422, 428, 430, 431, 435, 436, 442, 444, 452, 453, 455, 460, 461, 464, 465, 467, 472, 477, 483, 485, 487, 489, 490, 492, 493, 495, 496, 501, 503, 504, 509, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523], "i386": 9, "i_n": 1, "i_nw": 1, "i_s": 1, "i_sw": 1, "i_t": 379, "iclr": [489, 490, 491], "id": [7, 9], "idea": [515, 518], "idempot": [394, 406], "ident": [0, 121, 136, 152, 165, 306, 353, 403, 513], "identifi": [2, 337, 511], "idim": 7, "idiom": [7, 512], "idx": [39, 516], "ie": 406, "ieee": 344, "ifac": 513, "ignor": [6, 39, 88, 101, 102, 145, 487, 517], "ih": 414, "ii": 1, "ij": 235, "im": 265, "imag": [0, 361, 362, 364, 365, 367, 368, 431], "imagenet": [435, 436], "imaginari": [51, 187], "immedi": [6, 389], "implement": [0, 1, 5, 7, 156, 157, 177, 178, 370, 393, 409, 417, 419, 422, 428, 430, 431, 477, 486, 487, 488, 491, 492, 494, 495, 507, 512, 515], "impli": 327, "implicit": [509, 512, 515], "implicitli": 518, "import": [2, 3, 5, 6, 7, 9, 121, 132, 185, 188, 212, 285, 325, 337, 338, 339, 340, 341, 353, 355, 356, 357, 358, 378, 385, 386, 387, 396, 431, 452, 454, 461, 480, 483, 485, 512, 513, 515, 516, 518, 519, 520], "import_funct": 514, "imported_ab": 514, "imported_fun": 514, "imported_funct": 514, "improv": [1, 2, 3, 6, 452, 486, 487, 488, 489, 490, 491, 499, 512, 513], "in_ax": [329, 515], "in_channel": [360, 361, 362, 363, 364, 365], "in_dim": [353, 483], "in_proj": 483, "includ": [1, 2, 4, 117, 118, 119, 120, 150, 154, 179, 180, 226, 290, 380, 390, 402, 412, 455, 485, 512, 514, 515, 516, 520, 521, 523], "include_dir": 2, "inclus": [0, 42, 43, 44, 45, 58, 117, 118, 119, 120, 173, 226], "incom": 2, "inconveni": 512, "incorpor": 519, "incorrect": 519, "increas": [291, 517], "increment": 19, "incur": [6, 9], "independ": [129, 367, 368], "index": [0, 1, 2, 8, 10, 29, 39, 147, 152, 177, 182, 235, 252, 296, 297, 312, 313, 325], "indic": [0, 2, 17, 27, 28, 29, 30, 39, 157, 177, 178, 182, 190, 191, 192, 193, 194, 195, 196, 210, 217, 255, 296, 297, 300, 312, 313, 325, 403, 405, 454, 461, 503, 516], "indices_or_sect": [75, 300], "indirectli": 519, "individu": [353, 367, 368], "ineffici": [516, 518], "inexact": [12, 196], "inf": [212, 244, 409], "infer": [8, 176, 220, 319, 324, 513, 514], "infin": [0, 192, 194, 195, 244, 385, 386, 387, 491], "infinit": [17, 190, 191], "info": [6, 9], "inform": [3, 4, 6, 7, 9, 140, 172, 236, 283, 284, 344, 353, 358, 371, 409, 513, 514, 515, 522], "inherit": [7, 511], "inifn": 192, "init": [353, 410, 480, 485, 501, 502, 504, 505, 513], "init_fn": [432, 433, 434, 435, 436, 437, 438, 439, 480], "init_valu": 1, "initi": [1, 3, 5, 6, 132, 340, 353, 358, 374, 378, 380, 382, 410, 413, 432, 433, 434, 435, 436, 437, 438, 439, 483, 496, 501, 502, 504, 505, 512, 513, 514, 518], "initializer_list": 0, "inject": 0, "inlin": 0, "inner": [0, 512], "inorm": 378, "inp": [1, 154], "inp_ndim": 1, "inp_shap": 1, "inp_strid": 1, "inplac": [2, 9], "input": [0, 1, 2, 5, 6, 13, 14, 15, 16, 17, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 124, 126, 127, 130, 131, 136, 137, 138, 139, 140, 142, 143, 144, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 177, 178, 182, 183, 184, 185, 187, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 
209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 240, 241, 242, 243, 244, 245, 247, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 266, 269, 272, 273, 274, 275, 277, 278, 279, 280, 281, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 308, 309, 310, 312, 313, 314, 315, 316, 317, 318, 319, 320, 322, 323, 324, 325, 326, 327, 329, 330, 332, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 367, 368, 370, 372, 373, 374, 378, 379, 380, 382, 385, 386, 387, 409, 412, 413, 414, 417, 428, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 445, 452, 453, 455, 456, 457, 458, 460, 461, 463, 465, 477, 480, 512, 514, 515, 516, 517, 520, 521], "input_dil": [0, 110], "input_dim": [7, 353, 382, 412], "input_nam": [1, 154], "input_s": [373, 379, 414], "inputs1": 461, "inputs2": 461, "insert": [127, 147, 522], "insid": [512, 514], "inspect": [3, 512, 520], "inspir": 8, "instabl": 465, "instal": [2, 4, 517], "instanc": [6, 39, 121, 256, 341, 353, 378, 389, 390, 391, 394, 396, 397, 398, 403, 406, 407, 408, 419, 483, 513, 517, 519], "instancenorm": 353, "instanti": [1, 2, 7, 518], "instantiate_kernel": 2, "instead": [2, 9, 121, 353, 408, 422, 515, 518], "instruct": [4, 9, 514], "int": [0, 1, 2, 4, 6, 7, 10, 16, 18, 19, 27, 28, 29, 30, 34, 35, 36, 37, 42, 43, 44, 45, 46, 47, 50, 58, 59, 60, 61, 62, 63, 66, 70, 72, 75, 78, 79, 80, 81, 82, 84, 87, 96, 98, 103, 107, 108, 109, 110, 111, 112, 113, 117, 118, 119, 120, 125, 126, 127, 134, 135, 136, 140, 147, 152, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 176, 178, 179, 180, 181, 182, 186, 196, 204, 212, 219, 226, 230, 232, 234, 236, 240, 242, 248, 251, 252, 254, 255, 256, 257, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 275, 277, 279, 280, 287, 290, 291, 296, 297, 298, 299, 300, 303, 304, 305, 309, 310, 312, 313, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 329, 331, 333, 334, 353, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 370, 372, 373, 374, 378, 379, 380, 382, 385, 386, 387, 409, 411, 412, 413, 414, 417, 422, 430, 445, 453, 454, 458, 463, 465, 483, 494, 501, 503, 504, 505], "int16": 344, "int32": [0, 1, 12, 19, 39, 173, 196, 198, 212, 267, 296, 324, 344, 431, 516, 520], "int64": [12, 344], "int64_t": 2, "int8": [12, 344], "int_0": 143, "integ": [0, 12, 175, 177, 178, 196, 212, 236, 251, 256, 257, 259, 266, 267, 300, 312, 316, 329, 344, 370, 401, 503, 516], "integr": [19, 312, 518], "intend": [0, 512], "interact": 430, "interest": 522, "interfac": [2, 513, 517], "intermedi": 519, "intern": 358, "interpol": 431, "interpret": 4, "interv": [19, 219, 267, 271], "introduc": [0, 279], "intuit": 353, "invalid": [0, 87], "invers": [0, 20, 21, 22, 23, 24, 25, 26, 93, 144, 162, 163, 164, 165, 166, 167, 168, 203, 209, 213, 218], "invert": 0, "involv": [485, 512], "iogpu": 291, "iostream": 4, "ip": [513, 517], "ip1": [513, 517], "ip2": [513, 517], "ip3": 513, "ip4": 513, "is_avail": 132, "is_equival": 2, "is_leaf": [337, 338, 339, 340], "is_leaf_fn": 393, "isclos": 0, "isfinit": 0, "ish": 6, "ishmael": 6, "isinf": 0, "isnan": 0, "isneginf": 0, "isposinf": 0, "issu": [513, 515, 519], "issubdtyp": [2, 12, 344], "item": [0, 2, 5, 6, 7, 338, 514, 518, 519, 520], "iter": [5, 7, 217, 338, 339, 494, 509, 512, 518], "iterm": 9, "itertool": [6, 338], "its": [0, 1, 2, 9, 157, 203, 213, 231, 252, 269, 321, 335, 341, 353, 412, 485, 489, 490, 491, 513, 518, 519, 522], "itself": [2, 334, 493, 496], "ix": 1, "ix_n": 
1, "ix_nw": 1, "ix_s": 1, "ix_sw": 1, "iy_n": 1, "iy_nw": 1, "iy_s": 1, "iy_sw": 1, "j": [6, 9, 212, 367, 488, 489, 491], "j8": 2, "jacobian": [2, 197, 328, 520], "jain": 367, "jax": [8, 509], "jit": [1, 154], "jmlr": 488, "jnp": 519, "john": 212, "join": 503, "join_schedul": 485, "jointli": 264, "json": [513, 517], "just": [2, 4, 7, 380, 512, 514, 516], "jvp": [2, 121, 520], "k": [0, 6, 46, 96, 107, 111, 126, 152, 157, 177, 185, 318, 321, 322, 323, 382, 394], "kaim": 436, "kd": [109, 113], "keep": [2, 16, 18, 27, 28, 230, 232, 234, 240, 254, 305, 309, 326, 353, 393, 515, 518], "keepdim": [0, 16, 18, 27, 28, 34, 35, 36, 37, 59, 60, 61, 62, 66, 79, 80, 84, 212, 230, 232, 234, 240, 254, 298, 305, 309, 326], "kei": [1, 3, 6, 149, 157, 188, 236, 259, 260, 261, 263, 264, 265, 266, 267, 269, 270, 271, 337, 338, 393, 394, 406, 409, 496, 509, 511, 514, 515], "kept": 291, "kernel": [2, 8, 9, 107, 108, 109, 110, 111, 112, 113, 154, 355, 385, 512, 516], "kernel_dil": [0, 110], "kernel_s": [355, 356, 357, 360, 361, 362, 363, 364, 365, 385, 386, 387], "key_cach": 6, "key_input_dim": 409, "key_proj": 6, "keyring_1": 9, "keyword": [149, 182, 285, 286, 325, 338, 353, 509, 514, 521, 523], "kh": [108, 109, 112, 113], "kind": 6, "kingma": [489, 491], "kl_div_loss": 353, "kname": 2, "know": [2, 6], "known": [420, 472], "kron": 0, "kroneck": [0, 198], "kth": [0, 29, 252], "kullback": 458, "kw": [108, 109, 112, 113], "kw_onli": 2, "kwarg": [11, 129, 149, 150, 188, 285, 286, 342, 514, 523], "l": [6, 7, 107, 111, 202, 203, 206, 208, 210, 353, 358, 360, 363, 373, 379, 414, 464], "l1": [325, 457, 459, 460, 464], "l1_loss": 353, "l2": [457, 460, 494, 500], "l2_loss": 353, "l_": 457, "la": 212, "label": [3, 5, 454, 461], "label_smooth": 454, "lack": 516, "lambd": [375, 426, 446, 476], "lambda": [338, 339, 340, 353, 375, 389, 394, 401, 426, 446, 470, 476, 486, 487, 488, 489, 490, 491, 492, 499, 500, 512, 513, 514, 515], "languag": [1, 2, 4], "lapack": 9, "larg": [6, 353, 409, 460, 512, 514, 518], "larger": [1, 156, 291, 417, 492], "largest": [212, 244, 318], "lasso": 325, "last": [0, 1, 6, 30, 82, 153, 155, 160, 164, 167, 168, 170, 171, 173, 177, 178, 181, 189, 202, 203, 205, 206, 207, 208, 209, 213, 214, 217, 218, 231, 260, 299, 316, 327, 360, 361, 362, 363, 364, 365, 367, 368, 374, 431, 493, 494, 519], "later": [3, 9, 485], "launch": [1, 2, 132, 513, 516], "layer": [8, 153, 334, 353, 355, 356, 357, 367, 368, 373, 374, 379, 380, 382, 385, 386, 387, 403, 408, 411, 412, 414, 419, 430, 479, 483, 494, 514, 517], "layer_s": 7, "layernorm": 353, "layout": 1, "lazi": [8, 483, 520], "lazili": [6, 353], "lceil": 96, "ld": [373, 379, 414], "lead": [0, 19, 87, 512], "leaf": [102, 334, 337, 338, 339, 340, 393], "leaf_modul": 353, "leaki": [381, 449], "leaky_relu": 353, "leakyrelu": 353, "learn": [5, 7, 8, 358, 374, 378, 380, 410, 413, 485, 486, 487, 488, 489, 490, 491, 492, 494, 499, 500], "learnabl": [360, 361, 362, 363, 364, 365, 419], "learning_r": [7, 485, 486, 487, 488, 489, 490, 491, 492, 494, 496, 499, 500, 501, 502, 503, 504, 505, 512], "least": [6, 89, 90, 91, 101, 202, 203, 205, 206, 207, 208, 209, 213, 214, 217, 218, 256], "leav": [2, 88, 145, 338, 339, 340], "lectur": 499, "lecun": 367, "left": [0, 6, 156, 199, 212, 256, 279, 371, 417, 431, 443, 444, 455, 457, 465], "left_shift": 0, "leibler": 458, "len": [6, 160, 164, 168, 171, 185, 503], "length": [6, 165, 303, 358, 360, 363, 373, 379, 414, 503], "leq": [457, 470], "less": [0, 1, 6, 29, 201, 252, 291, 333, 417, 464, 493, 513], "less_equ": 0, "let": [1, 2, 5, 6, 
203, 512, 514, 515, 518, 519], "level": [0, 177, 178, 435, 436], "lh": [373, 379, 414], "lhs_indic": [0, 177, 178], "lhs_mask": 96, "lib": [2, 513], "libbla": 9, "liblapack": 9, "libmlx": 9, "libmlx_ext": 2, "libmpi": 513, "librari": [1, 2, 4, 9, 346, 353, 513, 514], "like": [2, 6, 8, 135, 149, 151, 188, 196, 249, 332, 368, 460, 496, 498, 512, 513, 514, 515, 517, 518, 519, 520, 522], "likelihood": [455, 463], "limit": [0, 2, 101, 287, 290, 291, 516], "linalg": 185, "line": [6, 513, 514, 517, 518, 519], "linear": [0, 2, 6, 7, 8, 215, 216, 334, 338, 353, 359, 369, 371, 372, 381, 396, 412, 414, 415, 416, 418, 420, 431, 440, 441, 442, 443, 444, 445, 449, 468, 469, 470, 472, 480, 483, 496, 503, 504, 512, 514], "linear1": 6, "linear2": 6, "linear3": 6, "linear_schedul": [485, 503], "linearli": 409, "link": [2, 4, 9], "linspac": 0, "lion": 485, "list": [1, 6, 11, 16, 18, 31, 75, 82, 87, 88, 89, 90, 91, 98, 102, 103, 110, 140, 145, 154, 159, 160, 161, 163, 164, 165, 167, 168, 170, 171, 176, 182, 197, 212, 230, 232, 234, 235, 240, 248, 251, 254, 259, 260, 261, 263, 264, 265, 267, 270, 271, 283, 298, 300, 304, 305, 309, 316, 317, 320, 325, 326, 328, 331, 337, 340, 341, 353, 394, 396, 397, 398, 399, 404, 406, 407, 408, 483, 489, 490, 491, 492, 493, 503, 511, 512, 513, 514, 515, 517, 518], "listen": 513, "liter": [2, 251, 431, 435, 436, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465], "littl": 6, "liu": 6, "live": [8, 154, 522], "ll": [1, 5, 7, 457, 512, 515], "llama": 6, "llamaattent": 6, "llamaencoderlay": 6, "llm": 8, "load": [2, 7, 8, 346, 396, 513], "load_weight": [353, 518], "loader": 7, "loader_path": 2, "loan": 212, "loc": [1, 263, 265], "local": [353, 367, 513], "localhost": [513, 517], "locat": [0, 2, 4, 87, 296, 297, 407, 408, 513, 516, 522], "log": [0, 223, 225, 230, 383, 384, 450, 451, 452, 455, 458, 460, 463, 475], "log10": 0, "log1p": 0, "log2": 0, "log_cosh_loss": 353, "log_sigmoid": [353, 383], "log_softmax": [353, 384], "logaddexp": 0, "logarithm": [0, 221, 222, 223, 224], "logcosh": 460, "logcumsumexp": 0, "logic": [0, 2, 227, 228, 229, 513], "logical_and": 0, "logical_not": 0, "logical_or": 0, "logist": [0, 5, 292, 444, 472], "logit": [6, 260, 452, 454, 512], "logsigmoid": 353, "logsoftmax": 353, "logsumexp": [0, 226], "long": 6, "longer": [6, 114, 515], "look": [2, 6, 513], "lookup": 370, "loop": [6, 7, 512, 513, 515, 518], "loshchilov": 490, "loss": [5, 7, 325, 353, 485, 512, 513, 515, 518], "loss_and_grad": 353, "loss_and_grad_fn": [7, 485, 512, 515], "loss_fn": [5, 7, 485, 512, 515], "loss_grad_fn": 513, "lot": 515, "low": [267, 271, 439, 480], "low_pad_s": 0, "lower": [202, 203, 206, 208, 216, 218, 256, 267, 270, 271, 321, 439], "lr": [5, 492], "lr_schedul": [501, 502, 503, 504, 505], "lstm": 353, "lto": 2, "lu": [6, 211], "luckili": 518, "lvalu": 325, "m": [0, 2, 4, 6, 9, 96, 152, 177, 185, 212, 321, 486, 512], "m1": [1, 6, 512, 515, 522], "m10": 344, "m7": 344, "m_": [489, 490, 491, 492], "m_t": [489, 490, 491, 492], "mac": 513, "machin": [6, 8, 9, 499, 513], "maco": [9, 291], "macosx": 9, "made": [6, 346], "mai": [2, 4, 88, 149, 151, 177, 178, 188, 212, 334, 367, 494, 513, 515, 516], "main": [4, 8, 127, 152, 154, 319, 338, 339, 353, 513], "maintain": [367, 368, 492], "major": [0, 2, 106], "make": [1, 2, 3, 4, 6, 7, 9, 106, 150, 151, 231, 246, 289, 353, 493, 501, 502, 504, 505, 512, 518, 520, 522], "make_shar": 2, "malloc": 2, "man": 6, "manag": [8, 307, 509, 513, 514, 522], "mani": [1, 2, 87, 300, 360, 361, 362, 363, 364, 365, 370, 411, 512, 513, 514, 
518], "manual": [353, 513], "map": [2, 7, 39, 220, 338, 370, 389, 514], "map_fn": [389, 393], "map_torch_to_mlx": 6, "margin": [461, 465], "margin_ranking_loss": 353, "mask": [0, 6, 96, 157, 403, 409, 516], "mask_lh": [0, 96], "mask_n": 1, "mask_nw": 1, "mask_out": [0, 96], "mask_rh": [0, 96], "mask_s": 1, "mask_sw": 1, "matadata": 220, "match": [9, 157, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 179, 396, 431, 454, 516, 519], "materi": [6, 8], "math": [6, 465, 512], "mathbf": 203, "mathcal": [265, 382], "mathemat": 212, "mathrm": [143, 292, 378], "matmul": [0, 177, 522], "matric": [212, 214, 217], "matrix": [0, 5, 15, 46, 96, 125, 126, 152, 177, 178, 185, 186, 202, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 217, 218, 231, 235, 256, 257, 264, 411, 412, 437, 480], "matter": [6, 353, 514], "matur": 513, "max": [0, 1, 2, 212, 233, 359, 385, 386, 387, 410, 440, 447, 448, 453, 455, 456, 461, 465, 467, 469, 487, 491, 512, 515, 522], "max_buffer_s": 236, "max_freq": 422, "max_i": 256, "max_norm": 336, "max_recommended_working_set_s": [236, 291], "max_val": 447, "maximum": [0, 7, 27, 39, 101, 117, 181, 290, 336, 353, 381, 385, 386, 387, 415, 422, 443, 444, 449, 468, 483, 518], "maxpool1d": 353, "maxpool2d": 353, "maxpool3d": 353, "maxtotalthreadsperthreadgroup": 2, "mca": [513, 517], "md": 212, "me": 6, "mean": [0, 1, 5, 6, 7, 155, 263, 264, 265, 325, 353, 358, 374, 394, 413, 438, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 512, 513, 515, 519], "meant": 121, "measur": 522, "mechan": 430, "medic": 368, "meet": 9, "member": [353, 399, 404], "memori": [0, 1, 2, 8, 87, 100, 179, 180, 181, 276, 287, 290, 291, 430, 483, 487, 512, 518, 519], "memory_order_relax": 1, "memory_s": [236, 291], "memoryview": [518, 519], "merg": 512, "meshgrid": 0, "metadata": [5, 220, 283, 284], "metal": [2, 4, 8, 154, 290], "metal_captur": 3, "metal_kernel": 1, "metal_path": 9, "metallib": [2, 9], "method": [2, 6, 10, 11, 31, 121, 129, 172, 334, 342, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 400, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 483, 486, 487, 488, 489, 490, 491, 492, 493, 494, 496, 499, 500, 507], "millisecond": [9, 512, 522], "min": [0, 2, 212, 241, 359, 410, 440, 447, 448, 467, 469], "min_freq": 422, "min_i": 256, "min_val": 447, "mind": 6, "mine": 6, "minibatch": 7, "minim": [513, 517], "minimum": [0, 28, 39, 101, 118, 422, 452, 453], "minsizerel": 9, "minu": 148, "minut": 6, "mish": 353, "mismatch": 514, "miss": [396, 514, 521], "mix": 516, "mkdir": [3, 9], "ml": 9, "mlp": [7, 353, 430, 485], "mlp_dim": [6, 430], "mlx": [1, 3, 5, 6, 7, 9, 346, 353, 480, 483, 485, 509, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522], "mlx_build_acceler": 4, "mlx_build_benchmark": 9, "mlx_build_cpu": 9, "mlx_build_cuda": 9, "mlx_build_exampl": 9, "mlx_build_gguf": 9, "mlx_build_met": [2, 4, 9], "mlx_build_metallib": 2, "mlx_build_python_bind": 9, "mlx_build_safetensor": 9, "mlx_build_test": 9, "mlx_cxx_flag": 4, "mlx_disable_compil": [128, 141, 512], "mlx_ext": 2, "mlx_ext_metallib": 2, "mlx_found": 4, "mlx_include_dir": [2, 4], "mlx_librari": 4, "mlx_metal_debug": [3, 9], "mlx_metal_jit": 9, "mlx_root": 4, "mlx_sample_extens": 2, "mlx_trace": 3, "mlxfn": [149, 151, 188, 514], "mnist": 7, "mode": [0, 1, 2, 114, 251, 392, 403, 405, 431, 435, 436], "model": 
[5, 7, 8, 285, 334, 335, 338, 339, 353, 389, 392, 394, 396, 400, 403, 405, 406, 407, 409, 430, 480, 483, 485, 495, 496, 498, 512, 513, 514, 518], "modest": 2, "modif": 519, "modifi": 519, "modul": [2, 4, 6, 7, 334, 335, 419, 430, 480, 498, 511, 512, 518], "moment": [6, 487, 491, 513], "momentum": [358, 492, 494, 496, 500, 512], "monei": 6, "monitor": 517, "monoton": 466, "moor": 213, "more": [1, 2, 3, 4, 7, 11, 82, 127, 149, 177, 202, 203, 205, 206, 207, 208, 209, 210, 213, 217, 218, 231, 283, 284, 287, 290, 344, 353, 358, 367, 417, 422, 430, 431, 433, 434, 435, 436, 452, 509, 512, 513, 515, 516, 520, 522], "moreov": 517, "most": [2, 157, 260, 324, 353, 498, 512, 513, 515, 516, 518], "move": [0, 2, 242, 522], "moveaxi": 0, "mpi": [132, 346], "mpirun": [513, 517], "mse": 325, "mse_loss": 353, "mtl": 2, "mtl_capture_en": 3, "mtlcommandbuff": 2, "mu": 500, "much": [1, 2, 6, 355, 356, 357, 385, 386, 387, 512, 518], "multi": [8, 157, 360, 361, 362, 363, 364, 365, 514, 516, 519], "multidimension": 235, "multiheadattent": [6, 353], "multioptim": 485, "multipl": [0, 1, 9, 15, 96, 151, 153, 155, 177, 178, 231, 243, 256, 257, 409, 422, 502, 503, 505, 512, 518, 521], "multipli": [0, 2, 39, 178, 256, 257, 366, 422, 431], "muon": 485, "murtadha": 6, "must": [0, 1, 2, 3, 9, 96, 101, 149, 156, 157, 176, 178, 206, 208, 212, 259, 260, 264, 267, 270, 271, 330, 431, 519], "mx": [1, 2, 3, 4, 5, 6, 7, 39, 88, 104, 105, 121, 132, 135, 149, 150, 151, 154, 157, 173, 188, 196, 198, 205, 206, 207, 208, 210, 212, 214, 220, 266, 285, 296, 297, 324, 325, 336, 353, 355, 356, 357, 358, 369, 378, 381, 385, 386, 387, 389, 396, 400, 415, 431, 432, 433, 434, 435, 436, 437, 438, 439, 441, 449, 452, 453, 454, 458, 461, 468, 478, 480, 483, 485, 509, 512, 513, 514, 515, 516, 518, 519, 520, 521, 522, 523], "my": [6, 9], "my_devic": 523, "my_path": 285, "my_script": [513, 517], "myexp": [1, 154], "myexp_strid": 1, "mymlp": 483, "n": [0, 1, 2, 6, 31, 96, 107, 108, 109, 110, 111, 112, 113, 152, 157, 158, 160, 162, 164, 166, 169, 171, 185, 186, 264, 265, 305, 321, 326, 358, 360, 361, 362, 363, 364, 365, 367, 368, 373, 379, 414, 431, 460, 465, 513, 517], "n_kv": 157, "n_q": 157, "n_t": 373, "naiv": [2, 515], "naive_add": 515, "name": [1, 2, 121, 150, 154, 178, 220, 256, 257, 283, 284, 285, 286, 353, 374, 393, 396, 398, 513, 516, 521], "named_modul": 353, "namespac": 4, "nan": [0, 17, 86, 190, 191, 193, 244], "nan_to_num": 0, "nanobind": 2, "nanobind_add_modul": 2, "nativ": [9, 513], "natur": [0, 221, 223, 518], "nb": 2, "nb_domain": 2, "nb_modul": 2, "nb_static": 2, "nbyte": 2, "nc": 358, "ndarrai": [31, 516, 518, 520], "ndhwc": [362, 365, 368], "ndim": [0, 1, 2, 173, 212, 217, 431], "ne": 1, "nearest": [1, 431], "necessari": [106, 353], "necessarili": [207, 318], "need": [1, 2, 4, 6, 7, 8, 9, 86, 256, 353, 407, 408, 422, 430, 509, 513, 515, 517, 518, 519, 520, 522], "neg": [0, 127, 173, 194, 244, 279, 319, 381, 385, 386, 387, 409, 455, 463, 465, 516], "negat": [0, 245], "negative_slop": [381, 449], "neginf": [0, 244], "neighbor": [431, 517], "neither": [182, 325], "nelem": 2, "nervou": 6, "nest": [82, 102, 340, 353, 483, 511, 515], "nesterov": [494, 500], "network": [6, 8, 333, 358, 367, 370, 433, 434, 480, 483, 494, 499, 513], "neural": [6, 8, 370, 433, 434, 466, 480, 483, 494, 499], "never": [6, 518], "new": [0, 1, 2, 7, 98, 127, 242, 246, 277, 304, 320, 327, 338, 339, 401, 409, 483, 485, 498, 503, 512, 514, 516, 518, 519], "new_tre": 339, "newton": 494, "next": [2, 4, 6, 7, 287, 514], "nh": [373, 379, 414], "nhwc": [358, 361, 
364], "nice": [515, 518], "nlc": [358, 360, 363], "nld": [373, 379, 414], "nlh": [373, 379, 414], "nll": [455, 463], "nll_loss": 353, "nn": [2, 6, 7, 285, 338, 353, 480, 483, 485, 496, 498, 512, 514, 518], "nobodi": 6, "node": [88, 102, 145, 329, 339, 340, 513, 517], "nois": 5, "noisi": 5, "nomins": 2, "non": [0, 1, 2, 4, 9, 235, 404, 414, 466, 483], "nondeterminist": 516, "none": [1, 2, 6, 10, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 124, 125, 126, 127, 128, 130, 131, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 182, 183, 184, 185, 186, 187, 189, 190, 191, 192, 193, 194, 195, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 238, 239, 240, 241, 242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 284, 285, 288, 289, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 329, 330, 331, 332, 333, 334, 337, 338, 339, 340, 355, 356, 357, 371, 385, 386, 387, 389, 393, 394, 401, 406, 409, 414, 422, 430, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 487, 507, 514, 516], "nonlinear": [414, 512], "nonzero": 516, "noop": [406, 513], "nor": [2, 182, 325], "norm": [6, 155, 336, 374, 465, 491, 492], "norm1": 6, "norm2": 6, "norm_first": 430, "normal": [1, 2, 5, 6, 153, 154, 155, 157, 205, 206, 264, 270, 353, 355, 356, 357, 358, 374, 378, 380, 385, 386, 387, 413, 430, 433, 435, 519, 522], "not_equ": 0, "notabl": [6, 8], "notat": [125, 337, 398], "note": [0, 1, 2, 4, 6, 9, 17, 19, 87, 96, 102, 109, 112, 113, 121, 167, 168, 178, 179, 190, 210, 212, 256, 260, 327, 334, 353, 413, 431, 485, 516, 519, 521], "noth": [6, 121, 353, 518], "notic": [6, 514, 515, 521], "now": [1, 2, 6, 9, 412, 512, 519], "np": [1, 6, 7, 513, 519, 520], "npy": [220, 282, 521], "npz": [6, 220, 285, 286, 396, 400, 521], "ns_step": 494, "nuc": 212, "nuclear": 212, "nuisanc": 513, "nullopt": 0, "num": [0, 6, 219, 269], "num_class": [7, 485], "num_decoder_lay": 430, "num_embed": [370, 411], "num_encoder_lay": 430, "num_epoch": [7, 485], "num_exampl": 5, "num_featur": [5, 358], "num_group": 374, "num_head": [6, 409, 430], "num_it": 5, "num_lay": [6, 7, 485], "num_param": 353, "num_paramet": 410, "num_sampl": 260, "num_split": 0, "number": [0, 2, 12, 19, 64, 74, 102, 108, 109, 110, 112, 113, 125, 150, 152, 157, 178, 182, 186, 197, 219, 244, 251, 256, 257, 260, 263, 265, 269, 271, 275, 279, 280, 316, 317, 321, 325, 328, 329, 333, 334, 353, 358, 360, 361, 362, 363, 364, 365, 367, 368, 374, 378, 409, 410, 430, 431, 433, 434, 435, 436, 494, 501, 503, 504, 509, 512, 515, 517, 523], "number_of_el": 0, "numer": [6, 153, 155, 212, 225, 230, 298, 358, 374, 378, 380, 413, 452, 453, 455, 465, 486, 487, 488, 489, 
490, 491, 499, 512, 518], "numpi": [2, 6, 7, 8, 14, 17, 19, 92, 94, 95, 97, 98, 137, 138, 142, 183, 184, 190, 199, 200, 201, 205, 207, 225, 231, 233, 241, 243, 247, 253, 274, 278, 308, 518, 520, 521], "nvidia": 9, "nw": 1, "nwhc": 367, "o": [9, 157, 379], "o_t": 379, "obj": 283, "object": [3, 11, 31, 52, 82, 102, 150, 151, 154, 196, 285, 329, 337, 338, 339, 340, 344, 367, 430, 511, 517], "observ": 6, "occupi": [125, 178, 256, 257], "occur": 519, "odd": 165, "odim": 7, "odot": [373, 379], "off": [6, 9, 518], "offer": 460, "offset": [0, 1, 2, 6, 47, 87, 127, 153, 156, 319], "often": 368, "ok": [396, 512, 514, 515], "okai": [512, 518], "old": 6, "older": [149, 151, 188], "omit": [491, 513], "onc": [1, 2, 9, 512, 514], "one": [0, 2, 4, 6, 9, 39, 82, 89, 97, 101, 108, 109, 110, 112, 113, 132, 147, 149, 152, 153, 155, 156, 177, 212, 223, 231, 257, 260, 303, 308, 324, 333, 344, 363, 364, 365, 406, 431, 454, 493, 513, 514, 517, 522], "ones": [0, 2, 6, 249, 285, 297, 321, 407, 408, 485, 513, 516], "ones_lik": 0, "onli": [1, 2, 6, 8, 86, 96, 108, 109, 110, 112, 113, 121, 157, 177, 206, 208, 212, 217, 256, 264, 291, 327, 344, 353, 393, 394, 396, 401, 403, 406, 407, 408, 483, 512, 513, 514, 515, 517, 521, 522], "onlin": 488, "op": [1, 2, 250, 327, 394, 518], "open": [3, 9, 19, 267, 271, 513], "openmpi": 513, "oper": [3, 6, 8, 10, 38, 89, 90, 91, 110, 157, 177, 178, 253, 255, 298, 306, 313, 342, 344, 353, 430, 492, 512, 513, 515, 516, 518, 519, 520, 522, 523], "operand": [139, 140, 177], "opportun": 512, "opt": [495, 513], "optim": [1, 3, 5, 7, 8, 407, 512, 513, 515, 518], "option": [0, 3, 6, 15, 16, 18, 19, 27, 28, 29, 30, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 87, 89, 90, 91, 96, 102, 103, 107, 108, 109, 110, 111, 112, 113, 114, 117, 118, 119, 120, 121, 125, 126, 127, 130, 131, 132, 134, 135, 136, 149, 151, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 176, 177, 178, 182, 186, 194, 195, 198, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 226, 230, 232, 234, 235, 240, 244, 248, 251, 252, 254, 256, 257, 259, 260, 261, 263, 264, 265, 266, 267, 269, 270, 271, 275, 277, 279, 284, 298, 299, 300, 303, 304, 305, 309, 311, 312, 316, 318, 319, 320, 321, 322, 323, 324, 325, 326, 329, 331, 333, 334, 337, 338, 339, 340, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 373, 379, 382, 385, 386, 387, 389, 393, 394, 396, 401, 406, 409, 411, 412, 414, 417, 422, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 486, 487, 488, 489, 490, 491, 492, 494, 496, 499, 500, 501, 509, 512, 514, 521, 523], "ord": 212, "order": [0, 1, 29, 87, 110, 140, 206, 207, 208, 212, 252, 256, 318, 353, 374, 407, 419, 496, 512, 515, 517], "ordinari": 189, "org": [374, 378, 380, 388, 413, 444, 466], "origin": [6, 127, 336, 358, 402, 433, 434, 435, 436, 486, 487, 488, 491, 492, 494, 514, 519], "orthogon": 494, "orthonorm": 185, "other": [0, 2, 6, 8, 196, 212, 353, 395, 483, 492, 512, 513, 514, 516, 517, 518, 520], "other_input": 353, "otherwis": [19, 110, 132, 266, 334, 337, 338, 339, 340, 394, 396, 406, 428, 430, 431, 446, 452, 457, 464, 476, 477, 518, 519], "our": [1, 2, 6, 7, 419, 486, 487, 488, 491, 492, 494, 513], "out": [0, 1, 2, 9, 96, 154, 157, 188, 367, 368, 403, 512, 513, 514, 515, 516], "out_ax": [329, 515], "out_channel": [360, 361, 
362, 363, 364, 365], "out_dim": [353, 483], "out_dtyp": 2, "out_idx": 2, "out_mask": 96, "out_proj": [6, 483], "out_ptr": 2, "out_shap": [1, 2], "outer": [0, 512, 518], "outlier": 460, "output": [0, 1, 2, 6, 9, 16, 17, 18, 19, 29, 87, 96, 97, 98, 102, 104, 105, 106, 111, 112, 113, 117, 118, 119, 120, 121, 139, 150, 152, 153, 154, 155, 156, 157, 166, 169, 170, 171, 176, 177, 182, 185, 186, 190, 212, 219, 226, 230, 232, 234, 235, 240, 244, 248, 249, 252, 254, 255, 259, 260, 261, 263, 264, 265, 267, 270, 271, 285, 286, 296, 297, 298, 303, 305, 309, 313, 319, 321, 325, 326, 327, 328, 329, 330, 331, 332, 358, 360, 361, 362, 363, 364, 365, 378, 382, 409, 412, 428, 430, 431, 433, 434, 435, 436, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 477, 480, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522], "output_dim": [7, 353, 382, 412], "output_directori": 2, "output_dtyp": [1, 154], "output_fil": 6, "output_nam": [1, 154], "output_pad": [0, 111, 112, 113, 363, 364, 365], "output_shap": [1, 154], "output_strip_trailing_whitespac": 4, "output_vari": 4, "outsid": [154, 173], "over": [0, 2, 6, 7, 16, 18, 27, 28, 29, 30, 107, 108, 109, 110, 111, 112, 113, 117, 118, 119, 120, 160, 161, 164, 165, 168, 171, 189, 212, 217, 219, 226, 230, 232, 234, 240, 252, 254, 281, 298, 299, 305, 309, 316, 318, 326, 358, 360, 361, 362, 363, 364, 365, 374, 380, 413, 454, 501, 504, 513, 515, 517], "overal": 2, "overhead": [1, 512, 518, 522], "overlap": 1, "overload": 19, "overrid": [2, 141], "overview": 3, "overwrit": 6, "own": [9, 513, 519], "owndata": 519, "p": [9, 210, 259, 353, 366, 367, 368, 465, 489, 491], "pack": [178, 256, 257], "packag": [2, 5, 7, 9, 346, 480, 513, 517], "package_data": 2, "pad": [0, 1, 107, 108, 109, 110, 111, 112, 113, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 355, 356, 357, 360, 361, 362, 363, 364, 365, 385, 386, 387], "pad_valu": 0, "pad_width": [0, 251], "padding_hi": 0, "padding_lo": 0, "page": [513, 520], "pain": 6, "pair": [0, 2, 251, 396, 417], "pairwis": 465, "pan": 6, "paper": [358, 422, 486, 487, 488, 491, 492], "parallel": [513, 522], "param": [325, 334, 353, 480, 514, 515], "paramet": [0, 1, 2, 5, 6, 7, 13, 14, 15, 16, 17, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 238, 240, 241, 242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 378, 379, 
380, 381, 382, 385, 386, 387, 389, 390, 393, 394, 396, 401, 402, 403, 406, 407, 408, 409, 410, 411, 412, 413, 414, 417, 419, 422, 426, 428, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 445, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 477, 479, 480, 483, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 498, 499, 500, 501, 502, 503, 504, 505, 507, 512, 513, 514, 515, 518], "parameter_scal": 487, "parametr": [410, 467], "pars": [6, 150], "parse_arg": 6, "parser": 6, "part": [1, 2, 51, 67, 149, 151, 187, 188, 272, 515, 516], "parti": 513, "partial": [407, 408, 512, 518], "particip": [130, 131, 134, 135, 136], "particular": [256, 374], "particularli": 512, "partit": [0, 29], "pass": [1, 2, 6, 7, 9, 69, 83, 177, 178, 250, 251, 325, 333, 335, 337, 338, 339, 353, 394, 406, 407, 408, 419, 512, 513, 514, 517, 518], "password": [513, 517], "path": [3, 4, 9, 140, 149, 150, 151, 188, 238, 285, 286, 334, 339, 396, 493, 513, 517], "pattern": [353, 518], "peak": [181, 276], "penalti": [494, 500], "penros": 213, "pep": 519, "per": [6, 7, 125, 157, 178, 256, 257, 333, 334, 358, 374, 378, 380, 413, 507, 512, 513, 517, 518], "perceptron": [8, 514], "perf_count": 512, "perfectli": 518, "perform": [0, 1, 2, 3, 6, 8, 15, 96, 110, 117, 118, 119, 120, 136, 139, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 177, 178, 185, 206, 208, 226, 231, 257, 280, 298, 312, 333, 353, 374, 430, 435, 436, 485, 494, 512, 513, 516, 518, 522], "perhap": [2, 6], "perm": 7, "permtuat": 266, "permuat": 210, "permut": [0, 7], "persist": 9, "pg": 212, "phi": [371, 442], "physic": 513, "pi": [143, 371, 422, 443, 515], "pick": 2, "pip": [2, 4, 9], "pipelin": 2, "pivot": [210, 211], "pixel": 367, "place": [6, 39, 279, 280, 334, 513, 518, 519], "placehold": 512, "plai": [2, 6], "plain": 419, "plan": [2, 512], "platform": 9, "plot": 513, "plu": [0, 223], "png": 513, "point": [0, 2, 5, 6, 9, 87, 172, 175, 257, 344], "pool": [355, 356, 357, 385, 386, 387, 522], "popul": 2, "port": 517, "portion": 366, "posinf": [0, 244], "posit": [0, 6, 29, 127, 156, 173, 182, 195, 202, 203, 242, 244, 252, 264, 279, 319, 325, 338, 353, 360, 361, 362, 363, 364, 365, 409, 417, 422, 455, 465, 514], "possibl": [132, 177, 300, 370, 411, 512, 513, 516, 522], "possibli": [1, 6, 15, 96, 177, 231, 336], "postur": 6, "power": [0, 515, 519], "practic": [2, 512], "pre": [9, 157, 452], "preced": 374, "precis": [0, 6, 148, 157, 353, 371, 413, 452, 495, 512], "preclud": 353, "pred": [456, 460], "predic": [334, 401, 493], "predict": [452, 455, 456, 457, 458, 459, 460, 462, 463, 464], "prefix": [329, 337, 339], "prelu": 353, "prepar": [2, 6, 513], "prepend": [3, 231], "preprint": [6, 486, 492], "preprocessor": 9, "present": 1, "preserv": [277, 515], "press": [6, 212], "pretti": [512, 518], "prevent": [306, 465, 519], "previou": [287, 290, 291], "primal": [1, 2, 121, 197, 328], "primit": 515, "print": [1, 2, 5, 6, 7, 9, 88, 121, 198, 336, 337, 338, 339, 341, 353, 509, 512, 513, 514, 515, 516, 517, 518, 519, 520], "prior": [255, 312, 313], "priorit": 515, "privat": [2, 4], "prng": [259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 509], "prob": 452, "probabl": [9, 267, 366, 367, 368, 412, 452, 454, 458, 522], "problem": [5, 7, 353], "process": [6, 110, 114, 129, 130, 131, 132, 134, 135, 136, 333, 338, 339, 368, 370, 430, 511, 513, 517], "processor": 9, "prod": [0, 1], "produc": [0, 2, 9, 102, 409, 480, 514], "product": [0, 2, 15, 87, 119, 189, 197, 198, 204, 231, 250, 254, 316, 328, 409, 520], 
"profil": 3, "program": [4, 181], "programmat": 408, "project": [3, 4, 6, 409, 514], "project_source_dir": 2, "promot": [2, 157], "promote_typ": 2, "promoted_dtyp": 2, "prompt": 6, "propag": [515, 516], "properti": [32, 39, 48, 51, 53, 64, 65, 67, 72, 74, 402, 405, 497, 513, 515], "proportion": 336, "protocol": 519, "provid": [0, 2, 6, 87, 125, 149, 150, 182, 265, 266, 279, 316, 325, 333, 338, 340, 346, 353, 389, 394, 396, 406, 407, 408, 411, 412, 430, 431, 479, 483, 493, 513, 514, 521, 523], "pseudo": [213, 509], "pth": 6, "public": [2, 353], "pun": 0, "pure": [1, 121, 353, 485], "purpos": [1, 212, 513], "purs": 6, "push": 2, "push_back": 2, "put": [0, 1, 7, 255, 512, 513], "put_along_axi": [0, 210], "py": [2, 6, 9, 513, 517], "pypi": 9, "python": [1, 3, 4, 6, 52, 72, 82, 88, 145, 333, 337, 338, 339, 340, 341, 483, 495, 496, 498, 511, 513, 514, 515, 517, 519], "python_execut": 4, "python_requir": 2, "pytorch": [6, 8, 371, 374, 515], "pytorch_compat": 374, "q": [157, 214], "quantiz": [0, 125, 178, 220, 257, 411, 412], "quantized_matmul": 0, "quantizedembed": 353, "quantizedlinear": 353, "quarter": 6, "queri": [6, 157, 291, 409], "query_input_dim": 409, "query_proj": 6, "question": [6, 518], "queue": 3, "quick": [2, 8], "quit": [515, 519], "quotient": [0, 137, 138, 175], "r": [2, 6, 214, 325, 367, 373], "r_t": 373, "race": 522, "radian": [0, 124], "rag": 6, "rain": 6, "rais": [0, 6, 121, 212, 253, 300, 396, 514], "ram": [6, 290], "random": [1, 2, 3, 5, 6, 7, 8, 154, 157, 355, 356, 357, 358, 378, 385, 386, 387, 396, 403, 512, 514, 515, 522, 523], "randomli": [5, 6, 266, 366, 367, 368], "rang": [0, 2, 3, 5, 6, 7, 9, 19, 173, 177, 219, 434, 436, 443, 444, 485, 501, 502, 503, 504, 505, 509, 512, 515, 518, 522], "rank": [0, 134, 135, 136, 461, 513, 517], "rate": [5, 485, 486, 487, 488, 489, 490, 491, 492, 494, 499, 500], "rather": [2, 515, 522], "ratio": [0, 25], "rceil": 96, "re": [7, 9, 265, 480], "reachabl": 513, "readabl": 3, "real": [0, 166, 167, 168, 169, 170, 171, 202, 203, 205, 206, 207, 208, 265], "realli": 380, "reason": [1, 6, 516], "reboot": 9, "receiv": [134, 135, 334, 503, 513, 519], "reciproc": [0, 281], "reclaim": 287, "recommend": [9, 290, 492, 494], "recompil": [102, 512], "reconstruct": 210, "record": [3, 181, 518], "recreat": [341, 485], "rectifi": [381, 415, 416, 435, 436, 449, 468, 469], "recurr": [373, 379, 414], "recurs": [150, 353, 393, 394, 399, 404, 406, 483], "recv": [135, 513], "reduc": [0, 1, 9, 16, 18, 27, 28, 131, 230, 232, 234, 240, 254, 305, 309, 326, 333, 340, 358, 430, 460], "reduct": [16, 18, 131, 230, 232, 240, 254, 340, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465], "redund": 515, "refer": [212, 378, 388, 402, 433, 434, 435, 436, 444, 466, 516], "reflect": [402, 512, 516, 519], "regard": 371, "regardless": [87, 157, 513], "regist": [2, 7], "regress": [8, 460], "regular": [39, 367, 466, 490, 512, 514, 516], "regularli": 2, "reimplement": 2, "rel": [17, 190, 487, 512, 513], "relative_step": 487, "releas": 4, "relev": 2, "reli": [1, 2], "relu": [353, 410, 430, 467, 480], "relu6": 353, "remain": [0, 6, 291, 325, 339, 366, 367, 368, 513], "remaind": [0, 138], "remov": [0, 127, 231, 260, 303, 454], "rep": [0, 317], "repeat": [0, 317], "repeatedli": 5, "repetit": 275, "replac": [0, 6, 244, 407, 408, 430, 464], "replai": 3, "repli": 6, "repo": [5, 7, 9, 512], "report": [179, 290], "repres": [2, 6, 129, 132, 178, 461, 465, 519], "represent": [6, 211, 256, 327, 337, 341], "requir": [1, 2, 4, 6, 353, 513, 517, 518, 519], "requires_grad": 515, 
"rerun": [512, 518], "rescal": 336, "research": 8, "reset": 276, "reset_peak_memori": 181, "reshap": [0, 6, 212, 431, 512, 516], "resid": 291, "resolv": 2, "resourc": 2, "resource_limit": 236, "respect": [2, 5, 7, 121, 153, 155, 177, 178, 182, 256, 325, 338, 353, 358, 371, 374, 378, 380, 483, 513, 515, 517, 520], "respons": 2, "rest": [6, 156, 338, 339, 417, 517], "restart": 9, "restor": 279, "result": [0, 6, 15, 19, 39, 82, 87, 102, 150, 153, 155, 178, 198, 212, 231, 257, 264, 275, 290, 304, 338, 339, 340, 344, 422, 452, 512, 513, 515, 519], "resum": 6, "return": [0, 1, 2, 4, 5, 6, 7, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 52, 72, 82, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 146, 147, 148, 149, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 180, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 240, 241, 242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 277, 278, 280, 281, 287, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 334, 335, 336, 337, 338, 339, 340, 341, 353, 373, 379, 389, 390, 391, 393, 394, 395, 396, 397, 398, 399, 403, 404, 406, 407, 408, 414, 432, 433, 434, 435, 436, 437, 438, 439, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 480, 483, 493, 495, 511, 512, 513, 514, 515, 516, 518, 519, 521, 522], "return_metadata": 220, "revers": [0, 2, 42, 43, 44, 45, 58, 87, 117, 118, 119, 120, 226, 320, 422], "rf": 9, "rfft": 166, "rfft2": 167, "rfftn": 168, "rho": 486, "rhs_indic": [0, 177, 178], "rhs_mask": 96, "right": [0, 1, 2, 9, 205, 256, 278, 279, 371, 431, 443, 444, 455, 457, 465], "right_shift": 0, "ring": 132, "rm": [6, 9, 155, 487], "rmsnorm": [6, 353], "rmsprop": 485, "rnn": [353, 373], "robust": 460, "roform": [6, 417], "roll": 0, "root": [0, 6, 155, 281, 301, 413], "rope": [6, 353], "rosetta": 9, "rotari": [6, 156, 417], "rotat": [156, 417], "round": [0, 256], "row": [0, 1, 2, 87, 106, 152, 154, 186, 256, 321], "rpath": 2, "rsqrt": 0, "rtol": [0, 17, 190], "rule": [2, 485], "run": [1, 2, 3, 4, 6, 7, 8, 9, 10, 154, 250, 342, 358, 389, 486, 487, 489, 490, 491, 512, 514, 517, 518, 522, 523], "runtim": [6, 132, 346, 512, 513], "runtime_error": 2, "safetensor": [9, 220, 284, 396, 400, 485, 518, 521], "sai": [2, 6, 480, 518], "said": 6, "sake": 515, "same": [0, 2, 6, 9, 17, 39, 86, 97, 98, 102, 108, 109, 110, 112, 113, 114, 130, 153, 155, 161, 165, 166, 169, 170, 171, 178, 182, 190, 197, 251, 260, 279, 280, 297, 327, 328, 330, 333, 339, 353, 356, 357, 358, 366, 374, 378, 386, 387, 411, 432, 433, 434, 435, 436, 437, 438, 439, 454, 465, 483, 495, 509, 512, 513, 514, 516, 517, 522], "sampl": [2, 5, 6, 219, 259, 260, 261, 263, 264, 267, 270, 271, 433, 434, 435, 436, 438, 439, 455, 461, 465, 509, 512, 514], "sat": 6, "save": [3, 6, 8, 220, 238, 256, 
283, 284, 285, 286, 400, 514, 518], "save_gguf": 521, "save_safetensor": [400, 485, 521], "save_weight": 353, "savez": [6, 400, 521], "savez_compress": 521, "saw": [6, 515], "scalar": [0, 2, 14, 15, 17, 31, 52, 82, 86, 92, 93, 94, 95, 96, 98, 101, 137, 138, 142, 175, 176, 182, 183, 184, 185, 190, 199, 200, 201, 219, 225, 227, 228, 229, 231, 233, 241, 243, 244, 247, 251, 253, 259, 265, 267, 270, 271, 274, 278, 283, 308, 325, 327, 330, 335, 465, 514, 515, 518, 520], "scale": [0, 2, 6, 15, 125, 153, 155, 156, 157, 178, 185, 256, 257, 263, 265, 336, 367, 368, 380, 409, 417, 418, 422, 431, 470, 487], "scale_arr": 2, "scale_factor": 431, "scale_paramet": 487, "scatter": 0, "scatter_add": 0, "scatter_add_axi": 0, "scatter_max": 0, "scatter_min": 0, "scatter_prod": 0, "schedul": [2, 485, 501, 502, 503, 504, 505, 507, 522], "schema": [3, 517], "schulz": 494, "scipi": [185, 210], "scope": 353, "score": [6, 157, 461], "script": [513, 517], "sdk": 9, "se": 1, "second": [6, 9, 127, 196, 198, 199, 227, 229, 231, 278, 310, 319, 325, 356, 357, 386, 387, 453, 461, 487, 491, 512, 514, 515, 522], "second_layer_a": 518, "second_layer_b": 518, "secret": 6, "section": [1, 6, 9, 300, 465, 512, 513, 515], "see": [1, 2, 4, 6, 7, 9, 11, 12, 33, 34, 35, 36, 37, 40, 41, 42, 43, 44, 45, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 212, 283, 284, 287, 334, 344, 353, 358, 359, 367, 369, 371, 375, 376, 377, 383, 384, 392, 410, 411, 412, 415, 416, 417, 418, 420, 422, 423, 424, 425, 426, 427, 429, 431, 433, 434, 435, 436, 442, 443, 444, 470, 512, 513, 514, 515, 516, 517, 520, 522], "seed": 262, "seen": [513, 519], "segment": 0, "segmented_mm": 0, "select": [0, 3, 9, 206, 208, 318, 330, 389, 393, 401, 517], "self": [6, 7, 10, 31, 32, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 52, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 121, 172, 353, 466, 483], "selu": 353, "semant": [14, 92, 94, 95, 97, 98, 137, 138, 142, 183, 184, 199, 200, 201, 225, 231, 233, 241, 243, 247, 253, 274, 278, 308, 522], "semi": [202, 203, 264], "send": 513, "sender": 513, "sennrich": 6, "sensit": 460, "sentencepiec": 6, "separ": [0, 6, 69, 83, 374, 461], "sequenc": [6, 16, 18, 34, 35, 59, 60, 61, 62, 66, 78, 79, 80, 84, 87, 98, 110, 134, 147, 154, 159, 160, 161, 163, 164, 165, 167, 168, 170, 171, 176, 182, 230, 232, 234, 240, 248, 254, 259, 260, 261, 263, 264, 265, 267, 270, 271, 277, 296, 297, 298, 300, 303, 305, 309, 316, 317, 320, 324, 325, 326, 331, 358, 360, 363, 373, 379, 414, 430, 509, 522], "sequenti": [353, 480], "seri": 9, "serial": 485, "set": [2, 4, 6, 7, 9, 102, 121, 128, 130, 131, 132, 134, 135, 136, 141, 153, 155, 156, 236, 287, 288, 289, 290, 291, 307, 333, 371, 380, 382, 392, 394, 401, 402, 403, 406, 407, 412, 417, 428, 453, 465, 477, 483, 485, 487, 489, 490, 496, 509, 514, 515, 518], "set_byt": 2, "set_compute_pipeline_st": 2, "set_data": 2, "set_dtyp": 353, "set_input_arrai": 2, "set_memory_limit": 287, "set_output_arrai": 2, "set_vector_byt": 2, "setup": [2, 4, 5, 7, 9, 512, 513, 514], "sever": [6, 9, 107, 108, 109, 110, 111, 112, 113, 285, 286, 333, 512, 513, 517, 521], "sgd": [5, 7, 485, 492, 496, 501, 502, 505, 512], "shade": [1, 2], "shall": 6, "shape": [0, 2, 3, 6, 7, 69, 86, 87, 96, 97, 98, 102, 107, 108, 109, 110, 111, 112, 113, 127, 130, 134, 135, 149, 151, 154, 157, 158, 161, 162, 165, 166, 169, 170, 171, 176, 177, 185, 197, 209, 218, 231, 248, 249, 259, 260, 261, 263, 
264, 265, 267, 270, 271, 277, 279, 297, 324, 327, 328, 330, 331, 332, 353, 355, 356, 357, 358, 360, 361, 362, 363, 364, 365, 367, 368, 373, 378, 379, 382, 385, 386, 387, 396, 414, 432, 433, 434, 435, 436, 437, 438, 439, 454, 465, 485, 512, 514, 515, 516, 520, 522], "shapeless": [0, 102, 149, 151], "share": [8, 125, 178, 256, 257, 327, 513], "shazeer": 6, "shift": [0, 161, 165, 199, 278, 279, 358], "shop": 6, "should": [1, 2, 4, 5, 6, 7, 9, 87, 100, 127, 130, 153, 154, 155, 157, 197, 238, 255, 256, 291, 313, 319, 325, 328, 333, 334, 337, 353, 360, 361, 362, 363, 364, 365, 367, 368, 403, 409, 419, 454, 456, 461, 483, 493, 494, 511, 512, 513, 514, 515, 518, 519, 523], "show": [9, 344, 512], "shown": 2, "shuffl": 7, "side": [0, 251, 355, 356, 357, 363, 364, 365, 385, 386, 387, 512], "sigma": [371, 372, 373, 379, 421, 433, 434, 435, 436, 444, 445, 450, 471, 472], "sigmoid": [0, 6, 353, 383, 420, 444, 450, 452, 472], "sign": [0, 17, 190, 344, 492], "signal": [114, 431], "signatur": [1, 154], "signedinteg": [12, 196], "signific": 256, "significantli": 513, "silent": [169, 170, 171], "silicon": [2, 6, 8, 9, 522], "silu": 353, "sim": 265, "simd": 1, "simd_sum": 1, "simdgroup": 1, "simdgroup_s": 1, "similar": [6, 178, 196, 338, 407, 408, 409, 453, 513, 519, 521], "similarli": [2, 9, 231, 515, 518], "simpl": [2, 6, 7, 353, 370, 479, 485, 512, 513, 514, 515, 517, 518], "simple_axpbi": 2, "simple_tim": 2, "simplest": [2, 353, 513], "simpli": [2, 6, 9, 369, 381, 415, 441, 449, 468, 478, 483, 512, 513, 515, 517], "simplic": 0, "simplifi": 513, "simultan": 1, "sin": [0, 121, 422, 514, 515, 520], "sinc": [1, 2, 6, 7, 178, 181, 483, 492, 503, 512, 514, 519, 522], "sine": [0, 22, 23, 294, 295, 514, 515], "sing": 212, "singer": 488, "singl": [7, 88, 145, 197, 220, 235, 251, 328, 356, 357, 386, 387, 512, 514, 516, 521], "singleton": [0, 16, 18, 27, 28, 132, 230, 231, 232, 234, 240, 254, 305, 309, 326, 513], "singular": [212, 213, 217], "sinh": 0, "sinusoid": 422, "sinusoidalpositionalencod": 353, "size": [0, 1, 2, 6, 7, 53, 72, 96, 108, 109, 112, 113, 125, 147, 153, 154, 155, 157, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 176, 178, 180, 185, 186, 196, 204, 212, 256, 257, 260, 277, 290, 291, 296, 300, 303, 324, 327, 333, 334, 353, 355, 356, 357, 360, 361, 362, 363, 364, 365, 370, 378, 385, 386, 387, 411, 412, 431, 487, 513, 518, 519], "size_in_megabyt": 291, "size_t": [0, 2], "skip": [3, 87], "slice": [0, 297, 516], "slice_s": [0, 296], "slice_upd": 0, "slide": [355, 356, 357, 385, 386, 387], "slight": [6, 518], "slightli": [417, 522], "slope": 381, "slow": 512, "slowli": 6, "sm": 9, "small": [6, 148, 153, 155, 333, 358, 374, 380, 413, 455, 460, 465, 512, 522], "smaller": [0, 9, 252, 333, 492, 512], "smallest": 212, "smile": 6, "smooth": [454, 464, 499], "smooth_l1_loss": 353, "sned": 136, "snippet": 513, "so": [1, 2, 6, 9, 182, 185, 325, 366, 431, 485, 512, 513, 518, 522], "socket": 513, "softmax": [0, 6, 157, 353, 384, 451, 454], "softmin": 353, "softplu": [353, 388, 466], "softshrink": 353, "softsign": 353, "solut": [215, 216], "solv": 353, "some": [0, 2, 5, 6, 7, 150, 394, 406, 485, 496, 512, 513, 514, 515, 517, 518], "someon": 6, "someth": [5, 6, 516], "sometim": 512, "sonoma": 9, "soon": 6, "sort": [0, 29, 30, 177, 178, 252, 318], "sorted_indic": [0, 177, 178], "sourc": [0, 1, 2, 3, 4, 63, 134, 135, 154, 242, 320, 513], "space": [0, 2, 219, 452, 463], "spars": [0, 235], "spatial": [108, 109, 110, 112, 113, 355, 356, 357, 374, 385, 386, 387, 431], "speak": [6, 212], "specif": [1, 2, 9, 
513, 515], "specifi": [0, 2, 19, 38, 108, 109, 110, 112, 113, 127, 167, 168, 176, 182, 204, 212, 219, 242, 248, 255, 260, 275, 310, 312, 313, 316, 319, 320, 325, 329, 331, 358, 428, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 477, 512, 513, 514, 515, 522], "spectrum": 161, "speed": 1, "spent": 6, "split": [0, 372, 374, 445], "splittabl": 509, "sqrt": [0, 6, 143, 157, 185, 358, 371, 374, 378, 380, 382, 413, 422, 433, 434, 435, 436, 443, 486, 488, 489, 490, 499, 512], "squar": [0, 5, 6, 155, 186, 205, 207, 209, 218, 281, 301, 325, 338, 353, 413, 462, 464, 486, 487, 489, 490, 491, 515, 519], "squeez": [0, 431, 512], "src": [0, 134, 135], "ssh": [513, 517], "stabil": [153, 155, 358, 374, 378, 380, 413, 452, 453, 455, 486, 487, 488, 489, 490, 491, 499], "stabl": [225, 230, 298, 460], "stable_abi": 2, "stack": [0, 512], "standard": [0, 1, 4, 7, 52, 82, 231, 261, 265, 305, 430, 433, 435, 438, 513, 520], "starmap": [6, 338], "start": [0, 1, 2, 5, 6, 8, 9, 19, 156, 219, 238, 296, 297, 300, 340, 512, 514, 516, 517, 522], "start_axi": [0, 50, 173], "start_captur": 3, "start_indic": [296, 297], "state": [6, 7, 353, 373, 379, 414, 485, 496, 509, 512], "static": [9, 512], "static_cast": 2, "std": [0, 2, 4, 438, 514], "stderr": 517, "stdout": 517, "step": [0, 3, 4, 6, 7, 19, 333, 353, 373, 379, 414, 487, 494, 496, 501, 503, 504, 505, 512, 513], "step_decai": 485, "step_siz": 505, "still": [6, 9, 212, 512, 518], "stochast": [488, 489, 491, 500, 518], "stood": 6, "stop": [0, 2, 6, 19, 219, 239, 306, 515, 516], "stop_captur": 3, "stop_gradi": [0, 515], "storag": 87, "store": [6, 337], "str": [114, 132, 139, 140, 149, 150, 151, 154, 157, 182, 188, 206, 208, 212, 220, 235, 236, 238, 282, 283, 284, 285, 286, 325, 334, 337, 341, 389, 390, 393, 394, 396, 398, 400, 406, 431, 435, 436, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 493], "straight": 6, "strang": 6, "stream": [2, 8, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 123, 124, 125, 126, 127, 130, 131, 134, 135, 136, 137, 138, 139, 142, 143, 144, 146, 147, 148, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 183, 184, 185, 186, 187, 189, 190, 191, 192, 193, 194, 195, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 277, 278, 279, 280, 281, 289, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 326, 327, 330, 331, 332, 513, 522], "streamcontext": 307, "streamordevic": [0, 2], "street": 6, "strength": [492, 494, 500], "strict": [132, 183, 200, 394, 396, 406, 407, 408], "strictli": [212, 291], "stride": [0, 2, 87, 107, 108, 109, 110, 111, 112, 113, 355, 356, 357, 360, 361, 362, 363, 364, 365, 385, 386, 387, 417, 516], "string": [0, 2, 140, 149, 154, 157, 188, 236, 251, 514, 519, 521], 
"stronger": 517, "structur": [2, 333, 495, 515], "stub": 9, "style": [2, 14, 17, 92, 94, 95, 137, 138, 142, 183, 184, 190, 199, 200, 201, 225, 231, 233, 241, 243, 247, 253, 274, 278, 308], "su": 6, "sub": [0, 7, 127, 269, 296, 297, 319, 334, 494], "subarrai": [127, 300], "subclass": 483, "subdivid": 1, "subdtyp": 196, "subgradi": 488, "sublinear": 487, "submodul": [6, 7, 353, 390, 394, 395, 406, 408], "subnetwork": 513, "suboptim": 514, "subscript": [139, 140], "subsect": 6, "subsequ": [132, 485, 513, 517], "subset": [353, 393, 407, 408], "substanti": 9, "subtl": 512, "subtract": [0, 39], "subtyp": [196, 344], "succe": 132, "successfulli": 513, "sudo": [9, 291, 513], "suggest": 513, "sum": [0, 2, 5, 14, 120, 131, 151, 189, 212, 230, 298, 316, 319, 353, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 513, 516, 519], "sum_": [212, 460], "sum_i": 451, "sum_j": [473, 474], "summat": [139, 140], "super": [6, 7, 353, 483], "superset": [338, 495], "support": [1, 2, 6, 8, 9, 17, 96, 109, 112, 113, 157, 173, 185, 190, 202, 203, 205, 206, 207, 208, 209, 213, 214, 217, 218, 220, 231, 256, 264, 513, 515, 516, 519, 521], "suppos": [515, 522], "sure": [2, 3, 6, 9, 353, 512], "surpass": [435, 436], "surpris": 6, "sw": 1, "swap": [0, 114, 290, 310, 408], "swapax": [0, 121], "swiglu": 6, "swish": [420, 472], "switch": 9, "symbol": 492, "symmetr": [108, 109, 112, 113, 202, 203, 206, 208], "symmetri": [206, 208], "synchron": [2, 512], "syntax": [39, 516], "synthet": 5, "sysctl": 291, "system": [4, 6, 9, 179, 180, 215, 216, 236, 291], "t": [0, 1, 2, 4, 6, 9, 106, 143, 154, 157, 178, 202, 203, 257, 325, 353, 373, 379, 414, 486, 487, 488, 489, 490, 491, 492, 499, 500, 512, 514, 515, 522], "t_kv": 157, "t_q": 157, "tabl": [1, 212, 344, 370], "take": [0, 2, 6, 7, 92, 93, 94, 95, 102, 149, 177, 182, 197, 233, 241, 249, 257, 313, 325, 328, 329, 332, 339, 340, 355, 356, 357, 385, 386, 387, 409, 452, 493, 509, 513, 514, 515, 516, 517, 521, 522, 523], "take_along_axi": [0, 210, 516], "taken": [127, 312, 319], "talk": 513, "tan": 0, "tangent": [0, 2, 24, 25, 26, 121, 197, 314, 315, 429, 478], "tangent_i": 2, "tangent_x": 2, "tanh": [0, 353, 371, 373, 379, 388, 414, 443, 466], "target": [2, 325, 452, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 512], "target_include_directori": 2, "target_link_librari": [2, 4], "target_link_opt": 2, "target_sourc": 2, "task": 460, "tau": 500, "tcp": 513, "tediou": 513, "tell": [4, 6, 512, 519], "temp": 6, "templat": [0, 1, 2, 154], "ten": 518, "tend": 492, "tensor": [220, 316, 465, 519], "tensordot": 0, "term": [2, 455, 486, 487, 488, 489, 490, 491, 499], "termin": [9, 517], "test": [7, 9, 513, 517], "test_imag": 7, "test_label": 7, "text": [6, 265, 371, 373, 379, 388, 414, 421, 428, 433, 434, 435, 436, 443, 446, 447, 448, 455, 456, 457, 460, 461, 464, 466, 467, 470, 471, 476, 477, 487, 492], "textrm": [256, 371, 372, 442, 445], "tf": 519, "tgp_size": 2, "th": [117, 118, 119, 120, 126, 152, 205, 206, 226, 503], "than": [1, 2, 6, 82, 114, 127, 138, 156, 177, 183, 184, 200, 201, 202, 203, 205, 206, 207, 208, 209, 210, 213, 217, 218, 231, 287, 291, 336, 338, 417, 428, 431, 461, 464, 477, 487, 492, 493, 512, 514, 515, 522], "thank": 518, "thei": [1, 2, 5, 6, 9, 17, 114, 178, 190, 419, 456, 483, 492, 511, 512, 513, 514, 518, 520, 521, 522], "them": [0, 2, 6, 130, 353, 394, 406, 513, 514, 517, 522], "themselv": [2, 512], "thi": [0, 1, 2, 4, 6, 7, 9, 16, 17, 18, 19, 27, 28, 29, 30, 87, 88, 100, 121, 141, 149, 151, 154, 177, 178, 179, 185, 188, 190, 197, 202, 
203, 205, 206, 207, 208, 209, 212, 213, 214, 217, 218, 225, 230, 231, 232, 234, 240, 252, 254, 260, 289, 291, 298, 299, 300, 305, 309, 312, 318, 326, 333, 336, 339, 340, 353, 366, 367, 368, 372, 373, 379, 390, 391, 393, 394, 397, 398, 399, 404, 406, 407, 408, 409, 412, 414, 428, 433, 434, 435, 436, 443, 444, 445, 452, 460, 477, 483, 493, 496, 511, 512, 513, 514, 515, 517, 518, 519, 521], "thin": 517, "thing": [2, 6], "third": [204, 357, 387, 513, 514], "thompson": 367, "those": [2, 6, 353, 494], "though": [2, 6, 512, 514, 518, 519], "thousand": 518, "thread": [1, 2], "thread_index_in_simdgroup": 1, "thread_position_in_grid": [1, 2, 154], "threadgroup": [1, 2, 154], "threads_per_simdgroup": 1, "three": [6, 91, 357, 387, 431], "threefri": 509, "threshold": [428, 457, 464, 477], "through": [1, 2, 306, 430, 492, 512, 513, 514, 515, 519], "throw": [2, 102, 132], "thu": [6, 353], "thumb": 485, "tic": 512, "tieleman": 499, "tile": [0, 157], "time": [1, 2, 6, 9, 290, 317, 353, 373, 379, 414, 512, 513, 515, 518, 522], "timeit": [512, 515], "titl": 2, "tmp": [1, 154], "to_quant": 334, "to_stream": 2, "toc": 512, "togeth": [0, 1, 2, 7, 256, 338, 339, 513], "tok_embed": 6, "token": [6, 370, 411], "told": 6, "toler": [0, 17, 190], "too": [196, 512, 518], "took": 6, "tool": 9, "toolkit": 9, "top": [2, 318, 382, 431], "topk": 0, "torch": [6, 519], "torch_weight": 6, "total": [291, 515], "total_norm": 336, "tpi": 512, "tpng": 513, "trace": [0, 3, 151, 512], "trace_fil": 3, "tracer": 407, "track": [2, 353, 358], "track_running_stat": 358, "trade": 518, "tradit": [6, 156, 367, 368, 417], "train": [6, 7, 353, 358, 366, 367, 368, 392, 394, 406, 433, 434, 514], "train_imag": [7, 485], "train_label": [7, 485], "trainabl": [7, 335, 353, 483], "trainable_paramet": [353, 393, 496], "transfer": 517, "transform": [1, 6, 8, 121, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 185, 335, 353, 358, 374, 380, 382, 393, 394, 406, 412, 417, 516], "transformerencod": 285, "transit": 503, "translat": [153, 380], "transpos": [0, 6, 32, 111, 112, 113, 178, 257, 363, 364, 365], "treat": [0, 2, 121, 167, 168, 170, 171, 312, 431, 512], "tree": [8, 88, 102, 145, 182, 325, 329, 333, 337, 338, 339, 340, 341, 495, 496, 498, 507, 515], "tree_flatten": [285, 338, 341, 353, 485, 514], "tree_map": [339, 353, 513], "tree_unflatten": [6, 485, 514], "trembl": 6, "tri": [0, 132], "triangl": [206, 208, 321], "triangular": [202, 203, 216, 218], "trigger": 512, "tril": 0, "trilinear": 431, "triplet": 465, "triplet_loss": 353, "triu": 0, "true": [0, 1, 2, 4, 5, 6, 17, 42, 43, 44, 45, 58, 86, 102, 117, 118, 119, 120, 154, 156, 178, 190, 196, 202, 203, 212, 217, 220, 226, 235, 257, 298, 330, 334, 337, 338, 339, 340, 344, 353, 358, 360, 361, 362, 363, 364, 365, 373, 374, 378, 379, 380, 382, 393, 394, 396, 403, 406, 407, 408, 412, 414, 417, 422, 430, 431, 452, 460, 487, 489, 490, 493, 494, 512, 514], "truncat": [158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 270], "truth": [5, 454, 464], "try": [2, 9, 513], "tupl": [0, 31, 69, 72, 75, 83, 88, 97, 103, 108, 109, 110, 112, 113, 134, 138, 140, 145, 147, 149, 159, 160, 163, 164, 167, 168, 170, 171, 188, 197, 205, 206, 210, 211, 212, 214, 217, 251, 256, 277, 279, 296, 297, 303, 324, 325, 328, 337, 338, 339, 340, 341, 355, 356, 357, 361, 362, 364, 365, 385, 386, 387, 396, 398, 419, 431, 487, 489, 490, 491, 492, 511, 514, 515], "tutori": 2, "twice": 522, "two": [0, 2, 14, 15, 17, 25, 86, 90, 92, 94, 95, 96, 127, 137, 142, 159, 163, 170, 177, 178, 183, 184, 190, 198, 200, 201, 202, 
203, 204, 205, 206, 207, 208, 209, 213, 214, 217, 218, 225, 231, 233, 241, 243, 247, 250, 310, 340, 356, 372, 379, 386, 445, 453, 512, 513, 514, 515, 516, 522], "txt": [2, 4], "type": [0, 1, 2, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 72, 82, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 146, 147, 148, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 240, 241, 242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 269, 270, 271, 272, 273, 274, 275, 277, 278, 280, 281, 287, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 308, 309, 310, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 336, 337, 340, 353, 401, 430, 432, 433, 434, 435, 436, 437, 438, 439, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 512, 514, 516, 519], "type_to_nam": 2, "typenam": [0, 1, 2], "typic": [0, 157, 333, 370, 485, 512, 518], "u": [1, 2, 4, 202, 206, 208, 210, 217, 382, 408, 507, 513, 517, 518], "u_": 486, "u_t": 486, "ubuntu": 9, "ubuntu2204": 9, "uint": [1, 2, 154], "uint16": [12, 344], "uint3": 1, "uint32": [12, 27, 28, 29, 30, 260, 344], "uint64": [12, 344], "uint8": [12, 344], "ultra": 6, "unabl": 9, "unam": 9, "unari": 512, "unchang": [156, 306, 417], "uncheck": 9, "uncompress": 285, "undefin": [0, 29, 121, 202, 203, 252, 264, 516], "under": 212, "underli": [2, 327], "understand": [6, 433, 434], "unevalu": 150, "unexpect": [2, 19], "unexpectedli": 517, "unflatten": 0, "unfreez": [353, 394], "unfrozen": 406, "unifi": 8, "uniform": [3, 353, 382, 396, 434, 436, 480, 509, 512, 515, 522], "uniformli": 271, "unintend": 0, "union": [19, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 89, 90, 91, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 194, 195, 196, 198, 205, 206, 207, 208, 217, 236, 283, 307, 337], "uniqu": [2, 215, 216, 509, 513], "unique_ptr": 2, "unit": [359, 369, 371, 372, 373, 381, 415, 416, 418, 420, 433, 434, 435, 436, 440, 441, 442, 443, 444, 445, 449, 468, 469, 470, 472], "unittest": 9, "univers": 212, "unless": [6, 17, 190, 212, 483], "unlik": [6, 17, 190, 210, 367, 368, 402, 516], "unnecessari": [2, 6], "unnorm": [260, 452, 454], "unscal": 487, "unsign": [178, 256, 257, 344], "unsignedinteg": 12, "unspecifi": [16, 18, 19, 27, 28, 29, 30, 103, 117, 118, 119, 120, 176, 226, 230, 232, 234, 240, 248, 252, 254, 275, 298, 299, 305, 309, 312, 318, 319, 326, 331, 523], "unsqueez": 6, "unsupport": 220, "until": [2, 333, 518, 520], "unus": 2, "up": [1, 2, 6, 121, 512], "upcast": 2, "updat": [0, 1, 2, 5, 6, 7, 9, 39, 102, 297, 334, 338, 340, 358, 389, 390, 396, 401, 402, 403, 408, 485, 487, 490, 492, 495, 496, 500, 501, 502, 503, 504, 505, 512, 513, 514, 518], "update_modul": 353, 
"uplo": [206, 208], "upon": [6, 338, 339], "upper": [202, 203, 206, 208, 216, 218, 256, 267, 270, 271, 439], "upsampl": 353, "us": [0, 3, 5, 6, 7, 8, 9, 19, 39, 87, 121, 125, 128, 130, 131, 134, 135, 136, 138, 154, 156, 173, 178, 179, 180, 181, 199, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 231, 256, 257, 265, 275, 277, 278, 279, 287, 290, 291, 311, 324, 333, 337, 340, 344, 346, 353, 356, 357, 367, 370, 371, 373, 379, 382, 386, 387, 389, 393, 400, 407, 409, 411, 412, 414, 417, 422, 430, 431, 435, 436, 443, 444, 453, 480, 483, 485, 486, 487, 489, 490, 491, 492, 493, 495, 496, 509, 511, 512, 513, 514, 515, 516, 517, 520, 522], "usag": [430, 512, 513], "user": [2, 6, 353], "usual": [370, 411, 511, 518], "util": [1, 2, 6, 8, 9, 285, 353, 485, 517], "v": [6, 114, 157, 205, 206, 353, 394, 519], "v_": [486, 488, 489, 490, 491, 499, 500], "v_t": [486, 488, 489, 490, 491, 499, 500], "val": [0, 31, 176], "valid": [7, 114, 173, 329, 337, 394, 406, 511, 513], "valid_parameter_filt": 389, "valu": [0, 1, 5, 6, 12, 13, 17, 19, 27, 28, 52, 82, 86, 101, 132, 149, 152, 157, 158, 159, 160, 162, 163, 164, 166, 167, 168, 169, 170, 171, 173, 176, 188, 190, 204, 212, 213, 217, 219, 236, 244, 251, 255, 259, 260, 261, 263, 264, 265, 267, 270, 271, 279, 283, 291, 312, 313, 325, 329, 335, 337, 338, 339, 340, 344, 356, 357, 359, 366, 367, 368, 369, 375, 378, 382, 386, 387, 393, 409, 410, 426, 428, 430, 432, 452, 453, 454, 455, 456, 457, 459, 460, 461, 462, 463, 464, 477, 483, 487, 490, 501, 502, 504, 505, 515], "value_and_grad": [7, 121, 353, 407, 483, 485, 498, 512, 515, 519, 520], "value_and_grad_fn": 518, "value_cach": 6, "value_dim": 409, "value_input_dim": 409, "value_output_dim": 409, "value_proj": 6, "valueerror": [121, 212, 396, 515], "values_hat": 6, "van": 212, "var": [0, 358, 374, 378, 380, 455], "variabl": [9, 102, 121, 128, 141, 149, 150, 151, 182, 197, 325, 328, 329, 512, 513, 514], "varianc": [0, 305, 326, 358, 374, 455], "variant": [6, 464, 491], "variou": 212, "vector": [0, 2, 5, 8, 189, 197, 212, 312, 328, 329, 370, 454, 514, 520], "verbos": [1, 154, 513], "veri": [6, 409, 517, 518, 522], "verifi": [5, 9], "versa": 279, "version": [2, 4, 9, 88, 125, 149, 151, 188, 225, 230, 256, 298, 329, 509, 515, 516], "versu": 512, "via": [9, 121, 495, 498, 513, 517, 518, 519], "vice": 279, "video": 368, "view": [0, 3, 87, 519], "visual": 150, "vjp": [2, 121, 520], "vmap": [2, 121, 514, 515, 518, 520], "vmap_add": 515, "vocab_s": 6, "vocabulari": [370, 411], "void": [1, 2], "volta": 9, "vt": 217, "w": [0, 1, 5, 108, 109, 112, 113, 125, 178, 205, 206, 256, 257, 325, 339, 358, 361, 362, 364, 365, 367, 368, 382, 485, 500, 515], "w1": [6, 336], "w2": [6, 336], "w3": 6, "w_": [373, 379, 414, 486, 487, 488, 489, 490, 491, 492, 499, 500], "w_1": 256, "w_g": 256, "w_i": [125, 256], "w_in": 1, "w_q": 256, "w_star": 5, "w_stride": 1, "w_t": [486, 488, 489, 490, 491, 492, 499, 500], "wa": [4, 6, 87, 134, 135, 513, 514, 518], "wai": [2, 6, 9, 353, 431, 512, 513, 514, 515, 516, 517], "wait": 6, "walk": [6, 514], "walkthrough": 2, "walsh": 185, "want": [1, 2, 6, 513, 514, 515, 517, 522], "warm": [2, 512], "warmup_init": 487, "watch": [6, 512], "wd": 492, "we": [0, 1, 2, 5, 6, 7, 121, 125, 134, 135, 178, 256, 257, 353, 370, 411, 419, 490, 492, 509, 511, 512, 513, 514, 515, 517, 518, 522], "weight": [0, 5, 107, 108, 109, 110, 111, 112, 113, 153, 155, 338, 353, 396, 400, 411, 412, 452, 454, 483, 487, 490, 492, 493, 494, 496, 500, 515, 518], "weight_decai": [487, 490, 492, 494, 500], 
"weight_fil": 6, "weights_fp16": 518, "well": [6, 353, 394, 406, 409, 513, 518], "wen": 6, "went": 6, "were": [6, 522], "wet": 6, "wget": 9, "what": [2, 6, 338, 517], "whatsoev": 6, "whc": 367, "when": [0, 1, 2, 6, 8, 9, 102, 110, 121, 136, 202, 203, 205, 206, 207, 208, 209, 212, 213, 217, 218, 220, 290, 360, 361, 362, 363, 364, 365, 431, 435, 436, 452, 458, 464, 483, 485, 503, 509, 512, 513, 514, 522], "where": [0, 4, 7, 152, 190, 203, 256, 325, 329, 358, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 371, 373, 374, 378, 379, 380, 382, 393, 410, 413, 414, 428, 435, 436, 441, 442, 444, 455, 461, 467, 470, 472, 477, 496, 513, 515, 516], "wherea": 515, "whether": [149, 151, 154, 178, 206, 208, 216, 218, 257, 373, 379, 393, 409, 414, 452, 455, 461], "which": [0, 1, 2, 6, 7, 8, 9, 19, 38, 87, 88, 102, 110, 127, 130, 131, 132, 134, 135, 136, 145, 149, 151, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 182, 188, 191, 192, 193, 194, 195, 197, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 220, 235, 238, 256, 260, 261, 275, 277, 279, 282, 283, 284, 285, 286, 303, 304, 312, 319, 324, 325, 328, 329, 334, 356, 357, 367, 368, 371, 386, 387, 389, 393, 417, 452, 454, 457, 461, 464, 480, 495, 496, 509, 512, 513, 514, 515, 516, 517, 518, 522, 523], "while": [2, 3, 6, 9, 165, 277, 417, 518, 519], "whistl": 2, "who": 6, "whose": [152, 334, 335], "why": 6, "wi": 513, "wide": 518, "width": [356, 357, 358, 361, 362, 364, 365, 367, 368, 386, 387, 411, 412], "window": [9, 355, 356, 357, 385, 386, 387], "wipe": 9, "wire": 291, "wired_limit_mb": 291, "wise": [0, 2, 13, 14, 20, 21, 22, 23, 24, 25, 26, 92, 93, 94, 95, 99, 115, 116, 137, 138, 142, 143, 144, 146, 148, 174, 175, 183, 184, 190, 199, 200, 201, 221, 222, 223, 224, 225, 227, 228, 229, 233, 241, 243, 245, 247, 253, 273, 274, 278, 281, 292, 293, 294, 295, 301, 302, 308, 314, 315, 359, 367, 368, 377, 388, 410, 421, 440, 447, 448, 450, 451, 466, 467, 469, 472, 473, 474, 475, 512], "wish": 9, "with_logit": 452, "within": [0, 3, 29, 190], "without": [1, 6, 8, 306, 409, 479, 511, 512, 513, 514, 517, 518, 519, 522], "wk": 6, "wl": 2, "wo": 6, "word": 0, "work": [2, 3, 6, 290, 344, 494, 512, 513, 514, 515, 516, 517, 518], "workhors": 353, "world": [341, 513], "world2": 513, "world_ani": 513, "world_mpi": 513, "world_r": 513, "worri": [1, 518], "would": [2, 6, 431, 513, 514, 516, 518, 519, 522], "wq": 6, "wrap": [121, 353, 493], "wrapper": [514, 517], "write": [0, 1, 6, 353, 519], "written": [2, 514], "wrong": 514, "wrt": 335, "wv": 6, "x": [0, 1, 2, 4, 5, 6, 7, 39, 88, 96, 121, 130, 131, 135, 136, 143, 148, 149, 150, 153, 154, 155, 178, 185, 186, 188, 212, 257, 261, 265, 266, 280, 285, 292, 322, 323, 330, 338, 340, 353, 355, 356, 357, 358, 359, 369, 371, 372, 374, 378, 380, 381, 382, 385, 386, 387, 388, 389, 410, 413, 415, 421, 422, 428, 431, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 464, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 483, 485, 492, 512, 513, 514, 515, 516, 518, 519, 520, 522], "x1": 453, "x2": 453, "x86_64": 9, "x_1": [453, 461], "x_2": [453, 461], "x_cast": 2, "x_grad": 1, "x_i": [451, 473, 474], "x_j": [473, 474], "x_offset": 2, "x_ptr": 2, "x_shape": 1, "x_stride": 2, "x_t": [373, 379, 414], "x_view": 519, "xcode": 9, "xcodeproj": 3, "xcrun": 9, "xf": 379, "xg": 379, "xi": 379, "xn": 373, "xo": 379, "xor": 95, "xr": 373, "xy": [0, 235], "xz": 373, "x\u00b2": 519, "y": [0, 2, 4, 5, 6, 7, 9, 39, 88, 121, 149, 150, 185, 188, 330, 353, 
358, 367, 374, 378, 380, 382, 413, 456, 461, 464, 485, 488, 512, 513, 514, 515, 518, 519], "y_": [456, 460], "y_cast": 2, "y_hat": 353, "y_offset": 2, "y_ptr": 2, "y_stride": 2, "ye": 6, "year": 6, "yet": [6, 353, 483, 496, 515, 516, 518, 520], "yield": [6, 7, 509], "you": [1, 2, 3, 4, 6, 7, 8, 9, 291, 353, 422, 430, 480, 509, 512, 513, 514, 515, 516, 517, 519, 521, 522], "your": [2, 6, 9, 483, 513, 515, 518], "z": [2, 88, 373, 512, 514, 518], "z_t": 373, "zeiler": 486, "zero": [0, 149, 152, 158, 159, 160, 161, 162, 163, 164, 166, 167, 168, 169, 170, 171, 204, 235, 276, 297, 321, 322, 323, 332, 353, 355, 356, 357, 366, 367, 368, 396, 432, 433, 434, 435, 436, 437, 438, 439, 480, 485, 487, 514, 516], "zero_grad": 515, "zeros_lik": [0, 210], "zhang": 6, "zip": [6, 7], "zip_saf": 2}, "titles": ["Operations", "Custom Metal Kernels", "Custom Extensions in MLX", "Metal Debugger", "Using MLX in C++", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.DtypeCategory", "mlx.core.abs", "mlx.core.add", "mlx.core.addmm", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctan2", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.at", "mlx.core.array.conj", "mlx.core.array.cos", "mlx.core.array.cummax", "mlx.core.array.cummin", "mlx.core.array.cumprod", "mlx.core.array.cumsum", "mlx.core.array.diag", "mlx.core.array.diagonal", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.flatten", "mlx.core.array.imag", "mlx.core.array.item", "mlx.core.array.itemsize", "mlx.core.array.log", "mlx.core.array.log10", "mlx.core.array.log1p", "mlx.core.array.log2", "mlx.core.array.logcumsumexp", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.moveaxis", "mlx.core.array.nbytes", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.real", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.squeeze", "mlx.core.array.std", "mlx.core.array.sum", "mlx.core.array.swapaxes", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array.view", "mlx.core.array_equal", "mlx.core.as_strided", "mlx.core.async_eval", "mlx.core.atleast_1d", "mlx.core.atleast_2d", "mlx.core.atleast_3d", "mlx.core.bitwise_and", "mlx.core.bitwise_invert", "mlx.core.bitwise_or", "mlx.core.bitwise_xor", "mlx.core.block_masked_mm", "mlx.core.broadcast_arrays", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clear_cache", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conj", "mlx.core.conjugate", "mlx.core.contiguous", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.conv3d", "mlx.core.conv_general", "mlx.core.conv_transpose1d", "mlx.core.conv_transpose2d", "mlx.core.conv_transpose3d", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.cummax", "mlx.core.cummin", "mlx.core.cumprod", "mlx.core.cumsum", "mlx.core.custom_function", 
"mlx.core.default_device", "mlx.core.default_stream", "mlx.core.degrees", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.distributed.Group", "mlx.core.distributed.all_gather", "mlx.core.distributed.all_sum", "mlx.core.distributed.init", "mlx.core.distributed.is_available", "mlx.core.distributed.recv", "mlx.core.distributed.recv_like", "mlx.core.distributed.send", "mlx.core.divide", "mlx.core.divmod", "mlx.core.einsum", "mlx.core.einsum_path", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.expm1", "mlx.core.export_function", "mlx.core.export_to_dot", "mlx.core.exporter", "mlx.core.eye", "mlx.core.fast.layer_norm", "mlx.core.fast.metal_kernel", "mlx.core.fast.rms_norm", "mlx.core.fast.rope", "mlx.core.fast.scaled_dot_product_attention", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.fftshift", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.ifftshift", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.finfo", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.gather_mm", "mlx.core.gather_qmm", "mlx.core.get_active_memory", "mlx.core.get_cache_memory", "mlx.core.get_peak_memory", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.hadamard_transform", "mlx.core.identity", "mlx.core.imag", "mlx.core.import_function", "mlx.core.inner", "mlx.core.isclose", "mlx.core.isfinite", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.issubdtype", "mlx.core.jvp", "mlx.core.kron", "mlx.core.left_shift", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.cholesky", "mlx.core.linalg.cholesky_inv", "mlx.core.linalg.cross", "mlx.core.linalg.eig", "mlx.core.linalg.eigh", "mlx.core.linalg.eigvals", "mlx.core.linalg.eigvalsh", "mlx.core.linalg.inv", "mlx.core.linalg.lu", "mlx.core.linalg.lu_factor", "mlx.core.linalg.norm", "mlx.core.linalg.pinv", "mlx.core.linalg.qr", "mlx.core.linalg.solve", "mlx.core.linalg.solve_triangular", "mlx.core.linalg.svd", "mlx.core.linalg.tri_inv", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logcumsumexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.meshgrid", "mlx.core.metal.device_info", "mlx.core.metal.is_available", "mlx.core.metal.start_capture", "mlx.core.metal.stop_capture", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.nan_to_num", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.not_equal", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.power", "mlx.core.prod", "mlx.core.put_along_axis", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.radians", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.laplace", "mlx.core.random.multivariate_normal", "mlx.core.random.normal", "mlx.core.random.permutation", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.real", 
"mlx.core.reciprocal", "mlx.core.remainder", "mlx.core.repeat", "mlx.core.reset_peak_memory", "mlx.core.reshape", "mlx.core.right_shift", "mlx.core.roll", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_cache_limit", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.set_memory_limit", "mlx.core.set_wired_limit", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.slice", "mlx.core.slice_update", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.std", "mlx.core.stop_gradient", "mlx.core.stream", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.synchronize", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.tile", "mlx.core.topk", "mlx.core.trace", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.unflatten", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.view", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.average_gradients", "mlx.nn.quantize", "mlx.nn.value_and_grad", "mlx.optimizers.clip_grad_norm", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_map_with_path", "mlx.utils.tree_reduce", "mlx.utils.tree_unflatten", "mlx.core.Stream", "Array", "Data Types", "Devices and Streams", "Distributed Communication", "Export Functions", "Fast", "FFT", "Linear Algebra", "Memory Management", "Metal", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.AvgPool1d", "mlx.nn.AvgPool2d", "mlx.nn.AvgPool3d", "mlx.nn.BatchNorm", "mlx.nn.CELU", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Conv3d", "mlx.nn.ConvTranspose1d", "mlx.nn.ConvTranspose2d", "mlx.nn.ConvTranspose3d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.ELU", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GLU", "mlx.nn.GRU", "mlx.nn.GroupNorm", "mlx.nn.HardShrink", "mlx.nn.HardTanh", "mlx.nn.Hardswish", "mlx.nn.InstanceNorm", "mlx.nn.LSTM", "mlx.nn.LayerNorm", "mlx.nn.LeakyReLU", "mlx.nn.Linear", "mlx.nn.LogSigmoid", "mlx.nn.LogSoftmax", "mlx.nn.MaxPool1d", "mlx.nn.MaxPool2d", "mlx.nn.MaxPool3d", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.set_dtype", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedEmbedding", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.RNN", "mlx.nn.ReLU", "mlx.nn.ReLU6", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.Sigmoid", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softmax", "mlx.nn.Softmin", "mlx.nn.Softplus", "mlx.nn.Softshrink", "mlx.nn.Softsign", "mlx.nn.Step", "mlx.nn.Tanh", "mlx.nn.Transformer", "mlx.nn.Upsample", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", 
"mlx.nn.celu", "mlx.nn.elu", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.glu", "mlx.nn.hard_shrink", "mlx.nn.hard_tanh", "mlx.nn.hardswish", "mlx.nn.leaky_relu", "mlx.nn.log_sigmoid", "mlx.nn.log_softmax", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.relu6", "mlx.nn.selu", "mlx.nn.sigmoid", "mlx.nn.silu", "mlx.nn.softmax", "mlx.nn.softmin", "mlx.nn.softplus", "mlx.nn.softshrink", "mlx.nn.step", "mlx.nn.tanh", "Functions", "Initializers", "Layers", "Loss Functions", "Module", "Operations", "Optimizers", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.MultiOptimizer", "mlx.optimizers.Muon", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.optimizers.cosine_decay", "mlx.optimizers.exponential_decay", "mlx.optimizers.join_schedules", "mlx.optimizers.linear_schedule", "mlx.optimizers.step_decay", "Common Optimizers", "Optimizer", "Schedulers", "Random", "Transforms", "Tree Utils", "Compilation", "Distributed Communication", "Exporting Functions", "Function Transforms", "Indexing Arrays", "Launching Distributed Programs", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "titleterms": {"A": 522, "In": 516, "The": 353, "ab": [13, 33], "adadelta": 486, "adafactor": 487, "adagrad": 488, "adam": 489, "adamax": 491, "adamw": 490, "add": 14, "addmm": 15, "algebra": 350, "alibi": 354, "all": [6, 16, 34, 513], "all_gath": 130, "all_sum": 131, "allclos": 17, "ani": [18, 35], "api": [8, 9], "appli": 389, "apply_gradi": 495, "apply_to_modul": 390, "arang": 19, "arcco": 20, "arccosh": 21, "arcsin": 22, "arcsinh": 23, "arctan": 24, "arctan2": 25, "arctanh": 26, "argmax": [27, 36], "argmin": [28, 37], "argpartit": 29, "argsort": 30, "arrai": [31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 343, 516, 521], "array_equ": 86, "as_strid": 87, "astyp": 38, "async_ev": 88, "atleast_1d": 89, "atleast_2d": 90, "atleast_3d": 91, "attent": 6, "automat": 515, "average_gradi": [333, 513], "avgpool1d": 355, "avgpool2d": 356, "avgpool3d": 357, "back": 2, "backend": 513, "basic": [512, 514, 520], "batchnorm": 358, "benchmark": 6, "bernoulli": 259, "binari": 9, "binary_cross_entropi": 452, "bind": 2, "bitwise_and": 92, "bitwise_invert": 93, "bitwise_or": 94, "bitwise_xor": 95, "block_masked_mm": 96, "broadcast_arrai": 97, "broadcast_to": 98, "build": [2, 9], "c": [4, 8, 9, 514], "categor": 260, "ceil": 99, "celu": [359, 440], "children": 391, "choleski": 202, "cholesky_inv": 203, "class": 353, "clear_cach": 100, "clip": 101, "clip_grad_norm": 336, "cmake": 2, "co": [41, 115], "code": [2, 6], "common": 506, 
"commun": [346, 513], "compil": [102, 512], "complex": 1, "comput": 518, "concaten": 103, "conj": [40, 104], "conjug": 105, "constant": 432, "contigu": 106, "conv1d": [107, 360], "conv2d": [108, 361], "conv3d": [109, 362], "conv_gener": 110, "conv_transpose1d": 111, "conv_transpose2d": 112, "conv_transpose3d": 113, "convers": 519, "convert": 6, "convolv": 114, "convtranspose1d": 363, "convtranspose2d": 364, "convtranspose3d": 365, "core": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 342], "cosh": 116, "cosine_decai": 501, "cosine_similarity_loss": 453, "cpu": [2, 9], "cross": 204, "cross_entropi": 454, "cuda": 9, "cummax": [42, 117], "cummin": [43, 118], "cumprod": [44, 119], "cumsum": [45, 120], "custom": [1, 2], "custom_funct": 121, "data": 344, "debug": 512, "debugg": 3, "default_devic": 122, "default_stream": 123, "defin": 513, "degre": 124, "dequant": 125, "devic": [10, 345], "device_info": 236, "diag": [46, 126], "diagon": [47, 127], "differ": 516, "differenti": 515, "disable_compil": 128, "distribut": [129, 130, 131, 132, 133, 134, 135, 136, 346, 513, 517], "divid": 137, "divmod": 138, "download": [2, 6], "dropout": 366, "dropout2d": 367, "dropout3d": 368, "dtype": [11, 48], "dtypecategori": 12, "eig": 205, "eigh": 206, "eigval": 207, "eigvalsh": 208, "einsum": 139, "einsum_path": 140, "elu": [369, 441], "embed": 370, "enable_compil": 141, "encod": 6, "end": 2, "equal": 142, "erf": 143, "erfinv": 144, "eval": [145, 392], "evalu": 518, "exampl": [1, 2, 8, 512, 513, 514, 522], "exp": [49, 146], "expand_dim": 147, "expm1": 148, "exponential_decai": 502, "export": [151, 347, 514], "export_funct": 149, "export_to_dot": 150, "extens": 2, "ey": 152, "fast": [153, 154, 155, 156, 157, 348], "fft": [158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 349], "fft2": 159, "fftn": 160, "fftshift": 161, "filter_and_map": 393, "finfo": 172, "flatten": [50, 173], "floor": 174, "floor_divid": 175, "format": 521, "found": 9, "framework": 519, "freez": 394, "from": [9, 516], "full": [6, 176], "function": [347, 479, 482, 512, 514, 
515, 520], "further": 8, "gather_mm": 177, "gather_qmm": 178, "gaussian_nll_loss": 455, "gelu": [371, 442], "gelu_approx": 443, "gelu_fast_approx": 444, "gener": 6, "get": 513, "get_active_memori": 179, "get_cache_memori": 180, "get_peak_memori": 181, "glorot_norm": 433, "glorot_uniform": 434, "glu": [372, 445], "gpu": 2, "grad": [182, 353], "graph": [512, 518, 520], "greater": 183, "greater_equ": 184, "grid": 1, "group": 129, "groupnorm": 374, "gru": 373, "guid": 520, "gumbel": 261, "hadamard_transform": 185, "hard_shrink": 446, "hard_tanh": 447, "hardshrink": 375, "hardswish": [377, 448], "hardtanh": 376, "he_norm": 435, "he_uniform": 436, "hinge_loss": 456, "host": [513, 517], "huber_loss": 457, "ident": [186, 437], "ifft": 162, "ifft2": 163, "ifftn": 164, "ifftshift": 165, "imag": [51, 187], "implement": [2, 6], "import": 514, "import_funct": 188, "index": 516, "infer": 6, "init": [132, 432, 433, 434, 435, 436, 437, 438, 439, 496], "initi": 480, "inner": 189, "inspect": 353, "instal": [8, 9, 513], "instancenorm": 378, "introduc": 2, "inv": 209, "irfft": 166, "irfft2": 167, "irfftn": 168, "is_avail": [133, 237], "isclos": 190, "isfinit": 191, "isinf": 192, "isnan": 193, "isneginf": 194, "isposinf": 195, "issubdtyp": 196, "item": 52, "items": 53, "jax": 519, "join_schedul": 503, "jvp": 197, "kei": 262, "kernel": 1, "kl_div_loss": 458, "kron": 198, "l1_loss": 459, "laplac": 263, "launch": 517, "layer": [6, 7, 481], "layer_norm": 153, "layernorm": 380, "lazi": 518, "leaf_modul": 395, "leaky_relu": 449, "leakyrelu": 381, "left_shift": 199, "less": 200, "less_equ": 201, "linalg": [202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218], "linear": [5, 350, 382], "linear_schedul": 504, "linspac": 219, "linux": 9, "lion": 492, "llm": 6, "load": [6, 220, 485, 521], "load_weight": 396, "log": [54, 221], "log10": [55, 222], "log1p": [56, 223], "log2": [57, 224], "log_cosh_loss": 460, "log_sigmoid": 450, "log_softmax": 451, "logaddexp": 225, "logcumsumexp": [58, 226], "logical_and": 227, "logical_not": 228, "logical_or": 229, "logsigmoid": 383, "logsoftmax": 384, "logsumexp": [59, 230], "loss": [452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 482], "lstm": 379, "lu": 210, "lu_factor": 211, "manag": 351, "margin_ranking_loss": 461, "matmul": 231, "max": [60, 232], "maximum": 233, "maxpool1d": 385, "maxpool2d": 386, "maxpool3d": 387, "mean": [61, 234], "memori": [351, 522], "meshgrid": 235, "metal": [1, 3, 9, 236, 237, 238, 239, 352], "metal_kernel": 154, "min": [62, 240], "minim": 9, "minimum": 241, "mish": [388, 466], "mlx": [2, 4, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 
214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505], "model": 6, "modul": [353, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 483, 514], "more": 514, "moveaxi": [63, 242], "mpi": [513, 517], "mse_loss": 462, "multi": 7, "multiheadattent": 409, "multioptim": 493, "multipl": 514, "multipli": 243, "multivariate_norm": 264, "muon": 494, "named_modul": 398, "nan_to_num": 244, "nbyte": 64, "ndim": 65, "neg": 245, "network": 353, "neural": 353, "new_stream": 246, "nll_loss": 463, "nn": [333, 334, 335, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 513], "norm": 212, "normal": [265, 438], "not_equ": 247, "numpi": [516, 519], "ones": 248, "ones_lik": 249, "onli": [9, 518], "oper": [0, 2, 484], "optim": [336, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507], "option": 9, "other": 519, "outer": 250, "packag": 4, "pad": 251, "paramet": [353, 399], "partit": 252, "perceptron": 7, "permut": 266, "pinv": 213, "place": 516, "power": 253, "prelu": [410, 467], "primit": 2, "prod": [66, 254], "program": [513, 517], "provid": 517, "pure": 512, "put": 6, "put_along_axi": 255, "python": [2, 8, 9], "pytorch": 519, "qr": 214, "quantiz": [256, 334], "quantized_matmul": 257, "quantizedembed": 411, "quantizedlinear": 412, "quick": [353, 520], "radian": 258, "randint": 267, "random": [259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 509], "read": 8, "real": [67, 272], "reciproc": [68, 273], "recv": 134, "recv_lik": 135, "reduc": 513, "refer": 8, "regress": 5, "relu": [415, 468], "relu6": [416, 469], "remaind": 274, "remot": 
[513, 517], "repeat": 275, "requir": 9, "reset_peak_memori": 276, "reshap": [69, 277], "result": 2, "rfft": 169, "rfft2": 170, "rfftn": 171, "right_shift": 278, "ring": [513, 517], "rms_norm": 155, "rmsnorm": 413, "rmsprop": 499, "rnn": 414, "roll": 279, "rope": [156, 417], "round": [70, 280], "rsqrt": [71, 281], "run": 513, "sampl": 1, "save": [282, 485, 521], "save_gguf": 283, "save_safetensor": 284, "save_weight": 400, "savez": 285, "savez_compress": 286, "scaled_dot_product_attent": 157, "schedul": 508, "script": [2, 6], "seed": 268, "select": 513, "selu": [418, 470], "send": 136, "sequenti": 419, "serial": 521, "set": [513, 517], "set_cache_limit": 287, "set_default_devic": 288, "set_default_stream": 289, "set_dtyp": 401, "set_memory_limit": 290, "set_wired_limit": 291, "setuptool": 2, "sgd": 500, "shape": [1, 72], "shapeless": [512, 514], "shell": 9, "sigmoid": [292, 421, 471], "sign": 293, "silu": [420, 472], "simpl": [1, 522], "sin": [73, 294], "sinh": 295, "sinusoidalpositionalencod": 422, "size": [9, 74], "slice": 296, "slice_upd": 297, "smooth_l1_loss": 464, "softmax": [298, 423, 473], "softmin": [424, 474], "softplu": [425, 475], "softshrink": [426, 476], "softsign": 427, "solv": 215, "solve_triangular": 216, "sort": 299, "sourc": 9, "specif": 517, "specifi": 523, "speedup": 512, "split": [75, 269, 300], "sqrt": [76, 301], "squar": [77, 302], "squeez": [78, 303], "stack": 304, "start": [353, 513, 520], "start_captur": 238, "state": [402, 497], "std": [79, 305], "step": [428, 477], "step_decai": 505, "stop_captur": 239, "stop_gradi": 306, "stream": [307, 342, 345, 523], "stride": 1, "subtract": 308, "sum": [80, 309], "support": 344, "svd": 217, "swapax": [81, 310], "synchron": 311, "t": 32, "take": 312, "take_along_axi": 313, "tan": 314, "tanh": [315, 429, 478], "tensordot": 316, "tensorflow": 519, "thunderbolt": 513, "tile": 317, "togeth": 6, "tolist": 82, "topk": 318, "trace": [319, 514], "train": [403, 405, 512, 513], "trainable_paramet": 404, "transform": [2, 430, 510, 512, 514, 515, 518, 520], "transpos": [83, 320], "tree": 511, "tree_flatten": 337, "tree_map": 338, "tree_map_with_path": 339, "tree_reduc": 340, "tree_unflatten": 341, "tri": 321, "tri_inv": 218, "tril": 322, "triplet_loss": 465, "triu": 323, "troubleshoot": 9, "truncated_norm": 270, "tune": 513, "type": 344, "unflatten": 324, "unfreez": 406, "unifi": 522, "uniform": [271, 439], "up": [513, 517], "updat": [353, 407, 498, 516], "update_modul": 408, "upsampl": 431, "us": [1, 2, 4, 518, 523], "usag": [2, 8, 517], "util": [337, 338, 339, 340, 341, 511, 513], "valu": 353, "value_and_grad": [325, 335], "var": [84, 326], "variabl": 4, "vector": 515, "view": [85, 327], "vjp": [1, 328], "vmap": 329, "weight": 6, "what": 518, "when": 518, "where": 330, "why": 518, "workflow": 3, "x86": 9, "xcode": 3, "you": 518, "zero": 331, "zeros_lik": 332}}) \ No newline at end of file +Search.setIndex({"alltitles": {"A Simple Example": [[525, "a-simple-example"]], "Array": [[345, null]], "Attention layer": [[6, "attention-layer"]], "Automatic Differentiation": [[518, "automatic-differentiation"]], "Automatic Vectorization": [[518, "automatic-vectorization"]], "Basics": [[523, "basics"]], "Basics of Compile": [[515, "basics-of-compile"]], "Basics of Exporting": [[517, "basics-of-exporting"]], "Binary Size Minimization": [[9, "binary-size-minimization"]], "Binding to Python": [[2, "binding-to-python"]], "Build Options": [[9, "id5"]], "Build Requirements": [[9, "build-requirements"]], "Build and Install": [[9, null]], "Build from 
source": [[9, "build-from-source"]], "Building and Binding": [[2, "building-and-binding"]], "Building with CMake": [[2, "building-with-cmake"]], "Building with setuptools": [[2, "building-with-setuptools"]], "C++ API": [[9, "c-api"]], "C++ API Reference": [[8, null]], "CPU-only (Linux)": [[9, "cpu-only-linux"]], "CUDA": [[9, "cuda"], [9, "id3"], [346, null]], "Common Optimizers": [[509, null]], "Compilation": [[515, null]], "Compiling Training Graphs": [[515, "compiling-training-graphs"]], "Complex Example": [[1, "complex-example"]], "Conversion to NumPy and Other Frameworks": [[522, null]], "Converting the weights": [[6, "converting-the-weights"]], "Custom Extensions in MLX": [[2, null]], "Custom Metal Kernels": [[1, null]], "Data Types": [[347, null]], "Debugging": [[515, "debugging"]], "Defining a Ring": [[516, "defining-a-ring"]], "Devices and Streams": [[348, null]], "Differences from NumPy": [[519, "differences-from-numpy"]], "Distributed Communication": [[349, null], [516, null]], "Download the code": [[2, null], [6, null]], "Encoder layer": [[6, "encoder-layer"]], "Example Speedup": [[515, "example-speedup"]], "Examples": [[8, null]], "Export Functions": [[350, null]], "Exporting Functions": [[517, null]], "Exporting Modules": [[517, "exporting-modules"]], "Exporting Multiple Traces": [[517, "exporting-multiple-traces"]], "FFT": [[352, null]], "Fast": [[351, null]], "Full model": [[6, "full-model"]], "Function Transforms": [[518, null]], "Function and Graph Transformations": [[523, "function-and-graph-transformations"]], "Functions": [[482, null]], "Further Reading": [[8, null]], "Generation": [[6, "generation"]], "Getting Started": [[516, "getting-started"]], "Getting Started with MPI": [[516, "getting-started-with-mpi"]], "Getting Started with Ring": [[516, "getting-started-with-ring"]], "Grid Sample VJP": [[1, "grid-sample-vjp"]], "Implementing the CPU Back-end": [[2, "implementing-the-cpu-back-end"]], "Implementing the GPU Back-end": [[2, "implementing-the-gpu-back-end"]], "Implementing the Primitive": [[2, "implementing-the-primitive"]], "Implementing the model": [[6, "implementing-the-model"]], "Importing Functions in C++": [[517, "importing-functions-in-c"]], "In Place Updates": [[519, "in-place-updates"]], "Indexing Arrays": [[519, null]], "Initializers": [[483, null]], "Inspecting Modules": [[356, "inspecting-modules"]], "Install": [[8, null]], "Installing MPI": [[516, "installing-mpi"]], "Introducing the Example": [[2, "introducing-the-example"]], "JAX": [[522, "jax"]], "LLM inference": [[6, null]], "Launching Distributed Programs": [[520, null]], "Layers": [[484, null]], "Lazy Evaluation": [[521, null]], "Linear Algebra": [[353, null]], "Linear Regression": [[5, null]], "Linux": [[9, "linux"]], "Loss Functions": [[485, null]], "MLX": [[8, null]], "MPI Specifics": [[520, "mpi-specifics"]], "Memory Management": [[354, null]], "Metal": [[355, null]], "Metal Debugger": [[3, null]], "Metal not found": [[9, "metal-not-found"]], "Module": [[486, null]], "More Examples": [[517, "more-examples"]], "Multi-Layer Perceptron": [[7, null]], "Neural Networks": [[356, null]], "Only Compute What You Use": [[521, "only-compute-what-you-use"]], "Operations": [[0, null], [2, "operations"], [487, null]], "Operations and Primitives": [[2, "operations-and-primitives"]], "Optimizer": [[510, null]], "Optimizers": [[488, null]], "Package Variables": [[4, "id1"]], "Parameters": [[356, "parameters"]], "Primitive Transforms": [[2, "primitive-transforms"]], "Primitives": [[2, "primitives"]], 
"Providing Hosts": [[520, "providing-hosts"]], "Pure Functions": [[515, "pure-functions"]], "Putting it all together": [[6, "putting-it-all-together"]], "PyTorch": [[522, "pytorch"]], "Python API": [[9, "python-api"]], "Python API Reference": [[8, null]], "Python Installation": [[9, "python-installation"]], "Quick Start Guide": [[523, null]], "Quick Start with Neural Networks": [[356, "quick-start-with-neural-networks"]], "Random": [[512, null]], "Results": [[2, "results"]], "Ring Specifics": [[520, "ring-specifics"]], "Running Distributed Programs": [[516, "running-distributed-programs"]], "Saving and Loading": [[488, "saving-and-loading"]], "Saving and Loading Arrays": [[524, null]], "Schedulers": [[511, null]], "Scripts": [[2, "scripts"], [6, "scripts"]], "Selecting Backend": [[516, "selecting-backend"]], "Serialization Formats": [[524, "id1"]], "Setting up Remote Hosts": [[516, "setting-up-remote-hosts"], [520, "setting-up-remote-hosts"]], "Shapeless Compilation": [[515, "shapeless-compilation"]], "Shapeless Exports": [[517, "shapeless-exports"]], "Simple Example": [[1, "simple-example"]], "Specifying the Stream": [[526, "specifying-the-stream"]], "Supported Data Types": [[347, "id2"]], "TensorFlow": [[522, "tensorflow"]], "The Module Class": [[356, "the-module-class"]], "Thunderbolt Ring": [[516, "thunderbolt-ring"]], "Training Example": [[516, "training-example"]], "Transformations with Compile": [[515, "transformations-with-compile"]], "Transformations with Imported Functions": [[517, "transformations-with-imported-functions"]], "Transforming Compute Graphs": [[521, "transforming-compute-graphs"]], "Transforms": [[513, null]], "Tree Utils": [[514, null]], "Troubleshooting": [[9, "troubleshooting"], [9, "id4"]], "Tuning MPI All Reduce": [[516, "tuning-mpi-all-reduce"]], "Unified Memory": [[525, null]], "Updating the Parameters": [[356, "updating-the-parameters"]], "Usage": [[2, "usage"], [8, null], [520, "usage"]], "Using MLX in C++": [[4, null]], "Using Shape/Strides": [[1, "using-shape-strides"]], "Using Streams": [[526, null]], "Using the Primitive": [[2, "using-the-primitive"]], "Utilizing nn.average_gradients": [[516, "utilizing-nn-average-gradients"]], "Value and Grad": [[356, "value-and-grad"]], "Weight loading and benchmarking": [[6, "weight-loading-and-benchmarking"]], "When to Evaluate": [[521, "when-to-evaluate"]], "Why Lazy Evaluation": [[521, "why-lazy-evaluation"]], "Xcode Workflow": [[3, "xcode-workflow"]], "mlx.core.Device": [[10, null]], "mlx.core.Dtype": [[11, null]], "mlx.core.DtypeCategory": [[12, null]], "mlx.core.Stream": [[344, null]], "mlx.core.abs": [[13, null]], "mlx.core.add": [[14, null]], "mlx.core.addmm": [[15, null]], "mlx.core.all": [[16, null]], "mlx.core.allclose": [[17, null]], "mlx.core.any": [[18, null]], "mlx.core.arange": [[19, null]], "mlx.core.arccos": [[20, null]], "mlx.core.arccosh": [[21, null]], "mlx.core.arcsin": [[22, null]], "mlx.core.arcsinh": [[23, null]], "mlx.core.arctan": [[24, null]], "mlx.core.arctan2": [[25, null]], "mlx.core.arctanh": [[26, null]], "mlx.core.argmax": [[27, null]], "mlx.core.argmin": [[28, null]], "mlx.core.argpartition": [[29, null]], "mlx.core.argsort": [[30, null]], "mlx.core.array": [[31, null]], "mlx.core.array.T": [[32, null]], "mlx.core.array.abs": [[33, null]], "mlx.core.array.all": [[34, null]], "mlx.core.array.any": [[35, null]], "mlx.core.array.argmax": [[36, null]], "mlx.core.array.argmin": [[37, null]], "mlx.core.array.astype": [[38, null]], "mlx.core.array.at": [[39, null]], "mlx.core.array.conj": 
[[40, null]], "mlx.core.array.cos": [[41, null]], "mlx.core.array.cummax": [[42, null]], "mlx.core.array.cummin": [[43, null]], "mlx.core.array.cumprod": [[44, null]], "mlx.core.array.cumsum": [[45, null]], "mlx.core.array.diag": [[46, null]], "mlx.core.array.diagonal": [[47, null]], "mlx.core.array.dtype": [[48, null]], "mlx.core.array.exp": [[49, null]], "mlx.core.array.flatten": [[50, null]], "mlx.core.array.imag": [[51, null]], "mlx.core.array.item": [[52, null]], "mlx.core.array.itemsize": [[53, null]], "mlx.core.array.log": [[54, null]], "mlx.core.array.log10": [[55, null]], "mlx.core.array.log1p": [[56, null]], "mlx.core.array.log2": [[57, null]], "mlx.core.array.logcumsumexp": [[58, null]], "mlx.core.array.logsumexp": [[59, null]], "mlx.core.array.max": [[60, null]], "mlx.core.array.mean": [[61, null]], "mlx.core.array.min": [[62, null]], "mlx.core.array.moveaxis": [[63, null]], "mlx.core.array.nbytes": [[64, null]], "mlx.core.array.ndim": [[65, null]], "mlx.core.array.prod": [[66, null]], "mlx.core.array.real": [[67, null]], "mlx.core.array.reciprocal": [[68, null]], "mlx.core.array.reshape": [[69, null]], "mlx.core.array.round": [[70, null]], "mlx.core.array.rsqrt": [[71, null]], "mlx.core.array.shape": [[72, null]], "mlx.core.array.sin": [[73, null]], "mlx.core.array.size": [[74, null]], "mlx.core.array.split": [[75, null]], "mlx.core.array.sqrt": [[76, null]], "mlx.core.array.square": [[77, null]], "mlx.core.array.squeeze": [[78, null]], "mlx.core.array.std": [[79, null]], "mlx.core.array.sum": [[80, null]], "mlx.core.array.swapaxes": [[81, null]], "mlx.core.array.tolist": [[82, null]], "mlx.core.array.transpose": [[83, null]], "mlx.core.array.var": [[84, null]], "mlx.core.array.view": [[85, null]], "mlx.core.array_equal": [[86, null]], "mlx.core.as_strided": [[87, null]], "mlx.core.async_eval": [[88, null]], "mlx.core.atleast_1d": [[89, null]], "mlx.core.atleast_2d": [[90, null]], "mlx.core.atleast_3d": [[91, null]], "mlx.core.bitwise_and": [[92, null]], "mlx.core.bitwise_invert": [[93, null]], "mlx.core.bitwise_or": [[94, null]], "mlx.core.bitwise_xor": [[95, null]], "mlx.core.block_masked_mm": [[96, null]], "mlx.core.broadcast_arrays": [[97, null]], "mlx.core.broadcast_to": [[98, null]], "mlx.core.ceil": [[99, null]], "mlx.core.clear_cache": [[100, null]], "mlx.core.clip": [[101, null]], "mlx.core.compile": [[102, null]], "mlx.core.concatenate": [[103, null]], "mlx.core.conj": [[104, null]], "mlx.core.conjugate": [[105, null]], "mlx.core.contiguous": [[106, null]], "mlx.core.conv1d": [[107, null]], "mlx.core.conv2d": [[108, null]], "mlx.core.conv3d": [[109, null]], "mlx.core.conv_general": [[110, null]], "mlx.core.conv_transpose1d": [[111, null]], "mlx.core.conv_transpose2d": [[112, null]], "mlx.core.conv_transpose3d": [[113, null]], "mlx.core.convolve": [[114, null]], "mlx.core.cos": [[115, null]], "mlx.core.cosh": [[116, null]], "mlx.core.cuda.is_available": [[117, null]], "mlx.core.cummax": [[118, null]], "mlx.core.cummin": [[119, null]], "mlx.core.cumprod": [[120, null]], "mlx.core.cumsum": [[121, null]], "mlx.core.custom_function": [[122, null]], "mlx.core.default_device": [[123, null]], "mlx.core.default_stream": [[124, null]], "mlx.core.degrees": [[125, null]], "mlx.core.dequantize": [[126, null]], "mlx.core.diag": [[127, null]], "mlx.core.diagonal": [[128, null]], "mlx.core.disable_compile": [[129, null]], "mlx.core.distributed.Group": [[130, null]], "mlx.core.distributed.all_gather": [[131, null]], "mlx.core.distributed.all_sum": [[132, null]], 
"mlx.core.distributed.init": [[133, null]], "mlx.core.distributed.is_available": [[134, null]], "mlx.core.distributed.recv": [[135, null]], "mlx.core.distributed.recv_like": [[136, null]], "mlx.core.distributed.send": [[137, null]], "mlx.core.divide": [[138, null]], "mlx.core.divmod": [[139, null]], "mlx.core.einsum": [[140, null]], "mlx.core.einsum_path": [[141, null]], "mlx.core.enable_compile": [[142, null]], "mlx.core.equal": [[143, null]], "mlx.core.erf": [[144, null]], "mlx.core.erfinv": [[145, null]], "mlx.core.eval": [[146, null]], "mlx.core.exp": [[147, null]], "mlx.core.expand_dims": [[148, null]], "mlx.core.expm1": [[149, null]], "mlx.core.export_function": [[150, null]], "mlx.core.export_to_dot": [[151, null]], "mlx.core.exporter": [[152, null]], "mlx.core.eye": [[153, null]], "mlx.core.fast.cuda_kernel": [[154, null]], "mlx.core.fast.layer_norm": [[155, null]], "mlx.core.fast.metal_kernel": [[156, null]], "mlx.core.fast.rms_norm": [[157, null]], "mlx.core.fast.rope": [[158, null]], "mlx.core.fast.scaled_dot_product_attention": [[159, null]], "mlx.core.fft.fft": [[160, null]], "mlx.core.fft.fft2": [[161, null]], "mlx.core.fft.fftn": [[162, null]], "mlx.core.fft.fftshift": [[163, null]], "mlx.core.fft.ifft": [[164, null]], "mlx.core.fft.ifft2": [[165, null]], "mlx.core.fft.ifftn": [[166, null]], "mlx.core.fft.ifftshift": [[167, null]], "mlx.core.fft.irfft": [[168, null]], "mlx.core.fft.irfft2": [[169, null]], "mlx.core.fft.irfftn": [[170, null]], "mlx.core.fft.rfft": [[171, null]], "mlx.core.fft.rfft2": [[172, null]], "mlx.core.fft.rfftn": [[173, null]], "mlx.core.finfo": [[174, null]], "mlx.core.flatten": [[175, null]], "mlx.core.floor": [[176, null]], "mlx.core.floor_divide": [[177, null]], "mlx.core.full": [[178, null]], "mlx.core.gather_mm": [[179, null]], "mlx.core.gather_qmm": [[180, null]], "mlx.core.get_active_memory": [[181, null]], "mlx.core.get_cache_memory": [[182, null]], "mlx.core.get_peak_memory": [[183, null]], "mlx.core.grad": [[184, null]], "mlx.core.greater": [[185, null]], "mlx.core.greater_equal": [[186, null]], "mlx.core.hadamard_transform": [[187, null]], "mlx.core.identity": [[188, null]], "mlx.core.imag": [[189, null]], "mlx.core.import_function": [[190, null]], "mlx.core.inner": [[191, null]], "mlx.core.isclose": [[192, null]], "mlx.core.isfinite": [[193, null]], "mlx.core.isinf": [[194, null]], "mlx.core.isnan": [[195, null]], "mlx.core.isneginf": [[196, null]], "mlx.core.isposinf": [[197, null]], "mlx.core.issubdtype": [[198, null]], "mlx.core.jvp": [[199, null]], "mlx.core.kron": [[200, null]], "mlx.core.left_shift": [[201, null]], "mlx.core.less": [[202, null]], "mlx.core.less_equal": [[203, null]], "mlx.core.linalg.cholesky": [[204, null]], "mlx.core.linalg.cholesky_inv": [[205, null]], "mlx.core.linalg.cross": [[206, null]], "mlx.core.linalg.eig": [[207, null]], "mlx.core.linalg.eigh": [[208, null]], "mlx.core.linalg.eigvals": [[209, null]], "mlx.core.linalg.eigvalsh": [[210, null]], "mlx.core.linalg.inv": [[211, null]], "mlx.core.linalg.lu": [[212, null]], "mlx.core.linalg.lu_factor": [[213, null]], "mlx.core.linalg.norm": [[214, null]], "mlx.core.linalg.pinv": [[215, null]], "mlx.core.linalg.qr": [[216, null]], "mlx.core.linalg.solve": [[217, null]], "mlx.core.linalg.solve_triangular": [[218, null]], "mlx.core.linalg.svd": [[219, null]], "mlx.core.linalg.tri_inv": [[220, null]], "mlx.core.linspace": [[221, null]], "mlx.core.load": [[222, null]], "mlx.core.log": [[223, null]], "mlx.core.log10": [[224, null]], "mlx.core.log1p": [[225, null]], 
"mlx.core.log2": [[226, null]], "mlx.core.logaddexp": [[227, null]], "mlx.core.logcumsumexp": [[228, null]], "mlx.core.logical_and": [[229, null]], "mlx.core.logical_not": [[230, null]], "mlx.core.logical_or": [[231, null]], "mlx.core.logsumexp": [[232, null]], "mlx.core.matmul": [[233, null]], "mlx.core.max": [[234, null]], "mlx.core.maximum": [[235, null]], "mlx.core.mean": [[236, null]], "mlx.core.meshgrid": [[237, null]], "mlx.core.metal.device_info": [[238, null]], "mlx.core.metal.is_available": [[239, null]], "mlx.core.metal.start_capture": [[240, null]], "mlx.core.metal.stop_capture": [[241, null]], "mlx.core.min": [[242, null]], "mlx.core.minimum": [[243, null]], "mlx.core.moveaxis": [[244, null]], "mlx.core.multiply": [[245, null]], "mlx.core.nan_to_num": [[246, null]], "mlx.core.negative": [[247, null]], "mlx.core.new_stream": [[248, null]], "mlx.core.not_equal": [[249, null]], "mlx.core.ones": [[250, null]], "mlx.core.ones_like": [[251, null]], "mlx.core.outer": [[252, null]], "mlx.core.pad": [[253, null]], "mlx.core.partition": [[254, null]], "mlx.core.power": [[255, null]], "mlx.core.prod": [[256, null]], "mlx.core.put_along_axis": [[257, null]], "mlx.core.quantize": [[258, null]], "mlx.core.quantized_matmul": [[259, null]], "mlx.core.radians": [[260, null]], "mlx.core.random.bernoulli": [[261, null]], "mlx.core.random.categorical": [[262, null]], "mlx.core.random.gumbel": [[263, null]], "mlx.core.random.key": [[264, null]], "mlx.core.random.laplace": [[265, null]], "mlx.core.random.multivariate_normal": [[266, null]], "mlx.core.random.normal": [[267, null]], "mlx.core.random.permutation": [[268, null]], "mlx.core.random.randint": [[269, null]], "mlx.core.random.seed": [[270, null]], "mlx.core.random.split": [[271, null]], "mlx.core.random.truncated_normal": [[272, null]], "mlx.core.random.uniform": [[273, null]], "mlx.core.real": [[274, null]], "mlx.core.reciprocal": [[275, null]], "mlx.core.remainder": [[276, null]], "mlx.core.repeat": [[277, null]], "mlx.core.reset_peak_memory": [[278, null]], "mlx.core.reshape": [[279, null]], "mlx.core.right_shift": [[280, null]], "mlx.core.roll": [[281, null]], "mlx.core.round": [[282, null]], "mlx.core.rsqrt": [[283, null]], "mlx.core.save": [[284, null]], "mlx.core.save_gguf": [[285, null]], "mlx.core.save_safetensors": [[286, null]], "mlx.core.savez": [[287, null]], "mlx.core.savez_compressed": [[288, null]], "mlx.core.set_cache_limit": [[289, null]], "mlx.core.set_default_device": [[290, null]], "mlx.core.set_default_stream": [[291, null]], "mlx.core.set_memory_limit": [[292, null]], "mlx.core.set_wired_limit": [[293, null]], "mlx.core.sigmoid": [[294, null]], "mlx.core.sign": [[295, null]], "mlx.core.sin": [[296, null]], "mlx.core.sinh": [[297, null]], "mlx.core.slice": [[298, null]], "mlx.core.slice_update": [[299, null]], "mlx.core.softmax": [[300, null]], "mlx.core.sort": [[301, null]], "mlx.core.split": [[302, null]], "mlx.core.sqrt": [[303, null]], "mlx.core.square": [[304, null]], "mlx.core.squeeze": [[305, null]], "mlx.core.stack": [[306, null]], "mlx.core.std": [[307, null]], "mlx.core.stop_gradient": [[308, null]], "mlx.core.stream": [[309, null]], "mlx.core.subtract": [[310, null]], "mlx.core.sum": [[311, null]], "mlx.core.swapaxes": [[312, null]], "mlx.core.synchronize": [[313, null]], "mlx.core.take": [[314, null]], "mlx.core.take_along_axis": [[315, null]], "mlx.core.tan": [[316, null]], "mlx.core.tanh": [[317, null]], "mlx.core.tensordot": [[318, null]], "mlx.core.tile": [[319, null]], "mlx.core.topk": [[320, null]], 
"mlx.core.trace": [[321, null]], "mlx.core.transpose": [[322, null]], "mlx.core.tri": [[323, null]], "mlx.core.tril": [[324, null]], "mlx.core.triu": [[325, null]], "mlx.core.unflatten": [[326, null]], "mlx.core.value_and_grad": [[327, null]], "mlx.core.var": [[328, null]], "mlx.core.view": [[329, null]], "mlx.core.vjp": [[330, null]], "mlx.core.vmap": [[331, null]], "mlx.core.where": [[332, null]], "mlx.core.zeros": [[333, null]], "mlx.core.zeros_like": [[334, null]], "mlx.nn.ALiBi": [[357, null]], "mlx.nn.AvgPool1d": [[358, null]], "mlx.nn.AvgPool2d": [[359, null]], "mlx.nn.AvgPool3d": [[360, null]], "mlx.nn.BatchNorm": [[361, null]], "mlx.nn.CELU": [[362, null]], "mlx.nn.Conv1d": [[363, null]], "mlx.nn.Conv2d": [[364, null]], "mlx.nn.Conv3d": [[365, null]], "mlx.nn.ConvTranspose1d": [[366, null]], "mlx.nn.ConvTranspose2d": [[367, null]], "mlx.nn.ConvTranspose3d": [[368, null]], "mlx.nn.Dropout": [[369, null]], "mlx.nn.Dropout2d": [[370, null]], "mlx.nn.Dropout3d": [[371, null]], "mlx.nn.ELU": [[372, null]], "mlx.nn.Embedding": [[373, null]], "mlx.nn.GELU": [[374, null]], "mlx.nn.GLU": [[375, null]], "mlx.nn.GRU": [[376, null]], "mlx.nn.GroupNorm": [[377, null]], "mlx.nn.HardShrink": [[378, null]], "mlx.nn.HardTanh": [[379, null]], "mlx.nn.Hardswish": [[380, null]], "mlx.nn.InstanceNorm": [[381, null]], "mlx.nn.LSTM": [[382, null]], "mlx.nn.LayerNorm": [[383, null]], "mlx.nn.LeakyReLU": [[384, null]], "mlx.nn.Linear": [[385, null]], "mlx.nn.LogSigmoid": [[386, null]], "mlx.nn.LogSoftmax": [[387, null]], "mlx.nn.MaxPool1d": [[388, null]], "mlx.nn.MaxPool2d": [[389, null]], "mlx.nn.MaxPool3d": [[390, null]], "mlx.nn.Mish": [[391, null]], "mlx.nn.Module.apply": [[392, null]], "mlx.nn.Module.apply_to_modules": [[393, null]], "mlx.nn.Module.children": [[394, null]], "mlx.nn.Module.eval": [[395, null]], "mlx.nn.Module.filter_and_map": [[396, null]], "mlx.nn.Module.freeze": [[397, null]], "mlx.nn.Module.leaf_modules": [[398, null]], "mlx.nn.Module.load_weights": [[399, null]], "mlx.nn.Module.modules": [[400, null]], "mlx.nn.Module.named_modules": [[401, null]], "mlx.nn.Module.parameters": [[402, null]], "mlx.nn.Module.save_weights": [[403, null]], "mlx.nn.Module.set_dtype": [[404, null]], "mlx.nn.Module.state": [[405, null]], "mlx.nn.Module.train": [[406, null]], "mlx.nn.Module.trainable_parameters": [[407, null]], "mlx.nn.Module.training": [[408, null]], "mlx.nn.Module.unfreeze": [[409, null]], "mlx.nn.Module.update": [[410, null]], "mlx.nn.Module.update_modules": [[411, null]], "mlx.nn.MultiHeadAttention": [[412, null]], "mlx.nn.PReLU": [[413, null]], "mlx.nn.QuantizedEmbedding": [[414, null]], "mlx.nn.QuantizedLinear": [[415, null]], "mlx.nn.RMSNorm": [[416, null]], "mlx.nn.RNN": [[417, null]], "mlx.nn.ReLU": [[418, null]], "mlx.nn.ReLU6": [[419, null]], "mlx.nn.RoPE": [[420, null]], "mlx.nn.SELU": [[421, null]], "mlx.nn.Sequential": [[422, null]], "mlx.nn.SiLU": [[423, null]], "mlx.nn.Sigmoid": [[424, null]], "mlx.nn.SinusoidalPositionalEncoding": [[425, null]], "mlx.nn.Softmax": [[426, null]], "mlx.nn.Softmin": [[427, null]], "mlx.nn.Softplus": [[428, null]], "mlx.nn.Softshrink": [[429, null]], "mlx.nn.Softsign": [[430, null]], "mlx.nn.Step": [[431, null]], "mlx.nn.Tanh": [[432, null]], "mlx.nn.Transformer": [[433, null]], "mlx.nn.Upsample": [[434, null]], "mlx.nn.average_gradients": [[335, null]], "mlx.nn.celu": [[443, null]], "mlx.nn.elu": [[444, null]], "mlx.nn.gelu": [[445, null]], "mlx.nn.gelu_approx": [[446, null]], "mlx.nn.gelu_fast_approx": [[447, null]], "mlx.nn.glu": [[448, 
null]], "mlx.nn.hard_shrink": [[449, null]], "mlx.nn.hard_tanh": [[450, null]], "mlx.nn.hardswish": [[451, null]], "mlx.nn.init.constant": [[435, null]], "mlx.nn.init.glorot_normal": [[436, null]], "mlx.nn.init.glorot_uniform": [[437, null]], "mlx.nn.init.he_normal": [[438, null]], "mlx.nn.init.he_uniform": [[439, null]], "mlx.nn.init.identity": [[440, null]], "mlx.nn.init.normal": [[441, null]], "mlx.nn.init.uniform": [[442, null]], "mlx.nn.leaky_relu": [[452, null]], "mlx.nn.log_sigmoid": [[453, null]], "mlx.nn.log_softmax": [[454, null]], "mlx.nn.losses.binary_cross_entropy": [[455, null]], "mlx.nn.losses.cosine_similarity_loss": [[456, null]], "mlx.nn.losses.cross_entropy": [[457, null]], "mlx.nn.losses.gaussian_nll_loss": [[458, null]], "mlx.nn.losses.hinge_loss": [[459, null]], "mlx.nn.losses.huber_loss": [[460, null]], "mlx.nn.losses.kl_div_loss": [[461, null]], "mlx.nn.losses.l1_loss": [[462, null]], "mlx.nn.losses.log_cosh_loss": [[463, null]], "mlx.nn.losses.margin_ranking_loss": [[464, null]], "mlx.nn.losses.mse_loss": [[465, null]], "mlx.nn.losses.nll_loss": [[466, null]], "mlx.nn.losses.smooth_l1_loss": [[467, null]], "mlx.nn.losses.triplet_loss": [[468, null]], "mlx.nn.mish": [[469, null]], "mlx.nn.prelu": [[470, null]], "mlx.nn.quantize": [[336, null]], "mlx.nn.relu": [[471, null]], "mlx.nn.relu6": [[472, null]], "mlx.nn.selu": [[473, null]], "mlx.nn.sigmoid": [[474, null]], "mlx.nn.silu": [[475, null]], "mlx.nn.softmax": [[476, null]], "mlx.nn.softmin": [[477, null]], "mlx.nn.softplus": [[478, null]], "mlx.nn.softshrink": [[479, null]], "mlx.nn.step": [[480, null]], "mlx.nn.tanh": [[481, null]], "mlx.nn.value_and_grad": [[337, null]], "mlx.optimizers.AdaDelta": [[489, null]], "mlx.optimizers.Adafactor": [[490, null]], "mlx.optimizers.Adagrad": [[491, null]], "mlx.optimizers.Adam": [[492, null]], "mlx.optimizers.AdamW": [[493, null]], "mlx.optimizers.Adamax": [[494, null]], "mlx.optimizers.Lion": [[495, null]], "mlx.optimizers.MultiOptimizer": [[496, null]], "mlx.optimizers.Muon": [[497, null]], "mlx.optimizers.Optimizer.apply_gradients": [[498, null]], "mlx.optimizers.Optimizer.init": [[499, null]], "mlx.optimizers.Optimizer.state": [[500, null]], "mlx.optimizers.Optimizer.update": [[501, null]], "mlx.optimizers.RMSprop": [[502, null]], "mlx.optimizers.SGD": [[503, null]], "mlx.optimizers.clip_grad_norm": [[338, null]], "mlx.optimizers.cosine_decay": [[504, null]], "mlx.optimizers.exponential_decay": [[505, null]], "mlx.optimizers.join_schedules": [[506, null]], "mlx.optimizers.linear_schedule": [[507, null]], "mlx.optimizers.step_decay": [[508, null]], "mlx.utils.tree_flatten": [[339, null]], "mlx.utils.tree_map": [[340, null]], "mlx.utils.tree_map_with_path": [[341, null]], "mlx.utils.tree_reduce": [[342, null]], "mlx.utils.tree_unflatten": [[343, null]], "x86 Shell": [[9, "x86-shell"]]}, "docnames": ["cpp/ops", "dev/custom_metal_kernels", "dev/extensions", "dev/metal_debugger", "dev/mlx_in_cpp", "examples/linear_regression", "examples/llama-inference", "examples/mlp", "index", "install", "python/_autosummary/mlx.core.Device", "python/_autosummary/mlx.core.Dtype", "python/_autosummary/mlx.core.DtypeCategory", "python/_autosummary/mlx.core.abs", "python/_autosummary/mlx.core.add", "python/_autosummary/mlx.core.addmm", "python/_autosummary/mlx.core.all", "python/_autosummary/mlx.core.allclose", "python/_autosummary/mlx.core.any", "python/_autosummary/mlx.core.arange", "python/_autosummary/mlx.core.arccos", "python/_autosummary/mlx.core.arccosh", 
"python/_autosummary/mlx.core.arcsin", "python/_autosummary/mlx.core.arcsinh", "python/_autosummary/mlx.core.arctan", "python/_autosummary/mlx.core.arctan2", "python/_autosummary/mlx.core.arctanh", "python/_autosummary/mlx.core.argmax", "python/_autosummary/mlx.core.argmin", "python/_autosummary/mlx.core.argpartition", "python/_autosummary/mlx.core.argsort", "python/_autosummary/mlx.core.array", "python/_autosummary/mlx.core.array.T", "python/_autosummary/mlx.core.array.abs", "python/_autosummary/mlx.core.array.all", "python/_autosummary/mlx.core.array.any", "python/_autosummary/mlx.core.array.argmax", "python/_autosummary/mlx.core.array.argmin", "python/_autosummary/mlx.core.array.astype", "python/_autosummary/mlx.core.array.at", "python/_autosummary/mlx.core.array.conj", "python/_autosummary/mlx.core.array.cos", "python/_autosummary/mlx.core.array.cummax", "python/_autosummary/mlx.core.array.cummin", "python/_autosummary/mlx.core.array.cumprod", "python/_autosummary/mlx.core.array.cumsum", "python/_autosummary/mlx.core.array.diag", "python/_autosummary/mlx.core.array.diagonal", "python/_autosummary/mlx.core.array.dtype", "python/_autosummary/mlx.core.array.exp", "python/_autosummary/mlx.core.array.flatten", "python/_autosummary/mlx.core.array.imag", "python/_autosummary/mlx.core.array.item", "python/_autosummary/mlx.core.array.itemsize", "python/_autosummary/mlx.core.array.log", "python/_autosummary/mlx.core.array.log10", "python/_autosummary/mlx.core.array.log1p", "python/_autosummary/mlx.core.array.log2", "python/_autosummary/mlx.core.array.logcumsumexp", "python/_autosummary/mlx.core.array.logsumexp", "python/_autosummary/mlx.core.array.max", "python/_autosummary/mlx.core.array.mean", "python/_autosummary/mlx.core.array.min", "python/_autosummary/mlx.core.array.moveaxis", "python/_autosummary/mlx.core.array.nbytes", "python/_autosummary/mlx.core.array.ndim", "python/_autosummary/mlx.core.array.prod", "python/_autosummary/mlx.core.array.real", "python/_autosummary/mlx.core.array.reciprocal", "python/_autosummary/mlx.core.array.reshape", "python/_autosummary/mlx.core.array.round", "python/_autosummary/mlx.core.array.rsqrt", "python/_autosummary/mlx.core.array.shape", "python/_autosummary/mlx.core.array.sin", "python/_autosummary/mlx.core.array.size", "python/_autosummary/mlx.core.array.split", "python/_autosummary/mlx.core.array.sqrt", "python/_autosummary/mlx.core.array.square", "python/_autosummary/mlx.core.array.squeeze", "python/_autosummary/mlx.core.array.std", "python/_autosummary/mlx.core.array.sum", "python/_autosummary/mlx.core.array.swapaxes", "python/_autosummary/mlx.core.array.tolist", "python/_autosummary/mlx.core.array.transpose", "python/_autosummary/mlx.core.array.var", "python/_autosummary/mlx.core.array.view", "python/_autosummary/mlx.core.array_equal", "python/_autosummary/mlx.core.as_strided", "python/_autosummary/mlx.core.async_eval", "python/_autosummary/mlx.core.atleast_1d", "python/_autosummary/mlx.core.atleast_2d", "python/_autosummary/mlx.core.atleast_3d", "python/_autosummary/mlx.core.bitwise_and", "python/_autosummary/mlx.core.bitwise_invert", "python/_autosummary/mlx.core.bitwise_or", "python/_autosummary/mlx.core.bitwise_xor", "python/_autosummary/mlx.core.block_masked_mm", "python/_autosummary/mlx.core.broadcast_arrays", "python/_autosummary/mlx.core.broadcast_to", "python/_autosummary/mlx.core.ceil", "python/_autosummary/mlx.core.clear_cache", "python/_autosummary/mlx.core.clip", "python/_autosummary/mlx.core.compile", 
"python/_autosummary/mlx.core.concatenate", "python/_autosummary/mlx.core.conj", "python/_autosummary/mlx.core.conjugate", "python/_autosummary/mlx.core.contiguous", "python/_autosummary/mlx.core.conv1d", "python/_autosummary/mlx.core.conv2d", "python/_autosummary/mlx.core.conv3d", "python/_autosummary/mlx.core.conv_general", "python/_autosummary/mlx.core.conv_transpose1d", "python/_autosummary/mlx.core.conv_transpose2d", "python/_autosummary/mlx.core.conv_transpose3d", "python/_autosummary/mlx.core.convolve", "python/_autosummary/mlx.core.cos", "python/_autosummary/mlx.core.cosh", "python/_autosummary/mlx.core.cuda.is_available", "python/_autosummary/mlx.core.cummax", "python/_autosummary/mlx.core.cummin", "python/_autosummary/mlx.core.cumprod", "python/_autosummary/mlx.core.cumsum", "python/_autosummary/mlx.core.custom_function", "python/_autosummary/mlx.core.default_device", "python/_autosummary/mlx.core.default_stream", "python/_autosummary/mlx.core.degrees", "python/_autosummary/mlx.core.dequantize", "python/_autosummary/mlx.core.diag", "python/_autosummary/mlx.core.diagonal", "python/_autosummary/mlx.core.disable_compile", "python/_autosummary/mlx.core.distributed.Group", "python/_autosummary/mlx.core.distributed.all_gather", "python/_autosummary/mlx.core.distributed.all_sum", "python/_autosummary/mlx.core.distributed.init", "python/_autosummary/mlx.core.distributed.is_available", "python/_autosummary/mlx.core.distributed.recv", "python/_autosummary/mlx.core.distributed.recv_like", "python/_autosummary/mlx.core.distributed.send", "python/_autosummary/mlx.core.divide", "python/_autosummary/mlx.core.divmod", "python/_autosummary/mlx.core.einsum", "python/_autosummary/mlx.core.einsum_path", "python/_autosummary/mlx.core.enable_compile", "python/_autosummary/mlx.core.equal", "python/_autosummary/mlx.core.erf", "python/_autosummary/mlx.core.erfinv", "python/_autosummary/mlx.core.eval", "python/_autosummary/mlx.core.exp", "python/_autosummary/mlx.core.expand_dims", "python/_autosummary/mlx.core.expm1", "python/_autosummary/mlx.core.export_function", "python/_autosummary/mlx.core.export_to_dot", "python/_autosummary/mlx.core.exporter", "python/_autosummary/mlx.core.eye", "python/_autosummary/mlx.core.fast.cuda_kernel", "python/_autosummary/mlx.core.fast.layer_norm", "python/_autosummary/mlx.core.fast.metal_kernel", "python/_autosummary/mlx.core.fast.rms_norm", "python/_autosummary/mlx.core.fast.rope", "python/_autosummary/mlx.core.fast.scaled_dot_product_attention", "python/_autosummary/mlx.core.fft.fft", "python/_autosummary/mlx.core.fft.fft2", "python/_autosummary/mlx.core.fft.fftn", "python/_autosummary/mlx.core.fft.fftshift", "python/_autosummary/mlx.core.fft.ifft", "python/_autosummary/mlx.core.fft.ifft2", "python/_autosummary/mlx.core.fft.ifftn", "python/_autosummary/mlx.core.fft.ifftshift", "python/_autosummary/mlx.core.fft.irfft", "python/_autosummary/mlx.core.fft.irfft2", "python/_autosummary/mlx.core.fft.irfftn", "python/_autosummary/mlx.core.fft.rfft", "python/_autosummary/mlx.core.fft.rfft2", "python/_autosummary/mlx.core.fft.rfftn", "python/_autosummary/mlx.core.finfo", "python/_autosummary/mlx.core.flatten", "python/_autosummary/mlx.core.floor", "python/_autosummary/mlx.core.floor_divide", "python/_autosummary/mlx.core.full", "python/_autosummary/mlx.core.gather_mm", "python/_autosummary/mlx.core.gather_qmm", "python/_autosummary/mlx.core.get_active_memory", "python/_autosummary/mlx.core.get_cache_memory", "python/_autosummary/mlx.core.get_peak_memory", 
"python/_autosummary/mlx.core.grad", "python/_autosummary/mlx.core.greater", "python/_autosummary/mlx.core.greater_equal", "python/_autosummary/mlx.core.hadamard_transform", "python/_autosummary/mlx.core.identity", "python/_autosummary/mlx.core.imag", "python/_autosummary/mlx.core.import_function", "python/_autosummary/mlx.core.inner", "python/_autosummary/mlx.core.isclose", "python/_autosummary/mlx.core.isfinite", "python/_autosummary/mlx.core.isinf", "python/_autosummary/mlx.core.isnan", "python/_autosummary/mlx.core.isneginf", "python/_autosummary/mlx.core.isposinf", "python/_autosummary/mlx.core.issubdtype", "python/_autosummary/mlx.core.jvp", "python/_autosummary/mlx.core.kron", "python/_autosummary/mlx.core.left_shift", "python/_autosummary/mlx.core.less", "python/_autosummary/mlx.core.less_equal", "python/_autosummary/mlx.core.linalg.cholesky", "python/_autosummary/mlx.core.linalg.cholesky_inv", "python/_autosummary/mlx.core.linalg.cross", "python/_autosummary/mlx.core.linalg.eig", "python/_autosummary/mlx.core.linalg.eigh", "python/_autosummary/mlx.core.linalg.eigvals", "python/_autosummary/mlx.core.linalg.eigvalsh", "python/_autosummary/mlx.core.linalg.inv", "python/_autosummary/mlx.core.linalg.lu", "python/_autosummary/mlx.core.linalg.lu_factor", "python/_autosummary/mlx.core.linalg.norm", "python/_autosummary/mlx.core.linalg.pinv", "python/_autosummary/mlx.core.linalg.qr", "python/_autosummary/mlx.core.linalg.solve", "python/_autosummary/mlx.core.linalg.solve_triangular", "python/_autosummary/mlx.core.linalg.svd", "python/_autosummary/mlx.core.linalg.tri_inv", "python/_autosummary/mlx.core.linspace", "python/_autosummary/mlx.core.load", "python/_autosummary/mlx.core.log", "python/_autosummary/mlx.core.log10", "python/_autosummary/mlx.core.log1p", "python/_autosummary/mlx.core.log2", "python/_autosummary/mlx.core.logaddexp", "python/_autosummary/mlx.core.logcumsumexp", "python/_autosummary/mlx.core.logical_and", "python/_autosummary/mlx.core.logical_not", "python/_autosummary/mlx.core.logical_or", "python/_autosummary/mlx.core.logsumexp", "python/_autosummary/mlx.core.matmul", "python/_autosummary/mlx.core.max", "python/_autosummary/mlx.core.maximum", "python/_autosummary/mlx.core.mean", "python/_autosummary/mlx.core.meshgrid", "python/_autosummary/mlx.core.metal.device_info", "python/_autosummary/mlx.core.metal.is_available", "python/_autosummary/mlx.core.metal.start_capture", "python/_autosummary/mlx.core.metal.stop_capture", "python/_autosummary/mlx.core.min", "python/_autosummary/mlx.core.minimum", "python/_autosummary/mlx.core.moveaxis", "python/_autosummary/mlx.core.multiply", "python/_autosummary/mlx.core.nan_to_num", "python/_autosummary/mlx.core.negative", "python/_autosummary/mlx.core.new_stream", "python/_autosummary/mlx.core.not_equal", "python/_autosummary/mlx.core.ones", "python/_autosummary/mlx.core.ones_like", "python/_autosummary/mlx.core.outer", "python/_autosummary/mlx.core.pad", "python/_autosummary/mlx.core.partition", "python/_autosummary/mlx.core.power", "python/_autosummary/mlx.core.prod", "python/_autosummary/mlx.core.put_along_axis", "python/_autosummary/mlx.core.quantize", "python/_autosummary/mlx.core.quantized_matmul", "python/_autosummary/mlx.core.radians", "python/_autosummary/mlx.core.random.bernoulli", "python/_autosummary/mlx.core.random.categorical", "python/_autosummary/mlx.core.random.gumbel", "python/_autosummary/mlx.core.random.key", "python/_autosummary/mlx.core.random.laplace", "python/_autosummary/mlx.core.random.multivariate_normal", 
"python/_autosummary/mlx.core.random.normal", "python/_autosummary/mlx.core.random.permutation", "python/_autosummary/mlx.core.random.randint", "python/_autosummary/mlx.core.random.seed", "python/_autosummary/mlx.core.random.split", "python/_autosummary/mlx.core.random.truncated_normal", "python/_autosummary/mlx.core.random.uniform", "python/_autosummary/mlx.core.real", "python/_autosummary/mlx.core.reciprocal", "python/_autosummary/mlx.core.remainder", "python/_autosummary/mlx.core.repeat", "python/_autosummary/mlx.core.reset_peak_memory", "python/_autosummary/mlx.core.reshape", "python/_autosummary/mlx.core.right_shift", "python/_autosummary/mlx.core.roll", "python/_autosummary/mlx.core.round", "python/_autosummary/mlx.core.rsqrt", "python/_autosummary/mlx.core.save", "python/_autosummary/mlx.core.save_gguf", "python/_autosummary/mlx.core.save_safetensors", "python/_autosummary/mlx.core.savez", "python/_autosummary/mlx.core.savez_compressed", "python/_autosummary/mlx.core.set_cache_limit", "python/_autosummary/mlx.core.set_default_device", "python/_autosummary/mlx.core.set_default_stream", "python/_autosummary/mlx.core.set_memory_limit", "python/_autosummary/mlx.core.set_wired_limit", "python/_autosummary/mlx.core.sigmoid", "python/_autosummary/mlx.core.sign", "python/_autosummary/mlx.core.sin", "python/_autosummary/mlx.core.sinh", "python/_autosummary/mlx.core.slice", "python/_autosummary/mlx.core.slice_update", "python/_autosummary/mlx.core.softmax", "python/_autosummary/mlx.core.sort", "python/_autosummary/mlx.core.split", "python/_autosummary/mlx.core.sqrt", "python/_autosummary/mlx.core.square", "python/_autosummary/mlx.core.squeeze", "python/_autosummary/mlx.core.stack", "python/_autosummary/mlx.core.std", "python/_autosummary/mlx.core.stop_gradient", "python/_autosummary/mlx.core.stream", "python/_autosummary/mlx.core.subtract", "python/_autosummary/mlx.core.sum", "python/_autosummary/mlx.core.swapaxes", "python/_autosummary/mlx.core.synchronize", "python/_autosummary/mlx.core.take", "python/_autosummary/mlx.core.take_along_axis", "python/_autosummary/mlx.core.tan", "python/_autosummary/mlx.core.tanh", "python/_autosummary/mlx.core.tensordot", "python/_autosummary/mlx.core.tile", "python/_autosummary/mlx.core.topk", "python/_autosummary/mlx.core.trace", "python/_autosummary/mlx.core.transpose", "python/_autosummary/mlx.core.tri", "python/_autosummary/mlx.core.tril", "python/_autosummary/mlx.core.triu", "python/_autosummary/mlx.core.unflatten", "python/_autosummary/mlx.core.value_and_grad", "python/_autosummary/mlx.core.var", "python/_autosummary/mlx.core.view", "python/_autosummary/mlx.core.vjp", "python/_autosummary/mlx.core.vmap", "python/_autosummary/mlx.core.where", "python/_autosummary/mlx.core.zeros", "python/_autosummary/mlx.core.zeros_like", "python/_autosummary/mlx.nn.average_gradients", "python/_autosummary/mlx.nn.quantize", "python/_autosummary/mlx.nn.value_and_grad", "python/_autosummary/mlx.optimizers.clip_grad_norm", "python/_autosummary/mlx.utils.tree_flatten", "python/_autosummary/mlx.utils.tree_map", "python/_autosummary/mlx.utils.tree_map_with_path", "python/_autosummary/mlx.utils.tree_reduce", "python/_autosummary/mlx.utils.tree_unflatten", "python/_autosummary/stream_class", "python/array", "python/cuda", "python/data_types", "python/devices_and_streams", "python/distributed", "python/export", "python/fast", "python/fft", "python/linalg", "python/memory_management", "python/metal", "python/nn", "python/nn/_autosummary/mlx.nn.ALiBi", 
"python/nn/_autosummary/mlx.nn.AvgPool1d", "python/nn/_autosummary/mlx.nn.AvgPool2d", "python/nn/_autosummary/mlx.nn.AvgPool3d", "python/nn/_autosummary/mlx.nn.BatchNorm", "python/nn/_autosummary/mlx.nn.CELU", "python/nn/_autosummary/mlx.nn.Conv1d", "python/nn/_autosummary/mlx.nn.Conv2d", "python/nn/_autosummary/mlx.nn.Conv3d", "python/nn/_autosummary/mlx.nn.ConvTranspose1d", "python/nn/_autosummary/mlx.nn.ConvTranspose2d", "python/nn/_autosummary/mlx.nn.ConvTranspose3d", "python/nn/_autosummary/mlx.nn.Dropout", "python/nn/_autosummary/mlx.nn.Dropout2d", "python/nn/_autosummary/mlx.nn.Dropout3d", "python/nn/_autosummary/mlx.nn.ELU", "python/nn/_autosummary/mlx.nn.Embedding", "python/nn/_autosummary/mlx.nn.GELU", "python/nn/_autosummary/mlx.nn.GLU", "python/nn/_autosummary/mlx.nn.GRU", "python/nn/_autosummary/mlx.nn.GroupNorm", "python/nn/_autosummary/mlx.nn.HardShrink", "python/nn/_autosummary/mlx.nn.HardTanh", "python/nn/_autosummary/mlx.nn.Hardswish", "python/nn/_autosummary/mlx.nn.InstanceNorm", "python/nn/_autosummary/mlx.nn.LSTM", "python/nn/_autosummary/mlx.nn.LayerNorm", "python/nn/_autosummary/mlx.nn.LeakyReLU", "python/nn/_autosummary/mlx.nn.Linear", "python/nn/_autosummary/mlx.nn.LogSigmoid", "python/nn/_autosummary/mlx.nn.LogSoftmax", "python/nn/_autosummary/mlx.nn.MaxPool1d", "python/nn/_autosummary/mlx.nn.MaxPool2d", "python/nn/_autosummary/mlx.nn.MaxPool3d", "python/nn/_autosummary/mlx.nn.Mish", "python/nn/_autosummary/mlx.nn.Module.apply", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules", "python/nn/_autosummary/mlx.nn.Module.children", "python/nn/_autosummary/mlx.nn.Module.eval", "python/nn/_autosummary/mlx.nn.Module.filter_and_map", "python/nn/_autosummary/mlx.nn.Module.freeze", "python/nn/_autosummary/mlx.nn.Module.leaf_modules", "python/nn/_autosummary/mlx.nn.Module.load_weights", "python/nn/_autosummary/mlx.nn.Module.modules", "python/nn/_autosummary/mlx.nn.Module.named_modules", "python/nn/_autosummary/mlx.nn.Module.parameters", "python/nn/_autosummary/mlx.nn.Module.save_weights", "python/nn/_autosummary/mlx.nn.Module.set_dtype", "python/nn/_autosummary/mlx.nn.Module.state", "python/nn/_autosummary/mlx.nn.Module.train", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters", "python/nn/_autosummary/mlx.nn.Module.training", "python/nn/_autosummary/mlx.nn.Module.unfreeze", "python/nn/_autosummary/mlx.nn.Module.update", "python/nn/_autosummary/mlx.nn.Module.update_modules", "python/nn/_autosummary/mlx.nn.MultiHeadAttention", "python/nn/_autosummary/mlx.nn.PReLU", "python/nn/_autosummary/mlx.nn.QuantizedEmbedding", "python/nn/_autosummary/mlx.nn.QuantizedLinear", "python/nn/_autosummary/mlx.nn.RMSNorm", "python/nn/_autosummary/mlx.nn.RNN", "python/nn/_autosummary/mlx.nn.ReLU", "python/nn/_autosummary/mlx.nn.ReLU6", "python/nn/_autosummary/mlx.nn.RoPE", "python/nn/_autosummary/mlx.nn.SELU", "python/nn/_autosummary/mlx.nn.Sequential", "python/nn/_autosummary/mlx.nn.SiLU", "python/nn/_autosummary/mlx.nn.Sigmoid", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding", "python/nn/_autosummary/mlx.nn.Softmax", "python/nn/_autosummary/mlx.nn.Softmin", "python/nn/_autosummary/mlx.nn.Softplus", "python/nn/_autosummary/mlx.nn.Softshrink", "python/nn/_autosummary/mlx.nn.Softsign", "python/nn/_autosummary/mlx.nn.Step", "python/nn/_autosummary/mlx.nn.Tanh", "python/nn/_autosummary/mlx.nn.Transformer", "python/nn/_autosummary/mlx.nn.Upsample", "python/nn/_autosummary/mlx.nn.init.constant", "python/nn/_autosummary/mlx.nn.init.glorot_normal", 
"python/nn/_autosummary/mlx.nn.init.glorot_uniform", "python/nn/_autosummary/mlx.nn.init.he_normal", "python/nn/_autosummary/mlx.nn.init.he_uniform", "python/nn/_autosummary/mlx.nn.init.identity", "python/nn/_autosummary/mlx.nn.init.normal", "python/nn/_autosummary/mlx.nn.init.uniform", "python/nn/_autosummary_functions/mlx.nn.celu", "python/nn/_autosummary_functions/mlx.nn.elu", "python/nn/_autosummary_functions/mlx.nn.gelu", "python/nn/_autosummary_functions/mlx.nn.gelu_approx", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx", "python/nn/_autosummary_functions/mlx.nn.glu", "python/nn/_autosummary_functions/mlx.nn.hard_shrink", "python/nn/_autosummary_functions/mlx.nn.hard_tanh", "python/nn/_autosummary_functions/mlx.nn.hardswish", "python/nn/_autosummary_functions/mlx.nn.leaky_relu", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid", "python/nn/_autosummary_functions/mlx.nn.log_softmax", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss", "python/nn/_autosummary_functions/mlx.nn.mish", "python/nn/_autosummary_functions/mlx.nn.prelu", "python/nn/_autosummary_functions/mlx.nn.relu", "python/nn/_autosummary_functions/mlx.nn.relu6", "python/nn/_autosummary_functions/mlx.nn.selu", "python/nn/_autosummary_functions/mlx.nn.sigmoid", "python/nn/_autosummary_functions/mlx.nn.silu", "python/nn/_autosummary_functions/mlx.nn.softmax", "python/nn/_autosummary_functions/mlx.nn.softmin", "python/nn/_autosummary_functions/mlx.nn.softplus", "python/nn/_autosummary_functions/mlx.nn.softshrink", "python/nn/_autosummary_functions/mlx.nn.step", "python/nn/_autosummary_functions/mlx.nn.tanh", "python/nn/functions", "python/nn/init", "python/nn/layers", "python/nn/losses", "python/nn/module", "python/ops", "python/optimizers", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta", "python/optimizers/_autosummary/mlx.optimizers.Adafactor", "python/optimizers/_autosummary/mlx.optimizers.Adagrad", "python/optimizers/_autosummary/mlx.optimizers.Adam", "python/optimizers/_autosummary/mlx.optimizers.AdamW", "python/optimizers/_autosummary/mlx.optimizers.Adamax", "python/optimizers/_autosummary/mlx.optimizers.Lion", "python/optimizers/_autosummary/mlx.optimizers.MultiOptimizer", "python/optimizers/_autosummary/mlx.optimizers.Muon", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update", "python/optimizers/_autosummary/mlx.optimizers.RMSprop", "python/optimizers/_autosummary/mlx.optimizers.SGD", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay", 
"python/optimizers/_autosummary/mlx.optimizers.exponential_decay", "python/optimizers/_autosummary/mlx.optimizers.join_schedules", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule", "python/optimizers/_autosummary/mlx.optimizers.step_decay", "python/optimizers/common_optimizers", "python/optimizers/optimizer", "python/optimizers/schedulers", "python/random", "python/transforms", "python/tree_utils", "usage/compile", "usage/distributed", "usage/export", "usage/function_transforms", "usage/indexing", "usage/launching_distributed", "usage/lazy_evaluation", "usage/numpy", "usage/quick_start", "usage/saving_and_loading", "usage/unified_memory", "usage/using_streams"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1}, "filenames": ["cpp/ops.rst", "dev/custom_metal_kernels.rst", "dev/extensions.rst", "dev/metal_debugger.rst", "dev/mlx_in_cpp.rst", "examples/linear_regression.rst", "examples/llama-inference.rst", "examples/mlp.rst", "index.rst", "install.rst", "python/_autosummary/mlx.core.Device.rst", "python/_autosummary/mlx.core.Dtype.rst", "python/_autosummary/mlx.core.DtypeCategory.rst", "python/_autosummary/mlx.core.abs.rst", "python/_autosummary/mlx.core.add.rst", "python/_autosummary/mlx.core.addmm.rst", "python/_autosummary/mlx.core.all.rst", "python/_autosummary/mlx.core.allclose.rst", "python/_autosummary/mlx.core.any.rst", "python/_autosummary/mlx.core.arange.rst", "python/_autosummary/mlx.core.arccos.rst", "python/_autosummary/mlx.core.arccosh.rst", "python/_autosummary/mlx.core.arcsin.rst", "python/_autosummary/mlx.core.arcsinh.rst", "python/_autosummary/mlx.core.arctan.rst", "python/_autosummary/mlx.core.arctan2.rst", "python/_autosummary/mlx.core.arctanh.rst", "python/_autosummary/mlx.core.argmax.rst", "python/_autosummary/mlx.core.argmin.rst", "python/_autosummary/mlx.core.argpartition.rst", "python/_autosummary/mlx.core.argsort.rst", "python/_autosummary/mlx.core.array.rst", "python/_autosummary/mlx.core.array.T.rst", "python/_autosummary/mlx.core.array.abs.rst", "python/_autosummary/mlx.core.array.all.rst", "python/_autosummary/mlx.core.array.any.rst", "python/_autosummary/mlx.core.array.argmax.rst", "python/_autosummary/mlx.core.array.argmin.rst", "python/_autosummary/mlx.core.array.astype.rst", "python/_autosummary/mlx.core.array.at.rst", "python/_autosummary/mlx.core.array.conj.rst", "python/_autosummary/mlx.core.array.cos.rst", "python/_autosummary/mlx.core.array.cummax.rst", "python/_autosummary/mlx.core.array.cummin.rst", "python/_autosummary/mlx.core.array.cumprod.rst", "python/_autosummary/mlx.core.array.cumsum.rst", "python/_autosummary/mlx.core.array.diag.rst", "python/_autosummary/mlx.core.array.diagonal.rst", "python/_autosummary/mlx.core.array.dtype.rst", "python/_autosummary/mlx.core.array.exp.rst", "python/_autosummary/mlx.core.array.flatten.rst", "python/_autosummary/mlx.core.array.imag.rst", "python/_autosummary/mlx.core.array.item.rst", "python/_autosummary/mlx.core.array.itemsize.rst", "python/_autosummary/mlx.core.array.log.rst", "python/_autosummary/mlx.core.array.log10.rst", "python/_autosummary/mlx.core.array.log1p.rst", "python/_autosummary/mlx.core.array.log2.rst", "python/_autosummary/mlx.core.array.logcumsumexp.rst", 
"python/_autosummary/mlx.core.array.logsumexp.rst", "python/_autosummary/mlx.core.array.max.rst", "python/_autosummary/mlx.core.array.mean.rst", "python/_autosummary/mlx.core.array.min.rst", "python/_autosummary/mlx.core.array.moveaxis.rst", "python/_autosummary/mlx.core.array.nbytes.rst", "python/_autosummary/mlx.core.array.ndim.rst", "python/_autosummary/mlx.core.array.prod.rst", "python/_autosummary/mlx.core.array.real.rst", "python/_autosummary/mlx.core.array.reciprocal.rst", "python/_autosummary/mlx.core.array.reshape.rst", "python/_autosummary/mlx.core.array.round.rst", "python/_autosummary/mlx.core.array.rsqrt.rst", "python/_autosummary/mlx.core.array.shape.rst", "python/_autosummary/mlx.core.array.sin.rst", "python/_autosummary/mlx.core.array.size.rst", "python/_autosummary/mlx.core.array.split.rst", "python/_autosummary/mlx.core.array.sqrt.rst", "python/_autosummary/mlx.core.array.square.rst", "python/_autosummary/mlx.core.array.squeeze.rst", "python/_autosummary/mlx.core.array.std.rst", "python/_autosummary/mlx.core.array.sum.rst", "python/_autosummary/mlx.core.array.swapaxes.rst", "python/_autosummary/mlx.core.array.tolist.rst", "python/_autosummary/mlx.core.array.transpose.rst", "python/_autosummary/mlx.core.array.var.rst", "python/_autosummary/mlx.core.array.view.rst", "python/_autosummary/mlx.core.array_equal.rst", "python/_autosummary/mlx.core.as_strided.rst", "python/_autosummary/mlx.core.async_eval.rst", "python/_autosummary/mlx.core.atleast_1d.rst", "python/_autosummary/mlx.core.atleast_2d.rst", "python/_autosummary/mlx.core.atleast_3d.rst", "python/_autosummary/mlx.core.bitwise_and.rst", "python/_autosummary/mlx.core.bitwise_invert.rst", "python/_autosummary/mlx.core.bitwise_or.rst", "python/_autosummary/mlx.core.bitwise_xor.rst", "python/_autosummary/mlx.core.block_masked_mm.rst", "python/_autosummary/mlx.core.broadcast_arrays.rst", "python/_autosummary/mlx.core.broadcast_to.rst", "python/_autosummary/mlx.core.ceil.rst", "python/_autosummary/mlx.core.clear_cache.rst", "python/_autosummary/mlx.core.clip.rst", "python/_autosummary/mlx.core.compile.rst", "python/_autosummary/mlx.core.concatenate.rst", "python/_autosummary/mlx.core.conj.rst", "python/_autosummary/mlx.core.conjugate.rst", "python/_autosummary/mlx.core.contiguous.rst", "python/_autosummary/mlx.core.conv1d.rst", "python/_autosummary/mlx.core.conv2d.rst", "python/_autosummary/mlx.core.conv3d.rst", "python/_autosummary/mlx.core.conv_general.rst", "python/_autosummary/mlx.core.conv_transpose1d.rst", "python/_autosummary/mlx.core.conv_transpose2d.rst", "python/_autosummary/mlx.core.conv_transpose3d.rst", "python/_autosummary/mlx.core.convolve.rst", "python/_autosummary/mlx.core.cos.rst", "python/_autosummary/mlx.core.cosh.rst", "python/_autosummary/mlx.core.cuda.is_available.rst", "python/_autosummary/mlx.core.cummax.rst", "python/_autosummary/mlx.core.cummin.rst", "python/_autosummary/mlx.core.cumprod.rst", "python/_autosummary/mlx.core.cumsum.rst", "python/_autosummary/mlx.core.custom_function.rst", "python/_autosummary/mlx.core.default_device.rst", "python/_autosummary/mlx.core.default_stream.rst", "python/_autosummary/mlx.core.degrees.rst", "python/_autosummary/mlx.core.dequantize.rst", "python/_autosummary/mlx.core.diag.rst", "python/_autosummary/mlx.core.diagonal.rst", "python/_autosummary/mlx.core.disable_compile.rst", "python/_autosummary/mlx.core.distributed.Group.rst", "python/_autosummary/mlx.core.distributed.all_gather.rst", "python/_autosummary/mlx.core.distributed.all_sum.rst", 
"python/_autosummary/mlx.core.distributed.init.rst", "python/_autosummary/mlx.core.distributed.is_available.rst", "python/_autosummary/mlx.core.distributed.recv.rst", "python/_autosummary/mlx.core.distributed.recv_like.rst", "python/_autosummary/mlx.core.distributed.send.rst", "python/_autosummary/mlx.core.divide.rst", "python/_autosummary/mlx.core.divmod.rst", "python/_autosummary/mlx.core.einsum.rst", "python/_autosummary/mlx.core.einsum_path.rst", "python/_autosummary/mlx.core.enable_compile.rst", "python/_autosummary/mlx.core.equal.rst", "python/_autosummary/mlx.core.erf.rst", "python/_autosummary/mlx.core.erfinv.rst", "python/_autosummary/mlx.core.eval.rst", "python/_autosummary/mlx.core.exp.rst", "python/_autosummary/mlx.core.expand_dims.rst", "python/_autosummary/mlx.core.expm1.rst", "python/_autosummary/mlx.core.export_function.rst", "python/_autosummary/mlx.core.export_to_dot.rst", "python/_autosummary/mlx.core.exporter.rst", "python/_autosummary/mlx.core.eye.rst", "python/_autosummary/mlx.core.fast.cuda_kernel.rst", "python/_autosummary/mlx.core.fast.layer_norm.rst", "python/_autosummary/mlx.core.fast.metal_kernel.rst", "python/_autosummary/mlx.core.fast.rms_norm.rst", "python/_autosummary/mlx.core.fast.rope.rst", "python/_autosummary/mlx.core.fast.scaled_dot_product_attention.rst", "python/_autosummary/mlx.core.fft.fft.rst", "python/_autosummary/mlx.core.fft.fft2.rst", "python/_autosummary/mlx.core.fft.fftn.rst", "python/_autosummary/mlx.core.fft.fftshift.rst", "python/_autosummary/mlx.core.fft.ifft.rst", "python/_autosummary/mlx.core.fft.ifft2.rst", "python/_autosummary/mlx.core.fft.ifftn.rst", "python/_autosummary/mlx.core.fft.ifftshift.rst", "python/_autosummary/mlx.core.fft.irfft.rst", "python/_autosummary/mlx.core.fft.irfft2.rst", "python/_autosummary/mlx.core.fft.irfftn.rst", "python/_autosummary/mlx.core.fft.rfft.rst", "python/_autosummary/mlx.core.fft.rfft2.rst", "python/_autosummary/mlx.core.fft.rfftn.rst", "python/_autosummary/mlx.core.finfo.rst", "python/_autosummary/mlx.core.flatten.rst", "python/_autosummary/mlx.core.floor.rst", "python/_autosummary/mlx.core.floor_divide.rst", "python/_autosummary/mlx.core.full.rst", "python/_autosummary/mlx.core.gather_mm.rst", "python/_autosummary/mlx.core.gather_qmm.rst", "python/_autosummary/mlx.core.get_active_memory.rst", "python/_autosummary/mlx.core.get_cache_memory.rst", "python/_autosummary/mlx.core.get_peak_memory.rst", "python/_autosummary/mlx.core.grad.rst", "python/_autosummary/mlx.core.greater.rst", "python/_autosummary/mlx.core.greater_equal.rst", "python/_autosummary/mlx.core.hadamard_transform.rst", "python/_autosummary/mlx.core.identity.rst", "python/_autosummary/mlx.core.imag.rst", "python/_autosummary/mlx.core.import_function.rst", "python/_autosummary/mlx.core.inner.rst", "python/_autosummary/mlx.core.isclose.rst", "python/_autosummary/mlx.core.isfinite.rst", "python/_autosummary/mlx.core.isinf.rst", "python/_autosummary/mlx.core.isnan.rst", "python/_autosummary/mlx.core.isneginf.rst", "python/_autosummary/mlx.core.isposinf.rst", "python/_autosummary/mlx.core.issubdtype.rst", "python/_autosummary/mlx.core.jvp.rst", "python/_autosummary/mlx.core.kron.rst", "python/_autosummary/mlx.core.left_shift.rst", "python/_autosummary/mlx.core.less.rst", "python/_autosummary/mlx.core.less_equal.rst", "python/_autosummary/mlx.core.linalg.cholesky.rst", "python/_autosummary/mlx.core.linalg.cholesky_inv.rst", "python/_autosummary/mlx.core.linalg.cross.rst", "python/_autosummary/mlx.core.linalg.eig.rst", 
"python/_autosummary/mlx.core.linalg.eigh.rst", "python/_autosummary/mlx.core.linalg.eigvals.rst", "python/_autosummary/mlx.core.linalg.eigvalsh.rst", "python/_autosummary/mlx.core.linalg.inv.rst", "python/_autosummary/mlx.core.linalg.lu.rst", "python/_autosummary/mlx.core.linalg.lu_factor.rst", "python/_autosummary/mlx.core.linalg.norm.rst", "python/_autosummary/mlx.core.linalg.pinv.rst", "python/_autosummary/mlx.core.linalg.qr.rst", "python/_autosummary/mlx.core.linalg.solve.rst", "python/_autosummary/mlx.core.linalg.solve_triangular.rst", "python/_autosummary/mlx.core.linalg.svd.rst", "python/_autosummary/mlx.core.linalg.tri_inv.rst", "python/_autosummary/mlx.core.linspace.rst", "python/_autosummary/mlx.core.load.rst", "python/_autosummary/mlx.core.log.rst", "python/_autosummary/mlx.core.log10.rst", "python/_autosummary/mlx.core.log1p.rst", "python/_autosummary/mlx.core.log2.rst", "python/_autosummary/mlx.core.logaddexp.rst", "python/_autosummary/mlx.core.logcumsumexp.rst", "python/_autosummary/mlx.core.logical_and.rst", "python/_autosummary/mlx.core.logical_not.rst", "python/_autosummary/mlx.core.logical_or.rst", "python/_autosummary/mlx.core.logsumexp.rst", "python/_autosummary/mlx.core.matmul.rst", "python/_autosummary/mlx.core.max.rst", "python/_autosummary/mlx.core.maximum.rst", "python/_autosummary/mlx.core.mean.rst", "python/_autosummary/mlx.core.meshgrid.rst", "python/_autosummary/mlx.core.metal.device_info.rst", "python/_autosummary/mlx.core.metal.is_available.rst", "python/_autosummary/mlx.core.metal.start_capture.rst", "python/_autosummary/mlx.core.metal.stop_capture.rst", "python/_autosummary/mlx.core.min.rst", "python/_autosummary/mlx.core.minimum.rst", "python/_autosummary/mlx.core.moveaxis.rst", "python/_autosummary/mlx.core.multiply.rst", "python/_autosummary/mlx.core.nan_to_num.rst", "python/_autosummary/mlx.core.negative.rst", "python/_autosummary/mlx.core.new_stream.rst", "python/_autosummary/mlx.core.not_equal.rst", "python/_autosummary/mlx.core.ones.rst", "python/_autosummary/mlx.core.ones_like.rst", "python/_autosummary/mlx.core.outer.rst", "python/_autosummary/mlx.core.pad.rst", "python/_autosummary/mlx.core.partition.rst", "python/_autosummary/mlx.core.power.rst", "python/_autosummary/mlx.core.prod.rst", "python/_autosummary/mlx.core.put_along_axis.rst", "python/_autosummary/mlx.core.quantize.rst", "python/_autosummary/mlx.core.quantized_matmul.rst", "python/_autosummary/mlx.core.radians.rst", "python/_autosummary/mlx.core.random.bernoulli.rst", "python/_autosummary/mlx.core.random.categorical.rst", "python/_autosummary/mlx.core.random.gumbel.rst", "python/_autosummary/mlx.core.random.key.rst", "python/_autosummary/mlx.core.random.laplace.rst", "python/_autosummary/mlx.core.random.multivariate_normal.rst", "python/_autosummary/mlx.core.random.normal.rst", "python/_autosummary/mlx.core.random.permutation.rst", "python/_autosummary/mlx.core.random.randint.rst", "python/_autosummary/mlx.core.random.seed.rst", "python/_autosummary/mlx.core.random.split.rst", "python/_autosummary/mlx.core.random.truncated_normal.rst", "python/_autosummary/mlx.core.random.uniform.rst", "python/_autosummary/mlx.core.real.rst", "python/_autosummary/mlx.core.reciprocal.rst", "python/_autosummary/mlx.core.remainder.rst", "python/_autosummary/mlx.core.repeat.rst", "python/_autosummary/mlx.core.reset_peak_memory.rst", "python/_autosummary/mlx.core.reshape.rst", "python/_autosummary/mlx.core.right_shift.rst", "python/_autosummary/mlx.core.roll.rst", "python/_autosummary/mlx.core.round.rst", 
"python/_autosummary/mlx.core.rsqrt.rst", "python/_autosummary/mlx.core.save.rst", "python/_autosummary/mlx.core.save_gguf.rst", "python/_autosummary/mlx.core.save_safetensors.rst", "python/_autosummary/mlx.core.savez.rst", "python/_autosummary/mlx.core.savez_compressed.rst", "python/_autosummary/mlx.core.set_cache_limit.rst", "python/_autosummary/mlx.core.set_default_device.rst", "python/_autosummary/mlx.core.set_default_stream.rst", "python/_autosummary/mlx.core.set_memory_limit.rst", "python/_autosummary/mlx.core.set_wired_limit.rst", "python/_autosummary/mlx.core.sigmoid.rst", "python/_autosummary/mlx.core.sign.rst", "python/_autosummary/mlx.core.sin.rst", "python/_autosummary/mlx.core.sinh.rst", "python/_autosummary/mlx.core.slice.rst", "python/_autosummary/mlx.core.slice_update.rst", "python/_autosummary/mlx.core.softmax.rst", "python/_autosummary/mlx.core.sort.rst", "python/_autosummary/mlx.core.split.rst", "python/_autosummary/mlx.core.sqrt.rst", "python/_autosummary/mlx.core.square.rst", "python/_autosummary/mlx.core.squeeze.rst", "python/_autosummary/mlx.core.stack.rst", "python/_autosummary/mlx.core.std.rst", "python/_autosummary/mlx.core.stop_gradient.rst", "python/_autosummary/mlx.core.stream.rst", "python/_autosummary/mlx.core.subtract.rst", "python/_autosummary/mlx.core.sum.rst", "python/_autosummary/mlx.core.swapaxes.rst", "python/_autosummary/mlx.core.synchronize.rst", "python/_autosummary/mlx.core.take.rst", "python/_autosummary/mlx.core.take_along_axis.rst", "python/_autosummary/mlx.core.tan.rst", "python/_autosummary/mlx.core.tanh.rst", "python/_autosummary/mlx.core.tensordot.rst", "python/_autosummary/mlx.core.tile.rst", "python/_autosummary/mlx.core.topk.rst", "python/_autosummary/mlx.core.trace.rst", "python/_autosummary/mlx.core.transpose.rst", "python/_autosummary/mlx.core.tri.rst", "python/_autosummary/mlx.core.tril.rst", "python/_autosummary/mlx.core.triu.rst", "python/_autosummary/mlx.core.unflatten.rst", "python/_autosummary/mlx.core.value_and_grad.rst", "python/_autosummary/mlx.core.var.rst", "python/_autosummary/mlx.core.view.rst", "python/_autosummary/mlx.core.vjp.rst", "python/_autosummary/mlx.core.vmap.rst", "python/_autosummary/mlx.core.where.rst", "python/_autosummary/mlx.core.zeros.rst", "python/_autosummary/mlx.core.zeros_like.rst", "python/_autosummary/mlx.nn.average_gradients.rst", "python/_autosummary/mlx.nn.quantize.rst", "python/_autosummary/mlx.nn.value_and_grad.rst", "python/_autosummary/mlx.optimizers.clip_grad_norm.rst", "python/_autosummary/mlx.utils.tree_flatten.rst", "python/_autosummary/mlx.utils.tree_map.rst", "python/_autosummary/mlx.utils.tree_map_with_path.rst", "python/_autosummary/mlx.utils.tree_reduce.rst", "python/_autosummary/mlx.utils.tree_unflatten.rst", "python/_autosummary/stream_class.rst", "python/array.rst", "python/cuda.rst", "python/data_types.rst", "python/devices_and_streams.rst", "python/distributed.rst", "python/export.rst", "python/fast.rst", "python/fft.rst", "python/linalg.rst", "python/memory_management.rst", "python/metal.rst", "python/nn.rst", "python/nn/_autosummary/mlx.nn.ALiBi.rst", "python/nn/_autosummary/mlx.nn.AvgPool1d.rst", "python/nn/_autosummary/mlx.nn.AvgPool2d.rst", "python/nn/_autosummary/mlx.nn.AvgPool3d.rst", "python/nn/_autosummary/mlx.nn.BatchNorm.rst", "python/nn/_autosummary/mlx.nn.CELU.rst", "python/nn/_autosummary/mlx.nn.Conv1d.rst", "python/nn/_autosummary/mlx.nn.Conv2d.rst", "python/nn/_autosummary/mlx.nn.Conv3d.rst", "python/nn/_autosummary/mlx.nn.ConvTranspose1d.rst", 
"python/nn/_autosummary/mlx.nn.ConvTranspose2d.rst", "python/nn/_autosummary/mlx.nn.ConvTranspose3d.rst", "python/nn/_autosummary/mlx.nn.Dropout.rst", "python/nn/_autosummary/mlx.nn.Dropout2d.rst", "python/nn/_autosummary/mlx.nn.Dropout3d.rst", "python/nn/_autosummary/mlx.nn.ELU.rst", "python/nn/_autosummary/mlx.nn.Embedding.rst", "python/nn/_autosummary/mlx.nn.GELU.rst", "python/nn/_autosummary/mlx.nn.GLU.rst", "python/nn/_autosummary/mlx.nn.GRU.rst", "python/nn/_autosummary/mlx.nn.GroupNorm.rst", "python/nn/_autosummary/mlx.nn.HardShrink.rst", "python/nn/_autosummary/mlx.nn.HardTanh.rst", "python/nn/_autosummary/mlx.nn.Hardswish.rst", "python/nn/_autosummary/mlx.nn.InstanceNorm.rst", "python/nn/_autosummary/mlx.nn.LSTM.rst", "python/nn/_autosummary/mlx.nn.LayerNorm.rst", "python/nn/_autosummary/mlx.nn.LeakyReLU.rst", "python/nn/_autosummary/mlx.nn.Linear.rst", "python/nn/_autosummary/mlx.nn.LogSigmoid.rst", "python/nn/_autosummary/mlx.nn.LogSoftmax.rst", "python/nn/_autosummary/mlx.nn.MaxPool1d.rst", "python/nn/_autosummary/mlx.nn.MaxPool2d.rst", "python/nn/_autosummary/mlx.nn.MaxPool3d.rst", "python/nn/_autosummary/mlx.nn.Mish.rst", "python/nn/_autosummary/mlx.nn.Module.apply.rst", "python/nn/_autosummary/mlx.nn.Module.apply_to_modules.rst", "python/nn/_autosummary/mlx.nn.Module.children.rst", "python/nn/_autosummary/mlx.nn.Module.eval.rst", "python/nn/_autosummary/mlx.nn.Module.filter_and_map.rst", "python/nn/_autosummary/mlx.nn.Module.freeze.rst", "python/nn/_autosummary/mlx.nn.Module.leaf_modules.rst", "python/nn/_autosummary/mlx.nn.Module.load_weights.rst", "python/nn/_autosummary/mlx.nn.Module.modules.rst", "python/nn/_autosummary/mlx.nn.Module.named_modules.rst", "python/nn/_autosummary/mlx.nn.Module.parameters.rst", "python/nn/_autosummary/mlx.nn.Module.save_weights.rst", "python/nn/_autosummary/mlx.nn.Module.set_dtype.rst", "python/nn/_autosummary/mlx.nn.Module.state.rst", "python/nn/_autosummary/mlx.nn.Module.train.rst", "python/nn/_autosummary/mlx.nn.Module.trainable_parameters.rst", "python/nn/_autosummary/mlx.nn.Module.training.rst", "python/nn/_autosummary/mlx.nn.Module.unfreeze.rst", "python/nn/_autosummary/mlx.nn.Module.update.rst", "python/nn/_autosummary/mlx.nn.Module.update_modules.rst", "python/nn/_autosummary/mlx.nn.MultiHeadAttention.rst", "python/nn/_autosummary/mlx.nn.PReLU.rst", "python/nn/_autosummary/mlx.nn.QuantizedEmbedding.rst", "python/nn/_autosummary/mlx.nn.QuantizedLinear.rst", "python/nn/_autosummary/mlx.nn.RMSNorm.rst", "python/nn/_autosummary/mlx.nn.RNN.rst", "python/nn/_autosummary/mlx.nn.ReLU.rst", "python/nn/_autosummary/mlx.nn.ReLU6.rst", "python/nn/_autosummary/mlx.nn.RoPE.rst", "python/nn/_autosummary/mlx.nn.SELU.rst", "python/nn/_autosummary/mlx.nn.Sequential.rst", "python/nn/_autosummary/mlx.nn.SiLU.rst", "python/nn/_autosummary/mlx.nn.Sigmoid.rst", "python/nn/_autosummary/mlx.nn.SinusoidalPositionalEncoding.rst", "python/nn/_autosummary/mlx.nn.Softmax.rst", "python/nn/_autosummary/mlx.nn.Softmin.rst", "python/nn/_autosummary/mlx.nn.Softplus.rst", "python/nn/_autosummary/mlx.nn.Softshrink.rst", "python/nn/_autosummary/mlx.nn.Softsign.rst", "python/nn/_autosummary/mlx.nn.Step.rst", "python/nn/_autosummary/mlx.nn.Tanh.rst", "python/nn/_autosummary/mlx.nn.Transformer.rst", "python/nn/_autosummary/mlx.nn.Upsample.rst", "python/nn/_autosummary/mlx.nn.init.constant.rst", "python/nn/_autosummary/mlx.nn.init.glorot_normal.rst", "python/nn/_autosummary/mlx.nn.init.glorot_uniform.rst", "python/nn/_autosummary/mlx.nn.init.he_normal.rst", 
"python/nn/_autosummary/mlx.nn.init.he_uniform.rst", "python/nn/_autosummary/mlx.nn.init.identity.rst", "python/nn/_autosummary/mlx.nn.init.normal.rst", "python/nn/_autosummary/mlx.nn.init.uniform.rst", "python/nn/_autosummary_functions/mlx.nn.celu.rst", "python/nn/_autosummary_functions/mlx.nn.elu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_approx.rst", "python/nn/_autosummary_functions/mlx.nn.gelu_fast_approx.rst", "python/nn/_autosummary_functions/mlx.nn.glu.rst", "python/nn/_autosummary_functions/mlx.nn.hard_shrink.rst", "python/nn/_autosummary_functions/mlx.nn.hard_tanh.rst", "python/nn/_autosummary_functions/mlx.nn.hardswish.rst", "python/nn/_autosummary_functions/mlx.nn.leaky_relu.rst", "python/nn/_autosummary_functions/mlx.nn.log_sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.log_softmax.rst", "python/nn/_autosummary_functions/mlx.nn.losses.binary_cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cosine_similarity_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.cross_entropy.rst", "python/nn/_autosummary_functions/mlx.nn.losses.gaussian_nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.hinge_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.huber_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.kl_div_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.log_cosh_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.margin_ranking_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.mse_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.nll_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.smooth_l1_loss.rst", "python/nn/_autosummary_functions/mlx.nn.losses.triplet_loss.rst", "python/nn/_autosummary_functions/mlx.nn.mish.rst", "python/nn/_autosummary_functions/mlx.nn.prelu.rst", "python/nn/_autosummary_functions/mlx.nn.relu.rst", "python/nn/_autosummary_functions/mlx.nn.relu6.rst", "python/nn/_autosummary_functions/mlx.nn.selu.rst", "python/nn/_autosummary_functions/mlx.nn.sigmoid.rst", "python/nn/_autosummary_functions/mlx.nn.silu.rst", "python/nn/_autosummary_functions/mlx.nn.softmax.rst", "python/nn/_autosummary_functions/mlx.nn.softmin.rst", "python/nn/_autosummary_functions/mlx.nn.softplus.rst", "python/nn/_autosummary_functions/mlx.nn.softshrink.rst", "python/nn/_autosummary_functions/mlx.nn.step.rst", "python/nn/_autosummary_functions/mlx.nn.tanh.rst", "python/nn/functions.rst", "python/nn/init.rst", "python/nn/layers.rst", "python/nn/losses.rst", "python/nn/module.rst", "python/ops.rst", "python/optimizers.rst", "python/optimizers/_autosummary/mlx.optimizers.AdaDelta.rst", "python/optimizers/_autosummary/mlx.optimizers.Adafactor.rst", "python/optimizers/_autosummary/mlx.optimizers.Adagrad.rst", "python/optimizers/_autosummary/mlx.optimizers.Adam.rst", "python/optimizers/_autosummary/mlx.optimizers.AdamW.rst", "python/optimizers/_autosummary/mlx.optimizers.Adamax.rst", "python/optimizers/_autosummary/mlx.optimizers.Lion.rst", "python/optimizers/_autosummary/mlx.optimizers.MultiOptimizer.rst", "python/optimizers/_autosummary/mlx.optimizers.Muon.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.apply_gradients.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.init.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.state.rst", "python/optimizers/_autosummary/mlx.optimizers.Optimizer.update.rst", 
"python/optimizers/_autosummary/mlx.optimizers.RMSprop.rst", "python/optimizers/_autosummary/mlx.optimizers.SGD.rst", "python/optimizers/_autosummary/mlx.optimizers.cosine_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.exponential_decay.rst", "python/optimizers/_autosummary/mlx.optimizers.join_schedules.rst", "python/optimizers/_autosummary/mlx.optimizers.linear_schedule.rst", "python/optimizers/_autosummary/mlx.optimizers.step_decay.rst", "python/optimizers/common_optimizers.rst", "python/optimizers/optimizer.rst", "python/optimizers/schedulers.rst", "python/random.rst", "python/transforms.rst", "python/tree_utils.rst", "usage/compile.rst", "usage/distributed.rst", "usage/export.rst", "usage/function_transforms.rst", "usage/indexing.rst", "usage/launching_distributed.rst", "usage/lazy_evaluation.rst", "usage/numpy.rst", "usage/quick_start.rst", "usage/saving_and_loading.rst", "usage/unified_memory.rst", "usage/using_streams.rst"], "indexentries": {"__init__() (array method)": [[31, "mlx.core.array.__init__", false]], "__init__() (custom_function method)": [[122, "mlx.core.custom_function.__init__", false]], "__init__() (device method)": [[10, "mlx.core.Device.__init__", false]], "__init__() (dtype method)": [[11, "mlx.core.Dtype.__init__", false]], "__init__() (dtypecategory method)": [[12, "mlx.core.DtypeCategory.__init__", false]], "__init__() (finfo method)": [[174, "mlx.core.finfo.__init__", false]], "__init__() (group method)": [[130, "mlx.core.distributed.Group.__init__", false]], "__init__() (stream method)": [[344, "mlx.core.Stream.__init__", false]], "abs (c++ function)": [[0, "_CPPv43absRK5array14StreamOrDevice", false]], "abs() (array method)": [[33, "mlx.core.array.abs", false]], "abs() (in module mlx.core)": [[13, "mlx.core.abs", false]], "adadelta (class in mlx.optimizers)": [[489, "mlx.optimizers.AdaDelta", false]], "adafactor (class in mlx.optimizers)": [[490, "mlx.optimizers.Adafactor", false]], "adagrad (class in mlx.optimizers)": [[491, "mlx.optimizers.Adagrad", false]], "adam (class in mlx.optimizers)": [[492, "mlx.optimizers.Adam", false]], "adamax (class in mlx.optimizers)": [[494, "mlx.optimizers.Adamax", false]], "adamw (class in mlx.optimizers)": [[493, "mlx.optimizers.AdamW", false]], "add (c++ function)": [[0, "_CPPv43addRK5arrayRK5array14StreamOrDevice", false]], "add() (in module mlx.core)": [[14, "mlx.core.add", false]], "addmm (c++ function)": [[0, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", false]], "addmm() (in module mlx.core)": [[15, "mlx.core.addmm", false]], "alibi (class in mlx.nn)": [[357, "mlx.nn.ALiBi", false]], "all (c++ function)": [[0, "_CPPv43allRK5array14StreamOrDevice", false], [0, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43allRK5arrayb14StreamOrDevice", false], [0, "_CPPv43allRK5arrayib14StreamOrDevice", false]], "all() (array method)": [[34, "mlx.core.array.all", false]], "all() (in module mlx.core)": [[16, "mlx.core.all", false]], "all_gather() (in module mlx.core.distributed)": [[131, "mlx.core.distributed.all_gather", false]], "all_sum() (in module mlx.core.distributed)": [[132, "mlx.core.distributed.all_sum", false]], "allclose (c++ function)": [[0, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", false]], "allclose() (in module mlx.core)": [[17, "mlx.core.allclose", false]], "any (c++ function)": [[0, "_CPPv43anyRK5array14StreamOrDevice", false], [0, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43anyRK5arrayb14StreamOrDevice", false], [0, 
"_CPPv43anyRK5arrayib14StreamOrDevice", false]], "any() (array method)": [[35, "mlx.core.array.any", false]], "any() (in module mlx.core)": [[18, "mlx.core.any", false]], "apply() (module method)": [[392, "mlx.nn.Module.apply", false]], "apply_gradients() (optimizer method)": [[498, "mlx.optimizers.Optimizer.apply_gradients", false]], "apply_to_modules() (module method)": [[393, "mlx.nn.Module.apply_to_modules", false]], "arange (c++ function)": [[0, "_CPPv46aranged14StreamOrDevice", false], [0, "_CPPv46aranged5Dtype14StreamOrDevice", false], [0, "_CPPv46arangedd14StreamOrDevice", false], [0, "_CPPv46arangedd5Dtype14StreamOrDevice", false], [0, "_CPPv46arangeddd14StreamOrDevice", false], [0, "_CPPv46arangeddd5Dtype14StreamOrDevice", false], [0, "_CPPv46arangei14StreamOrDevice", false], [0, "_CPPv46arangeii14StreamOrDevice", false], [0, "_CPPv46arangeiii14StreamOrDevice", false]], "arange() (in module mlx.core)": [[19, "mlx.core.arange", false]], "arccos (c++ function)": [[0, "_CPPv46arccosRK5array14StreamOrDevice", false]], "arccos() (in module mlx.core)": [[20, "mlx.core.arccos", false]], "arccosh (c++ function)": [[0, "_CPPv47arccoshRK5array14StreamOrDevice", false]], "arccosh() (in module mlx.core)": [[21, "mlx.core.arccosh", false]], "arcsin (c++ function)": [[0, "_CPPv46arcsinRK5array14StreamOrDevice", false]], "arcsin() (in module mlx.core)": [[22, "mlx.core.arcsin", false]], "arcsinh (c++ function)": [[0, "_CPPv47arcsinhRK5array14StreamOrDevice", false]], "arcsinh() (in module mlx.core)": [[23, "mlx.core.arcsinh", false]], "arctan (c++ function)": [[0, "_CPPv46arctanRK5array14StreamOrDevice", false]], "arctan() (in module mlx.core)": [[24, "mlx.core.arctan", false]], "arctan2 (c++ function)": [[0, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", false]], "arctan2() (in module mlx.core)": [[25, "mlx.core.arctan2", false]], "arctanh (c++ function)": [[0, "_CPPv47arctanhRK5array14StreamOrDevice", false]], "arctanh() (in module mlx.core)": [[26, "mlx.core.arctanh", false]], "argmax (c++ function)": [[0, "_CPPv46argmaxRK5array14StreamOrDevice", false], [0, "_CPPv46argmaxRK5arrayb14StreamOrDevice", false], [0, "_CPPv46argmaxRK5arrayib14StreamOrDevice", false]], "argmax() (array method)": [[36, "mlx.core.array.argmax", false]], "argmax() (in module mlx.core)": [[27, "mlx.core.argmax", false]], "argmin (c++ function)": [[0, "_CPPv46argminRK5array14StreamOrDevice", false], [0, "_CPPv46argminRK5arrayb14StreamOrDevice", false], [0, "_CPPv46argminRK5arrayib14StreamOrDevice", false]], "argmin() (array method)": [[37, "mlx.core.array.argmin", false]], "argmin() (in module mlx.core)": [[28, "mlx.core.argmin", false]], "argpartition (c++ function)": [[0, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", false], [0, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", false]], "argpartition() (in module mlx.core)": [[29, "mlx.core.argpartition", false]], "argsort (c++ function)": [[0, "_CPPv47argsortRK5array14StreamOrDevice", false], [0, "_CPPv47argsortRK5arrayi14StreamOrDevice", false]], "argsort() (in module mlx.core)": [[30, "mlx.core.argsort", false]], "array (class in mlx.core)": [[31, "mlx.core.array", false]], "array_equal (c++ function)": [[0, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", false], [0, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", false]], "array_equal() (in module mlx.core)": [[86, "mlx.core.array_equal", false]], "as_strided (c++ function)": [[0, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", false]], "as_strided() (in module mlx.core)": 
[[87, "mlx.core.as_strided", false]], "astype (c++ function)": [[0, "_CPPv46astype5array5Dtype14StreamOrDevice", false]], "astype() (array method)": [[38, "mlx.core.array.astype", false]], "async_eval() (in module mlx.core)": [[88, "mlx.core.async_eval", false]], "at (array property)": [[39, "mlx.core.array.at", false]], "atleast_1d (c++ function)": [[0, "_CPPv410atleast_1dRK5array14StreamOrDevice", false], [0, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "atleast_1d() (in module mlx.core)": [[89, "mlx.core.atleast_1d", false]], "atleast_2d (c++ function)": [[0, "_CPPv410atleast_2dRK5array14StreamOrDevice", false], [0, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "atleast_2d() (in module mlx.core)": [[90, "mlx.core.atleast_2d", false]], "atleast_3d (c++ function)": [[0, "_CPPv410atleast_3dRK5array14StreamOrDevice", false], [0, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "atleast_3d() (in module mlx.core)": [[91, "mlx.core.atleast_3d", false]], "average_gradients() (in module mlx.nn)": [[335, "mlx.nn.average_gradients", false]], "avgpool1d (class in mlx.nn)": [[358, "mlx.nn.AvgPool1d", false]], "avgpool2d (class in mlx.nn)": [[359, "mlx.nn.AvgPool2d", false]], "avgpool3d (class in mlx.nn)": [[360, "mlx.nn.AvgPool3d", false]], "batchnorm (class in mlx.nn)": [[361, "mlx.nn.BatchNorm", false]], "bernoulli() (in module mlx.core.random)": [[261, "mlx.core.random.bernoulli", false]], "binary_cross_entropy (class in mlx.nn.losses)": [[455, "mlx.nn.losses.binary_cross_entropy", false]], "bitwise_and (c++ function)": [[0, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", false]], "bitwise_and() (in module mlx.core)": [[92, "mlx.core.bitwise_and", false]], "bitwise_invert (c++ function)": [[0, "_CPPv414bitwise_invertRK5array14StreamOrDevice", false]], "bitwise_invert() (in module mlx.core)": [[93, "mlx.core.bitwise_invert", false]], "bitwise_or (c++ function)": [[0, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", false]], "bitwise_or() (in module mlx.core)": [[94, "mlx.core.bitwise_or", false]], "bitwise_xor (c++ function)": [[0, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", false]], "bitwise_xor() (in module mlx.core)": [[95, "mlx.core.bitwise_xor", false]], "block_masked_mm (c++ function)": [[0, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", false]], "block_masked_mm() (in module mlx.core)": [[96, "mlx.core.block_masked_mm", false]], "broadcast_arrays (c++ function)": [[0, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", false]], "broadcast_arrays() (in module mlx.core)": [[97, "mlx.core.broadcast_arrays", false]], "broadcast_to (c++ function)": [[0, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", false]], "broadcast_to() (in module mlx.core)": [[98, "mlx.core.broadcast_to", false]], "categorical() (in module mlx.core.random)": [[262, "mlx.core.random.categorical", false]], "ceil (c++ function)": [[0, "_CPPv44ceilRK5array14StreamOrDevice", false]], "ceil() (in module mlx.core)": [[99, "mlx.core.ceil", false]], "celu (class in mlx.nn)": [[362, "mlx.nn.CELU", false], [443, "mlx.nn.celu", false]], "children() (module method)": [[394, "mlx.nn.Module.children", false]], "cholesky() (in module mlx.core.linalg)": [[204, "mlx.core.linalg.cholesky", false]], "cholesky_inv() (in module mlx.core.linalg)": [[205, "mlx.core.linalg.cholesky_inv", false]], "clear_cache() (in module mlx.core)": [[100, "mlx.core.clear_cache", 
false]], "clip (c++ function)": [[0, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", false]], "clip() (in module mlx.core)": [[101, "mlx.core.clip", false]], "clip_grad_norm() (in module mlx.optimizers)": [[338, "mlx.optimizers.clip_grad_norm", false]], "compile() (in module mlx.core)": [[102, "mlx.core.compile", false]], "concatenate (c++ function)": [[0, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", false], [0, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", false]], "concatenate() (in module mlx.core)": [[103, "mlx.core.concatenate", false]], "conj() (array method)": [[40, "mlx.core.array.conj", false]], "conj() (in module mlx.core)": [[104, "mlx.core.conj", false]], "conjugate (c++ function)": [[0, "_CPPv49conjugateRK5array14StreamOrDevice", false]], "conjugate() (in module mlx.core)": [[105, "mlx.core.conjugate", false]], "constant() (in module mlx.nn.init)": [[435, "mlx.nn.init.constant", false]], "contiguous (c++ function)": [[0, "_CPPv410contiguousRK5arrayb14StreamOrDevice", false]], "contiguous() (in module mlx.core)": [[106, "mlx.core.contiguous", false]], "conv1d (c++ function)": [[0, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", false]], "conv1d (class in mlx.nn)": [[363, "mlx.nn.Conv1d", false]], "conv1d() (in module mlx.core)": [[107, "mlx.core.conv1d", false]], "conv2d (c++ function)": [[0, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", false]], "conv2d (class in mlx.nn)": [[364, "mlx.nn.Conv2d", false]], "conv2d() (in module mlx.core)": [[108, "mlx.core.conv2d", false]], "conv3d (c++ function)": [[0, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", false]], "conv3d (class in mlx.nn)": [[365, "mlx.nn.Conv3d", false]], "conv3d() (in module mlx.core)": [[109, "mlx.core.conv3d", false]], "conv_general (c++ function)": [[0, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", false], [0, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", false]], "conv_general() (in module mlx.core)": [[110, "mlx.core.conv_general", false]], "conv_transpose1d (c++ function)": [[0, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", false]], "conv_transpose1d() (in module mlx.core)": [[111, "mlx.core.conv_transpose1d", false]], "conv_transpose2d (c++ function)": [[0, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", false]], "conv_transpose2d() (in module mlx.core)": [[112, "mlx.core.conv_transpose2d", false]], "conv_transpose3d (c++ function)": [[0, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", false]], "conv_transpose3d() (in module mlx.core)": [[113, "mlx.core.conv_transpose3d", false]], "convolve() (in module mlx.core)": [[114, "mlx.core.convolve", false]], "convtranspose1d (class in mlx.nn)": [[366, "mlx.nn.ConvTranspose1d", false]], "convtranspose2d (class in mlx.nn)": [[367, "mlx.nn.ConvTranspose2d", false]], "convtranspose3d (class in mlx.nn)": [[368, "mlx.nn.ConvTranspose3d", false]], "copy (c++ function)": [[0, "_CPPv44copy5array14StreamOrDevice", false]], "cos (c++ function)": [[0, "_CPPv43cosRK5array14StreamOrDevice", false]], "cos() (array method)": [[41, "mlx.core.array.cos", false]], "cos() (in module mlx.core)": [[115, 
"mlx.core.cos", false]], "cosh (c++ function)": [[0, "_CPPv44coshRK5array14StreamOrDevice", false]], "cosh() (in module mlx.core)": [[116, "mlx.core.cosh", false]], "cosine_decay() (in module mlx.optimizers)": [[504, "mlx.optimizers.cosine_decay", false]], "cosine_similarity_loss (class in mlx.nn.losses)": [[456, "mlx.nn.losses.cosine_similarity_loss", false]], "cross() (in module mlx.core.linalg)": [[206, "mlx.core.linalg.cross", false]], "cross_entropy (class in mlx.nn.losses)": [[457, "mlx.nn.losses.cross_entropy", false]], "cuda_kernel() (in module mlx.core.fast)": [[154, "mlx.core.fast.cuda_kernel", false]], "cummax (c++ function)": [[0, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", false]], "cummax() (array method)": [[42, "mlx.core.array.cummax", false]], "cummax() (in module mlx.core)": [[118, "mlx.core.cummax", false]], "cummin (c++ function)": [[0, "_CPPv46cumminRK5arrayibb14StreamOrDevice", false]], "cummin() (array method)": [[43, "mlx.core.array.cummin", false]], "cummin() (in module mlx.core)": [[119, "mlx.core.cummin", false]], "cumprod (c++ function)": [[0, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", false]], "cumprod() (array method)": [[44, "mlx.core.array.cumprod", false]], "cumprod() (in module mlx.core)": [[120, "mlx.core.cumprod", false]], "cumsum (c++ function)": [[0, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", false]], "cumsum() (array method)": [[45, "mlx.core.array.cumsum", false]], "cumsum() (in module mlx.core)": [[121, "mlx.core.cumsum", false]], "custom_function (class in mlx.core)": [[122, "mlx.core.custom_function", false]], "default_device() (in module mlx.core)": [[123, "mlx.core.default_device", false]], "default_stream() (in module mlx.core)": [[124, "mlx.core.default_stream", false]], "degrees (c++ function)": [[0, "_CPPv47degreesRK5array14StreamOrDevice", false]], "degrees() (in module mlx.core)": [[125, "mlx.core.degrees", false]], "depends (c++ function)": [[0, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", false]], "dequantize (c++ function)": [[0, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", false]], "dequantize() (in module mlx.core)": [[126, "mlx.core.dequantize", false]], "device (class in mlx.core)": [[10, "mlx.core.Device", false]], "device_info() (in module mlx.core.metal)": [[238, "mlx.core.metal.device_info", false]], "diag (c++ function)": [[0, "_CPPv44diagRK5arrayi14StreamOrDevice", false]], "diag() (array method)": [[46, "mlx.core.array.diag", false]], "diag() (in module mlx.core)": [[127, "mlx.core.diag", false]], "diagonal (c++ function)": [[0, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", false]], "diagonal() (array method)": [[47, "mlx.core.array.diagonal", false]], "diagonal() (in module mlx.core)": [[128, "mlx.core.diagonal", false]], "disable_compile() (in module mlx.core)": [[129, "mlx.core.disable_compile", false]], "divide (c++ function)": [[0, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", false]], "divide() (in module mlx.core)": [[138, "mlx.core.divide", false]], "divmod (c++ function)": [[0, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", false]], "divmod() (in module mlx.core)": [[139, "mlx.core.divmod", false]], "dropout (class in mlx.nn)": [[369, "mlx.nn.Dropout", false]], "dropout2d (class in mlx.nn)": [[370, "mlx.nn.Dropout2d", false]], "dropout3d (class in mlx.nn)": [[371, "mlx.nn.Dropout3d", false]], "dtype (array property)": [[48, "mlx.core.array.dtype", false]], "dtype (class in mlx.core)": [[11, "mlx.core.Dtype", false]], "dtypecategory (class in 
mlx.core)": [[12, "mlx.core.DtypeCategory", false]], "eig() (in module mlx.core.linalg)": [[207, "mlx.core.linalg.eig", false]], "eigh() (in module mlx.core.linalg)": [[208, "mlx.core.linalg.eigh", false]], "eigvals() (in module mlx.core.linalg)": [[209, "mlx.core.linalg.eigvals", false]], "eigvalsh() (in module mlx.core.linalg)": [[210, "mlx.core.linalg.eigvalsh", false]], "einsum() (in module mlx.core)": [[140, "mlx.core.einsum", false]], "einsum_path() (in module mlx.core)": [[141, "mlx.core.einsum_path", false]], "elu (class in mlx.nn)": [[372, "mlx.nn.ELU", false], [444, "mlx.nn.elu", false]], "embedding (class in mlx.nn)": [[373, "mlx.nn.Embedding", false]], "enable_compile() (in module mlx.core)": [[142, "mlx.core.enable_compile", false]], "equal (c++ function)": [[0, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", false]], "equal() (in module mlx.core)": [[143, "mlx.core.equal", false]], "erf (c++ function)": [[0, "_CPPv43erfRK5array14StreamOrDevice", false]], "erf() (in module mlx.core)": [[144, "mlx.core.erf", false]], "erfinv (c++ function)": [[0, "_CPPv46erfinvRK5array14StreamOrDevice", false]], "erfinv() (in module mlx.core)": [[145, "mlx.core.erfinv", false]], "eval() (in module mlx.core)": [[146, "mlx.core.eval", false]], "eval() (module method)": [[395, "mlx.nn.Module.eval", false]], "exp (c++ function)": [[0, "_CPPv43expRK5array14StreamOrDevice", false]], "exp() (array method)": [[49, "mlx.core.array.exp", false]], "exp() (in module mlx.core)": [[147, "mlx.core.exp", false]], "expand_dims (c++ function)": [[0, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", false]], "expand_dims() (in module mlx.core)": [[148, "mlx.core.expand_dims", false]], "expm1 (c++ function)": [[0, "_CPPv45expm1RK5array14StreamOrDevice", false]], "expm1() (in module mlx.core)": [[149, "mlx.core.expm1", false]], "exponential_decay() (in module mlx.optimizers)": [[505, "mlx.optimizers.exponential_decay", false]], "export_function() (in module mlx.core)": [[150, "mlx.core.export_function", false]], "export_to_dot() (in module mlx.core)": [[151, "mlx.core.export_to_dot", false]], "exporter() (in module mlx.core)": [[152, "mlx.core.exporter", false]], "eye (c++ function)": [[0, "_CPPv43eyei14StreamOrDevice", false], [0, "_CPPv43eyei5Dtype14StreamOrDevice", false], [0, "_CPPv43eyeii14StreamOrDevice", false], [0, "_CPPv43eyeiii14StreamOrDevice", false], [0, "_CPPv43eyeiii5Dtype14StreamOrDevice", false]], "eye() (in module mlx.core)": [[153, "mlx.core.eye", false]], "fft() (in module mlx.core.fft)": [[160, "mlx.core.fft.fft", false]], "fft2() (in module mlx.core.fft)": [[161, "mlx.core.fft.fft2", false]], "fftn() (in module mlx.core.fft)": [[162, "mlx.core.fft.fftn", false]], "fftshift() (in module mlx.core.fft)": [[163, "mlx.core.fft.fftshift", false]], "filter_and_map() (module method)": [[396, "mlx.nn.Module.filter_and_map", false]], "finfo (class in mlx.core)": [[174, "mlx.core.finfo", false]], "flatten (c++ function)": [[0, "_CPPv47flattenRK5array14StreamOrDevice", false], [0, "_CPPv47flattenRK5arrayii14StreamOrDevice", false]], "flatten() (array method)": [[50, "mlx.core.array.flatten", false]], "flatten() (in module mlx.core)": [[175, "mlx.core.flatten", false]], "floor (c++ function)": [[0, "_CPPv45floorRK5array14StreamOrDevice", false]], "floor() (in module mlx.core)": [[176, "mlx.core.floor", false]], "floor_divide (c++ function)": [[0, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", false]], "floor_divide() (in 
module mlx.core)": [[177, "mlx.core.floor_divide", false]], "freeze() (module method)": [[397, "mlx.nn.Module.freeze", false]], "full (c++ function)": [[0, "_CPPv44full5Shape5array14StreamOrDevice", false], [0, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", false], [0, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", false], [0, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", false]], "full() (in module mlx.core)": [[178, "mlx.core.full", false]], "gather (c++ function)": [[0, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", false], [0, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", false]], "gather_mm (c++ function)": [[0, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", false]], "gather_mm() (in module mlx.core)": [[179, "mlx.core.gather_mm", false]], "gather_qmm (c++ function)": [[0, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", false]], "gather_qmm() (in module mlx.core)": [[180, "mlx.core.gather_qmm", false]], "gaussian_nll_loss (class in mlx.nn.losses)": [[458, "mlx.nn.losses.gaussian_nll_loss", false]], "gelu (class in mlx.nn)": [[374, "mlx.nn.GELU", false], [445, "mlx.nn.gelu", false]], "gelu_approx (class in mlx.nn)": [[446, "mlx.nn.gelu_approx", false]], "gelu_fast_approx (class in mlx.nn)": [[447, "mlx.nn.gelu_fast_approx", false]], "get_active_memory() (in module mlx.core)": [[181, "mlx.core.get_active_memory", false]], "get_cache_memory() (in module mlx.core)": [[182, "mlx.core.get_cache_memory", false]], "get_peak_memory() (in module mlx.core)": [[183, "mlx.core.get_peak_memory", false]], "glorot_normal() (in module mlx.nn.init)": [[436, "mlx.nn.init.glorot_normal", false]], "glorot_uniform() (in module mlx.nn.init)": [[437, "mlx.nn.init.glorot_uniform", false]], "glu (class in mlx.nn)": [[375, "mlx.nn.GLU", false], [448, "mlx.nn.glu", false]], "grad() (in module mlx.core)": [[184, "mlx.core.grad", false]], "greater (c++ function)": [[0, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", false]], "greater() (in module mlx.core)": [[185, "mlx.core.greater", false]], "greater_equal (c++ function)": [[0, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", false]], "greater_equal() (in module mlx.core)": [[186, "mlx.core.greater_equal", false]], "group (class in mlx.core.distributed)": [[130, "mlx.core.distributed.Group", false]], "groupnorm (class in mlx.nn)": [[377, "mlx.nn.GroupNorm", false]], "gru (class in mlx.nn)": [[376, "mlx.nn.GRU", false]], "gumbel() (in module mlx.core.random)": [[263, "mlx.core.random.gumbel", false]], "hadamard_transform (c++ function)": [[0, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", false]], "hadamard_transform() (in module mlx.core)": [[187, "mlx.core.hadamard_transform", false]], "hard_shrink (class in mlx.nn)": [[449, "mlx.nn.hard_shrink", false]], "hard_tanh (class in mlx.nn)": [[450, "mlx.nn.hard_tanh", false]], "hardshrink (class in mlx.nn)": [[378, "mlx.nn.HardShrink", false]], "hardswish (class in mlx.nn)": [[380, "mlx.nn.Hardswish", false], [451, "mlx.nn.hardswish", false]], "hardtanh (class in mlx.nn)": [[379, "mlx.nn.HardTanh", false]], "he_normal() (in module mlx.nn.init)": [[438, "mlx.nn.init.he_normal", false]], "he_uniform() (in module mlx.nn.init)": [[439, "mlx.nn.init.he_uniform", false]], "hinge_loss (class in mlx.nn.losses)": [[459, "mlx.nn.losses.hinge_loss", false]], "huber_loss (class in mlx.nn.losses)": 
[[460, "mlx.nn.losses.huber_loss", false]], "identity (c++ function)": [[0, "_CPPv48identityi14StreamOrDevice", false], [0, "_CPPv48identityi5Dtype14StreamOrDevice", false]], "identity() (in module mlx.core)": [[188, "mlx.core.identity", false]], "identity() (in module mlx.nn.init)": [[440, "mlx.nn.init.identity", false]], "ifft() (in module mlx.core.fft)": [[164, "mlx.core.fft.ifft", false]], "ifft2() (in module mlx.core.fft)": [[165, "mlx.core.fft.ifft2", false]], "ifftn() (in module mlx.core.fft)": [[166, "mlx.core.fft.ifftn", false]], "ifftshift() (in module mlx.core.fft)": [[167, "mlx.core.fft.ifftshift", false]], "imag (array property)": [[51, "mlx.core.array.imag", false]], "imag (c++ function)": [[0, "_CPPv44imagRK5array14StreamOrDevice", false]], "imag() (in module mlx.core)": [[189, "mlx.core.imag", false]], "import_function() (in module mlx.core)": [[190, "mlx.core.import_function", false]], "init() (in module mlx.core.distributed)": [[133, "mlx.core.distributed.init", false]], "init() (optimizer method)": [[499, "mlx.optimizers.Optimizer.init", false]], "inner (c++ function)": [[0, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", false]], "inner() (in module mlx.core)": [[191, "mlx.core.inner", false]], "instancenorm (class in mlx.nn)": [[381, "mlx.nn.InstanceNorm", false]], "inv() (in module mlx.core.linalg)": [[211, "mlx.core.linalg.inv", false]], "irfft() (in module mlx.core.fft)": [[168, "mlx.core.fft.irfft", false]], "irfft2() (in module mlx.core.fft)": [[169, "mlx.core.fft.irfft2", false]], "irfftn() (in module mlx.core.fft)": [[170, "mlx.core.fft.irfftn", false]], "is_available() (in module mlx.core.cuda)": [[117, "mlx.core.cuda.is_available", false]], "is_available() (in module mlx.core.distributed)": [[134, "mlx.core.distributed.is_available", false]], "is_available() (in module mlx.core.metal)": [[239, "mlx.core.metal.is_available", false]], "isclose (c++ function)": [[0, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", false]], "isclose() (in module mlx.core)": [[192, "mlx.core.isclose", false]], "isfinite (c++ function)": [[0, "_CPPv48isfiniteRK5array14StreamOrDevice", false]], "isfinite() (in module mlx.core)": [[193, "mlx.core.isfinite", false]], "isinf (c++ function)": [[0, "_CPPv45isinfRK5array14StreamOrDevice", false]], "isinf() (in module mlx.core)": [[194, "mlx.core.isinf", false]], "isnan (c++ function)": [[0, "_CPPv45isnanRK5array14StreamOrDevice", false]], "isnan() (in module mlx.core)": [[195, "mlx.core.isnan", false]], "isneginf (c++ function)": [[0, "_CPPv48isneginfRK5array14StreamOrDevice", false]], "isneginf() (in module mlx.core)": [[196, "mlx.core.isneginf", false]], "isposinf (c++ function)": [[0, "_CPPv48isposinfRK5array14StreamOrDevice", false]], "isposinf() (in module mlx.core)": [[197, "mlx.core.isposinf", false]], "issubdtype() (in module mlx.core)": [[198, "mlx.core.issubdtype", false]], "item() (array method)": [[52, "mlx.core.array.item", false]], "itemsize (array property)": [[53, "mlx.core.array.itemsize", false]], "join_schedules() (in module mlx.optimizers)": [[506, "mlx.optimizers.join_schedules", false]], "jvp() (in module mlx.core)": [[199, "mlx.core.jvp", false]], "key() (in module mlx.core.random)": [[264, "mlx.core.random.key", false]], "kl_div_loss (class in mlx.nn.losses)": [[461, "mlx.nn.losses.kl_div_loss", false]], "kron (c++ function)": [[0, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", false]], "kron() (in module mlx.core)": [[200, "mlx.core.kron", false]], "l1_loss (class in mlx.nn.losses)": [[462, 
"mlx.nn.losses.l1_loss", false]], "laplace() (in module mlx.core.random)": [[265, "mlx.core.random.laplace", false]], "layer_norm() (in module mlx.core.fast)": [[155, "mlx.core.fast.layer_norm", false]], "layernorm (class in mlx.nn)": [[383, "mlx.nn.LayerNorm", false]], "leaf_modules() (module method)": [[398, "mlx.nn.Module.leaf_modules", false]], "leaky_relu (class in mlx.nn)": [[452, "mlx.nn.leaky_relu", false]], "leakyrelu (class in mlx.nn)": [[384, "mlx.nn.LeakyReLU", false]], "left_shift (c++ function)": [[0, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", false]], "left_shift() (in module mlx.core)": [[201, "mlx.core.left_shift", false]], "less (c++ function)": [[0, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", false]], "less() (in module mlx.core)": [[202, "mlx.core.less", false]], "less_equal (c++ function)": [[0, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", false]], "less_equal() (in module mlx.core)": [[203, "mlx.core.less_equal", false]], "linear (class in mlx.nn)": [[385, "mlx.nn.Linear", false]], "linear_schedule() (in module mlx.optimizers)": [[507, "mlx.optimizers.linear_schedule", false]], "linspace (c++ function)": [[0, "_CPPv48linspaceddi5Dtype14StreamOrDevice", false]], "linspace() (in module mlx.core)": [[221, "mlx.core.linspace", false]], "lion (class in mlx.optimizers)": [[495, "mlx.optimizers.Lion", false]], "load() (in module mlx.core)": [[222, "mlx.core.load", false]], "load_weights() (module method)": [[399, "mlx.nn.Module.load_weights", false]], "log (c++ function)": [[0, "_CPPv43logRK5array14StreamOrDevice", false]], "log() (array method)": [[54, "mlx.core.array.log", false]], "log() (in module mlx.core)": [[223, "mlx.core.log", false]], "log10 (c++ function)": [[0, "_CPPv45log10RK5array14StreamOrDevice", false]], "log10() (array method)": [[55, "mlx.core.array.log10", false]], "log10() (in module mlx.core)": [[224, "mlx.core.log10", false]], "log1p (c++ function)": [[0, "_CPPv45log1pRK5array14StreamOrDevice", false]], "log1p() (array method)": [[56, "mlx.core.array.log1p", false]], "log1p() (in module mlx.core)": [[225, "mlx.core.log1p", false]], "log2 (c++ function)": [[0, "_CPPv44log2RK5array14StreamOrDevice", false]], "log2() (array method)": [[57, "mlx.core.array.log2", false]], "log2() (in module mlx.core)": [[226, "mlx.core.log2", false]], "log_cosh_loss (class in mlx.nn.losses)": [[463, "mlx.nn.losses.log_cosh_loss", false]], "log_sigmoid (class in mlx.nn)": [[453, "mlx.nn.log_sigmoid", false]], "log_softmax (class in mlx.nn)": [[454, "mlx.nn.log_softmax", false]], "logaddexp (c++ function)": [[0, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", false]], "logaddexp() (in module mlx.core)": [[227, "mlx.core.logaddexp", false]], "logcumsumexp (c++ function)": [[0, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", false]], "logcumsumexp() (array method)": [[58, "mlx.core.array.logcumsumexp", false]], "logcumsumexp() (in module mlx.core)": [[228, "mlx.core.logcumsumexp", false]], "logical_and (c++ function)": [[0, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", false]], "logical_and() (in module mlx.core)": [[229, "mlx.core.logical_and", false]], "logical_not (c++ function)": [[0, "_CPPv411logical_notRK5array14StreamOrDevice", false]], "logical_not() (in module mlx.core)": [[230, "mlx.core.logical_not", false]], "logical_or (c++ function)": [[0, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", false]], "logical_or() (in module mlx.core)": [[231, "mlx.core.logical_or", false]], "logsigmoid (class in mlx.nn)": [[386, 
"mlx.nn.LogSigmoid", false]], "logsoftmax (class in mlx.nn)": [[387, "mlx.nn.LogSoftmax", false]], "logsumexp (c++ function)": [[0, "_CPPv49logsumexpRK5array14StreamOrDevice", false], [0, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", false], [0, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", false]], "logsumexp() (array method)": [[59, "mlx.core.array.logsumexp", false]], "logsumexp() (in module mlx.core)": [[232, "mlx.core.logsumexp", false]], "lstm (class in mlx.nn)": [[382, "mlx.nn.LSTM", false]], "lu() (in module mlx.core.linalg)": [[212, "mlx.core.linalg.lu", false]], "lu_factor() (in module mlx.core.linalg)": [[213, "mlx.core.linalg.lu_factor", false]], "margin_ranking_loss (class in mlx.nn.losses)": [[464, "mlx.nn.losses.margin_ranking_loss", false]], "matmul (c++ function)": [[0, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", false]], "matmul() (in module mlx.core)": [[233, "mlx.core.matmul", false]], "max (c++ function)": [[0, "_CPPv43maxRK5array14StreamOrDevice", false], [0, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43maxRK5arrayb14StreamOrDevice", false], [0, "_CPPv43maxRK5arrayib14StreamOrDevice", false]], "max() (array method)": [[60, "mlx.core.array.max", false]], "max() (in module mlx.core)": [[234, "mlx.core.max", false]], "maximum (c++ function)": [[0, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", false]], "maximum() (in module mlx.core)": [[235, "mlx.core.maximum", false]], "maxpool1d (class in mlx.nn)": [[388, "mlx.nn.MaxPool1d", false]], "maxpool2d (class in mlx.nn)": [[389, "mlx.nn.MaxPool2d", false]], "maxpool3d (class in mlx.nn)": [[390, "mlx.nn.MaxPool3d", false]], "mean (c++ function)": [[0, "_CPPv44meanRK5array14StreamOrDevice", false], [0, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv44meanRK5arrayb14StreamOrDevice", false], [0, "_CPPv44meanRK5arrayib14StreamOrDevice", false]], "mean() (array method)": [[61, "mlx.core.array.mean", false]], "mean() (in module mlx.core)": [[236, "mlx.core.mean", false]], "meshgrid (c++ function)": [[0, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", false]], "meshgrid() (in module mlx.core)": [[237, "mlx.core.meshgrid", false]], "metal_kernel() (in module mlx.core.fast)": [[156, "mlx.core.fast.metal_kernel", false]], "min (c++ function)": [[0, "_CPPv43minRK5array14StreamOrDevice", false], [0, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43minRK5arrayb14StreamOrDevice", false], [0, "_CPPv43minRK5arrayib14StreamOrDevice", false]], "min() (array method)": [[62, "mlx.core.array.min", false]], "min() (in module mlx.core)": [[242, "mlx.core.min", false]], "minimum (c++ function)": [[0, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", false]], "minimum() (in module mlx.core)": [[243, "mlx.core.minimum", false]], "mish (class in mlx.nn)": [[391, "mlx.nn.Mish", false], [469, "mlx.nn.mish", false]], "module (class in mlx.nn)": [[486, "mlx.nn.Module", false]], "modules() (module method)": [[400, "mlx.nn.Module.modules", false]], "moveaxis (c++ function)": [[0, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", false]], "moveaxis() (array method)": [[63, "mlx.core.array.moveaxis", false]], "moveaxis() (in module mlx.core)": [[244, "mlx.core.moveaxis", false]], "mse_loss (class in mlx.nn.losses)": [[465, "mlx.nn.losses.mse_loss", false]], "multiheadattention (class in mlx.nn)": [[412, "mlx.nn.MultiHeadAttention", false]], "multioptimizer (class in mlx.optimizers)": 
[[496, "mlx.optimizers.MultiOptimizer", false]], "multiply (c++ function)": [[0, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", false]], "multiply() (in module mlx.core)": [[245, "mlx.core.multiply", false]], "multivariate_normal() (in module mlx.core.random)": [[266, "mlx.core.random.multivariate_normal", false]], "muon (class in mlx.optimizers)": [[497, "mlx.optimizers.Muon", false]], "named_modules() (module method)": [[401, "mlx.nn.Module.named_modules", false]], "nan_to_num (c++ function)": [[0, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", false]], "nan_to_num() (in module mlx.core)": [[246, "mlx.core.nan_to_num", false]], "nbytes (array property)": [[64, "mlx.core.array.nbytes", false]], "ndim (array property)": [[65, "mlx.core.array.ndim", false]], "negative (c++ function)": [[0, "_CPPv48negativeRK5array14StreamOrDevice", false]], "negative() (in module mlx.core)": [[247, "mlx.core.negative", false]], "new_stream() (in module mlx.core)": [[248, "mlx.core.new_stream", false]], "nll_loss (class in mlx.nn.losses)": [[466, "mlx.nn.losses.nll_loss", false]], "norm() (in module mlx.core.linalg)": [[214, "mlx.core.linalg.norm", false]], "normal() (in module mlx.core.random)": [[267, "mlx.core.random.normal", false]], "normal() (in module mlx.nn.init)": [[441, "mlx.nn.init.normal", false]], "not_equal (c++ function)": [[0, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", false]], "not_equal() (in module mlx.core)": [[249, "mlx.core.not_equal", false]], "number_of_elements (c++ function)": [[0, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", false]], "ones (c++ function)": [[0, "_CPPv44onesRK5Shape14StreamOrDevice", false], [0, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", false]], "ones() (in module mlx.core)": [[250, "mlx.core.ones", false]], "ones_like (c++ function)": [[0, "_CPPv49ones_likeRK5array14StreamOrDevice", false]], "ones_like() (in module mlx.core)": [[251, "mlx.core.ones_like", false]], "operator!= (c++ function)": [[0, "_CPPv4I0Ene5array1TRK5array", false], [0, "_CPPv4I0Ene5arrayRK5array1T", false], [0, "_CPPv4neRK5arrayRK5array", false]], "operator% (c++ function)": [[0, "_CPPv4I0Erm5array1TRK5array", false], [0, "_CPPv4I0Erm5arrayRK5array1T", false], [0, "_CPPv4rmRK5arrayRK5array", false]], "operator& (c++ function)": [[0, "_CPPv4anRK5arrayRK5array", false]], "operator&& (c++ function)": [[0, "_CPPv4aaRK5arrayRK5array", false]], "operator* (c++ function)": [[0, "_CPPv4I0Eml5array1TRK5array", false], [0, "_CPPv4I0Eml5arrayRK5array1T", false], [0, "_CPPv4mlRK5arrayRK5array", false]], "operator+ (c++ function)": [[0, "_CPPv4I0Epl5array1TRK5array", false], [0, "_CPPv4I0Epl5arrayRK5array1T", false], [0, "_CPPv4plRK5arrayRK5array", false]], "operator- (c++ function)": [[0, "_CPPv4I0Emi5array1TRK5array", false], [0, "_CPPv4I0Emi5arrayRK5array1T", false], [0, "_CPPv4miRK5array", false], [0, "_CPPv4miRK5arrayRK5array", false]], "operator/ (c++ function)": [[0, "_CPPv4dvRK5arrayRK5array", false], [0, "_CPPv4dvRK5arrayd", false], [0, "_CPPv4dvdRK5array", false]], "operator< (c++ function)": [[0, "_CPPv4I0Elt5array1TRK5array", false], [0, "_CPPv4I0Elt5arrayRK5array1T", false], [0, "_CPPv4ltRK5arrayRK5array", false]], "operator<< (c++ function)": [[0, "_CPPv4lsRK5arrayRK5array", false]], "operator<= (c++ function)": [[0, "_CPPv4I0Ele5array1TRK5array", false], [0, "_CPPv4I0Ele5arrayRK5array1T", false], [0, "_CPPv4leRK5arrayRK5array", false]], "operator== (c++ function)": [[0, "_CPPv4I0Eeq5array1TRK5array", false], [0, 
"_CPPv4I0Eeq5arrayRK5array1T", false], [0, "_CPPv4eqRK5arrayRK5array", false]], "operator> (c++ function)": [[0, "_CPPv4I0Egt5array1TRK5array", false], [0, "_CPPv4I0Egt5arrayRK5array1T", false], [0, "_CPPv4gtRK5arrayRK5array", false]], "operator>= (c++ function)": [[0, "_CPPv4I0Ege5array1TRK5array", false], [0, "_CPPv4I0Ege5arrayRK5array1T", false], [0, "_CPPv4geRK5arrayRK5array", false]], "operator>> (c++ function)": [[0, "_CPPv4rsRK5arrayRK5array", false]], "operator^ (c++ function)": [[0, "_CPPv4eoRK5arrayRK5array", false]], "operator| (c++ function)": [[0, "_CPPv4orRK5arrayRK5array", false]], "operator|| (c++ function)": [[0, "_CPPv4ooRK5arrayRK5array", false]], "operator~ (c++ function)": [[0, "_CPPv4coRK5array", false]], "optimizer (class in mlx.optimizers)": [[510, "mlx.optimizers.Optimizer", false]], "outer (c++ function)": [[0, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", false]], "outer() (in module mlx.core)": [[252, "mlx.core.outer", false]], "pad (c++ function)": [[0, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", false], [0, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", false], [0, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", false], [0, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", false]], "pad() (in module mlx.core)": [[253, "mlx.core.pad", false]], "parameters() (module method)": [[402, "mlx.nn.Module.parameters", false]], "partition (c++ function)": [[0, "_CPPv49partitionRK5arrayi14StreamOrDevice", false], [0, "_CPPv49partitionRK5arrayii14StreamOrDevice", false]], "partition() (in module mlx.core)": [[254, "mlx.core.partition", false]], "permutation() (in module mlx.core.random)": [[268, "mlx.core.random.permutation", false]], "pinv() (in module mlx.core.linalg)": [[215, "mlx.core.linalg.pinv", false]], "power (c++ function)": [[0, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", false]], "power() (in module mlx.core)": [[255, "mlx.core.power", false]], "prelu (class in mlx.nn)": [[413, "mlx.nn.PReLU", false], [470, "mlx.nn.prelu", false]], "prod (c++ function)": [[0, "_CPPv44prodRK5array14StreamOrDevice", false], [0, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv44prodRK5arrayb14StreamOrDevice", false], [0, "_CPPv44prodRK5arrayib14StreamOrDevice", false]], "prod() (array method)": [[66, "mlx.core.array.prod", false]], "prod() (in module mlx.core)": [[256, "mlx.core.prod", false]], "put_along_axis (c++ function)": [[0, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false]], "put_along_axis() (in module mlx.core)": [[257, "mlx.core.put_along_axis", false]], "qr() (in module mlx.core.linalg)": [[216, "mlx.core.linalg.qr", false]], "quantize (c++ function)": [[0, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", false]], "quantize() (in module mlx.core)": [[258, "mlx.core.quantize", false]], "quantize() (in module mlx.nn)": [[336, "mlx.nn.quantize", false]], "quantized_matmul (c++ function)": [[0, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", false]], "quantized_matmul() (in module mlx.core)": [[259, "mlx.core.quantized_matmul", false]], "quantizedembedding (class in mlx.nn)": [[414, "mlx.nn.QuantizedEmbedding", false]], "quantizedlinear (class in mlx.nn)": [[415, "mlx.nn.QuantizedLinear", false]], "radians (c++ function)": [[0, "_CPPv47radiansRK5array14StreamOrDevice", false]], "radians() (in module mlx.core)": [[260, "mlx.core.radians", 
false]], "randint() (in module mlx.core.random)": [[269, "mlx.core.random.randint", false]], "real (array property)": [[67, "mlx.core.array.real", false]], "real (c++ function)": [[0, "_CPPv44realRK5array14StreamOrDevice", false]], "real() (in module mlx.core)": [[274, "mlx.core.real", false]], "reciprocal (c++ function)": [[0, "_CPPv410reciprocalRK5array14StreamOrDevice", false]], "reciprocal() (array method)": [[68, "mlx.core.array.reciprocal", false]], "reciprocal() (in module mlx.core)": [[275, "mlx.core.reciprocal", false]], "recv() (in module mlx.core.distributed)": [[135, "mlx.core.distributed.recv", false]], "recv_like() (in module mlx.core.distributed)": [[136, "mlx.core.distributed.recv_like", false]], "relu (class in mlx.nn)": [[418, "mlx.nn.ReLU", false], [471, "mlx.nn.relu", false]], "relu6 (class in mlx.nn)": [[419, "mlx.nn.ReLU6", false], [472, "mlx.nn.relu6", false]], "remainder (c++ function)": [[0, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", false]], "remainder() (in module mlx.core)": [[276, "mlx.core.remainder", false]], "repeat (c++ function)": [[0, "_CPPv46repeatRK5arrayi14StreamOrDevice", false], [0, "_CPPv46repeatRK5arrayii14StreamOrDevice", false]], "repeat() (in module mlx.core)": [[277, "mlx.core.repeat", false]], "reset_peak_memory() (in module mlx.core)": [[278, "mlx.core.reset_peak_memory", false]], "reshape (c++ function)": [[0, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", false]], "reshape() (array method)": [[69, "mlx.core.array.reshape", false]], "reshape() (in module mlx.core)": [[279, "mlx.core.reshape", false]], "rfft() (in module mlx.core.fft)": [[171, "mlx.core.fft.rfft", false]], "rfft2() (in module mlx.core.fft)": [[172, "mlx.core.fft.rfft2", false]], "rfftn() (in module mlx.core.fft)": [[173, "mlx.core.fft.rfftn", false]], "right_shift (c++ function)": [[0, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", false]], "right_shift() (in module mlx.core)": [[280, "mlx.core.right_shift", false]], "rms_norm() (in module mlx.core.fast)": [[157, "mlx.core.fast.rms_norm", false]], "rmsnorm (class in mlx.nn)": [[416, "mlx.nn.RMSNorm", false]], "rmsprop (class in mlx.optimizers)": [[502, "mlx.optimizers.RMSprop", false]], "rnn (class in mlx.nn)": [[417, "mlx.nn.RNN", false]], "roll (c++ function)": [[0, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayi14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv44rollRK5arrayii14StreamOrDevice", false]], "roll() (in module mlx.core)": [[281, "mlx.core.roll", false]], "rope (class in mlx.nn)": [[420, "mlx.nn.RoPE", false]], "rope() (in module mlx.core.fast)": [[158, "mlx.core.fast.rope", false]], "round (c++ function)": [[0, "_CPPv45roundRK5array14StreamOrDevice", false], [0, "_CPPv45roundRK5arrayi14StreamOrDevice", false]], "round() (array method)": [[70, "mlx.core.array.round", false]], "round() (in module mlx.core)": [[282, "mlx.core.round", false]], "rsqrt (c++ function)": [[0, "_CPPv45rsqrtRK5array14StreamOrDevice", false]], "rsqrt() (array method)": [[71, "mlx.core.array.rsqrt", false]], "rsqrt() (in module mlx.core)": [[283, "mlx.core.rsqrt", false]], "save() (in module mlx.core)": [[284, "mlx.core.save", false]], "save_gguf() (in module mlx.core)": [[285, "mlx.core.save_gguf", false]], "save_safetensors() (in module mlx.core)": [[286, "mlx.core.save_safetensors", false]], 
"save_weights() (module method)": [[403, "mlx.nn.Module.save_weights", false]], "savez() (in module mlx.core)": [[287, "mlx.core.savez", false]], "savez_compressed() (in module mlx.core)": [[288, "mlx.core.savez_compressed", false]], "scaled_dot_product_attention() (in module mlx.core.fast)": [[159, "mlx.core.fast.scaled_dot_product_attention", false]], "scatter (c++ function)": [[0, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_add (c++ function)": [[0, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_add_axis (c++ function)": [[0, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false]], "scatter_max (c++ function)": [[0, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_min (c++ function)": [[0, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "scatter_prod (c++ function)": [[0, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", false]], "seed() (in module mlx.core.random)": [[270, "mlx.core.random.seed", false]], "segmented_mm (c++ function)": [[0, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", false]], "selu (class in mlx.nn)": [[421, "mlx.nn.SELU", false], [473, "mlx.nn.selu", false]], "send() (in module mlx.core.distributed)": [[137, "mlx.core.distributed.send", false]], "sequential (class in mlx.nn)": [[422, "mlx.nn.Sequential", false]], "set_cache_limit() (in module mlx.core)": [[289, "mlx.core.set_cache_limit", false]], "set_default_device() (in module mlx.core)": [[290, "mlx.core.set_default_device", false]], "set_default_stream() (in module mlx.core)": [[291, "mlx.core.set_default_stream", false]], "set_dtype() (module method)": [[404, "mlx.nn.Module.set_dtype", false]], "set_memory_limit() (in module mlx.core)": [[292, "mlx.core.set_memory_limit", false]], "set_wired_limit() (in module mlx.core)": [[293, "mlx.core.set_wired_limit", false]], "sgd (class in mlx.optimizers)": [[503, "mlx.optimizers.SGD", false]], "shape (array property)": [[72, "mlx.core.array.shape", false]], "sigmoid (c++ function)": [[0, "_CPPv47sigmoidRK5array14StreamOrDevice", false]], "sigmoid (class in mlx.nn)": [[424, "mlx.nn.Sigmoid", false], [474, "mlx.nn.sigmoid", false]], "sigmoid() (in module mlx.core)": [[294, "mlx.core.sigmoid", false]], "sign (c++ function)": [[0, "_CPPv44signRK5array14StreamOrDevice", false]], "sign() (in module mlx.core)": [[295, "mlx.core.sign", false]], "silu (class in mlx.nn)": [[423, "mlx.nn.SiLU", false], [475, "mlx.nn.silu", false]], "sin (c++ function)": [[0, "_CPPv43sinRK5array14StreamOrDevice", false]], "sin() (array method)": [[73, "mlx.core.array.sin", false]], "sin() (in module mlx.core)": [[296, "mlx.core.sin", false]], "sinh (c++ function)": [[0, "_CPPv44sinhRK5array14StreamOrDevice", false]], "sinh() (in module mlx.core)": [[297, "mlx.core.sinh", false]], "sinusoidalpositionalencoding (class in mlx.nn)": [[425, "mlx.nn.SinusoidalPositionalEncoding", false]], "size (array property)": [[74, 
"mlx.core.array.size", false]], "slice (c++ function)": [[0, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", false], [0, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", false], [0, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", false], [0, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", false]], "slice() (in module mlx.core)": [[298, "mlx.core.slice", false]], "slice_update (c++ function)": [[0, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", false], [0, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", false], [0, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", false]], "slice_update() (in module mlx.core)": [[299, "mlx.core.slice_update", false]], "smooth_l1_loss (class in mlx.nn.losses)": [[467, "mlx.nn.losses.smooth_l1_loss", false]], "softmax (c++ function)": [[0, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv47softmaxRK5arrayb14StreamOrDevice", false], [0, "_CPPv47softmaxRK5arrayib14StreamOrDevice", false]], "softmax (class in mlx.nn)": [[426, "mlx.nn.Softmax", false], [476, "mlx.nn.softmax", false]], "softmax() (in module mlx.core)": [[300, "mlx.core.softmax", false]], "softmin (class in mlx.nn)": [[427, "mlx.nn.Softmin", false], [477, "mlx.nn.softmin", false]], "softplus (class in mlx.nn)": [[428, "mlx.nn.Softplus", false], [478, "mlx.nn.softplus", false]], "softshrink (class in mlx.nn)": [[429, "mlx.nn.Softshrink", false], [479, "mlx.nn.softshrink", false]], "softsign (class in mlx.nn)": [[430, "mlx.nn.Softsign", false]], "solve() (in module mlx.core.linalg)": [[217, "mlx.core.linalg.solve", false]], "solve_triangular() (in module mlx.core.linalg)": [[218, "mlx.core.linalg.solve_triangular", false]], "sort (c++ function)": [[0, "_CPPv44sortRK5array14StreamOrDevice", false], [0, "_CPPv44sortRK5arrayi14StreamOrDevice", false]], "sort() (in module mlx.core)": [[301, "mlx.core.sort", false]], "split (c++ function)": [[0, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", false], [0, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", false], [0, "_CPPv45splitRK5arrayi14StreamOrDevice", false], [0, "_CPPv45splitRK5arrayii14StreamOrDevice", false]], "split() (array method)": [[75, "mlx.core.array.split", false]], "split() (in module mlx.core)": [[302, "mlx.core.split", false]], "split() (in module mlx.core.random)": [[271, "mlx.core.random.split", false]], "sqrt (c++ function)": [[0, "_CPPv44sqrtRK5array14StreamOrDevice", false]], "sqrt() (array method)": [[76, "mlx.core.array.sqrt", false]], "sqrt() (in module mlx.core)": [[303, "mlx.core.sqrt", false]], "square (c++ function)": [[0, "_CPPv46squareRK5array14StreamOrDevice", false]], "square() (array method)": [[77, "mlx.core.array.square", false]], "square() (in module mlx.core)": [[304, "mlx.core.square", false]], "squeeze (c++ function)": [[0, "_CPPv47squeezeRK5array14StreamOrDevice", false], [0, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", false], [0, "_CPPv47squeezeRK5arrayi14StreamOrDevice", false]], "squeeze() (array method)": [[78, "mlx.core.array.squeeze", false]], "squeeze() (in module mlx.core)": [[305, "mlx.core.squeeze", false]], "stack (c++ function)": [[0, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", false], [0, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", false]], "stack() (in module mlx.core)": [[306, "mlx.core.stack", false]], "start_capture() (in module mlx.core.metal)": [[240, "mlx.core.metal.start_capture", false]], "state 
(module property)": [[405, "mlx.nn.Module.state", false]], "state (optimizer property)": [[500, "mlx.optimizers.Optimizer.state", false]], "std (c++ function)": [[0, "_CPPv4StRK5array14StreamOrDevice", false], [0, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", false], [0, "_CPPv4StRK5arraybi14StreamOrDevice", false], [0, "_CPPv4StRK5arrayibi14StreamOrDevice", false]], "std() (array method)": [[79, "mlx.core.array.std", false]], "std() (in module mlx.core)": [[307, "mlx.core.std", false]], "step (class in mlx.nn)": [[431, "mlx.nn.Step", false], [480, "mlx.nn.step", false]], "step_decay() (in module mlx.optimizers)": [[508, "mlx.optimizers.step_decay", false]], "stop_capture() (in module mlx.core.metal)": [[241, "mlx.core.metal.stop_capture", false]], "stop_gradient (c++ function)": [[0, "_CPPv413stop_gradientRK5array14StreamOrDevice", false]], "stop_gradient() (in module mlx.core)": [[308, "mlx.core.stop_gradient", false]], "stream (class in mlx.core)": [[344, "mlx.core.Stream", false]], "stream() (in module mlx.core)": [[309, "mlx.core.stream", false]], "subtract (c++ function)": [[0, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", false]], "subtract() (in module mlx.core)": [[310, "mlx.core.subtract", false]], "sum (c++ function)": [[0, "_CPPv43sumRK5array14StreamOrDevice", false], [0, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", false], [0, "_CPPv43sumRK5arrayb14StreamOrDevice", false], [0, "_CPPv43sumRK5arrayib14StreamOrDevice", false]], "sum() (array method)": [[80, "mlx.core.array.sum", false]], "sum() (in module mlx.core)": [[311, "mlx.core.sum", false]], "svd() (in module mlx.core.linalg)": [[219, "mlx.core.linalg.svd", false]], "swapaxes (c++ function)": [[0, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", false]], "swapaxes() (array method)": [[81, "mlx.core.array.swapaxes", false]], "swapaxes() (in module mlx.core)": [[312, "mlx.core.swapaxes", false]], "synchronize() (in module mlx.core)": [[313, "mlx.core.synchronize", false]], "t (array property)": [[32, "mlx.core.array.T", false]], "take (c++ function)": [[0, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", false], [0, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", false], [0, "_CPPv44takeRK5arrayi14StreamOrDevice", false], [0, "_CPPv44takeRK5arrayii14StreamOrDevice", false]], "take() (in module mlx.core)": [[314, "mlx.core.take", false]], "take_along_axis (c++ function)": [[0, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", false]], "take_along_axis() (in module mlx.core)": [[315, "mlx.core.take_along_axis", false]], "tan (c++ function)": [[0, "_CPPv43tanRK5array14StreamOrDevice", false]], "tan() (in module mlx.core)": [[316, "mlx.core.tan", false]], "tanh (c++ function)": [[0, "_CPPv44tanhRK5array14StreamOrDevice", false]], "tanh (class in mlx.nn)": [[432, "mlx.nn.Tanh", false], [481, "mlx.nn.tanh", false]], "tanh() (in module mlx.core)": [[317, "mlx.core.tanh", false]], "tensordot (c++ function)": [[0, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", false], [0, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", false]], "tensordot() (in module mlx.core)": [[318, "mlx.core.tensordot", false]], "tile (c++ function)": [[0, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", false]], "tile() (in module mlx.core)": [[319, "mlx.core.tile", false]], "tolist() (array method)": [[82, "mlx.core.array.tolist", false]], "topk (c++ function)": [[0, "_CPPv44topkRK5arrayi14StreamOrDevice", false], [0, "_CPPv44topkRK5arrayii14StreamOrDevice", false]], "topk() (in module 
mlx.core)": [[320, "mlx.core.topk", false]], "trace (c++ function)": [[0, "_CPPv45traceRK5array14StreamOrDevice", false], [0, "_CPPv45traceRK5arrayiii14StreamOrDevice", false], [0, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", false]], "trace() (in module mlx.core)": [[321, "mlx.core.trace", false]], "train() (module method)": [[406, "mlx.nn.Module.train", false]], "trainable_parameters() (module method)": [[407, "mlx.nn.Module.trainable_parameters", false]], "training (module property)": [[408, "mlx.nn.Module.training", false]], "transformer (class in mlx.nn)": [[433, "mlx.nn.Transformer", false]], "transpose (c++ function)": [[0, "_CPPv49transposeRK5array14StreamOrDevice", false], [0, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", false], [0, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", false]], "transpose() (array method)": [[83, "mlx.core.array.transpose", false]], "transpose() (in module mlx.core)": [[322, "mlx.core.transpose", false]], "tree_flatten() (in module mlx.utils)": [[339, "mlx.utils.tree_flatten", false]], "tree_map() (in module mlx.utils)": [[340, "mlx.utils.tree_map", false]], "tree_map_with_path() (in module mlx.utils)": [[341, "mlx.utils.tree_map_with_path", false]], "tree_reduce() (in module mlx.utils)": [[342, "mlx.utils.tree_reduce", false]], "tree_unflatten() (in module mlx.utils)": [[343, "mlx.utils.tree_unflatten", false]], "tri (c++ function)": [[0, "_CPPv43trii5Dtype14StreamOrDevice", false], [0, "_CPPv43triiii5Dtype14StreamOrDevice", false]], "tri() (in module mlx.core)": [[323, "mlx.core.tri", false]], "tri_inv() (in module mlx.core.linalg)": [[220, "mlx.core.linalg.tri_inv", false]], "tril (c++ function)": [[0, "_CPPv44tril5arrayi14StreamOrDevice", false]], "tril() (in module mlx.core)": [[324, "mlx.core.tril", false]], "triplet_loss (class in mlx.nn.losses)": [[468, "mlx.nn.losses.triplet_loss", false]], "triu (c++ function)": [[0, "_CPPv44triu5arrayi14StreamOrDevice", false]], "triu() (in module mlx.core)": [[325, "mlx.core.triu", false]], "truncated_normal() (in module mlx.core.random)": [[272, "mlx.core.random.truncated_normal", false]], "unflatten (c++ function)": [[0, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", false]], "unflatten() (in module mlx.core)": [[326, "mlx.core.unflatten", false]], "unfreeze() (module method)": [[409, "mlx.nn.Module.unfreeze", false]], "uniform() (in module mlx.core.random)": [[273, "mlx.core.random.uniform", false]], "uniform() (in module mlx.nn.init)": [[442, "mlx.nn.init.uniform", false]], "update() (module method)": [[410, "mlx.nn.Module.update", false]], "update() (optimizer method)": [[501, "mlx.optimizers.Optimizer.update", false]], "update_modules() (module method)": [[411, "mlx.nn.Module.update_modules", false]], "upsample (class in mlx.nn)": [[434, "mlx.nn.Upsample", false]], "value_and_grad() (in module mlx.core)": [[327, "mlx.core.value_and_grad", false]], "value_and_grad() (in module mlx.nn)": [[337, "mlx.nn.value_and_grad", false]], "var (c++ function)": [[0, "_CPPv43varRK5array14StreamOrDevice", false], [0, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", false], [0, "_CPPv43varRK5arraybi14StreamOrDevice", false], [0, "_CPPv43varRK5arrayibi14StreamOrDevice", false]], "var() (array method)": [[84, "mlx.core.array.var", false]], "var() (in module mlx.core)": [[328, "mlx.core.var", false]], "view (c++ function)": [[0, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", false]], "view() (array method)": [[85, "mlx.core.array.view", false]], "view() (in module 
mlx.core)": [[329, "mlx.core.view", false]], "vjp() (in module mlx.core)": [[330, "mlx.core.vjp", false]], "vmap() (in module mlx.core)": [[331, "mlx.core.vmap", false]], "where (c++ function)": [[0, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", false]], "where() (in module mlx.core)": [[332, "mlx.core.where", false]], "zeros (c++ function)": [[0, "_CPPv45zerosRK5Shape14StreamOrDevice", false], [0, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", false]], "zeros() (in module mlx.core)": [[333, "mlx.core.zeros", false]], "zeros_like (c++ function)": [[0, "_CPPv410zeros_likeRK5array14StreamOrDevice", false]], "zeros_like() (in module mlx.core)": [[334, "mlx.core.zeros_like", false]]}, "objects": {"": [[0, 0, 1, "_CPPv43absRK5array14StreamOrDevice", "abs"], [0, 1, 1, "_CPPv43absRK5array14StreamOrDevice", "abs::a"], [0, 1, 1, "_CPPv43absRK5array14StreamOrDevice", "abs::s"], [0, 0, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add"], [0, 1, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add::a"], [0, 1, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add::b"], [0, 1, 1, "_CPPv43addRK5arrayRK5array14StreamOrDevice", "add::s"], [0, 0, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::a"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::alpha"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::b"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::beta"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::c"], [0, 1, 1, "_CPPv45addmm5array5array5arrayRKfRKf14StreamOrDevice", "addmm::s"], [0, 0, 1, "_CPPv43allRK5array14StreamOrDevice", "all"], [0, 0, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all"], [0, 0, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all"], [0, 0, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all"], [0, 1, 1, "_CPPv43allRK5array14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::a"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::axes"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::axis"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::keepdims"], [0, 1, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all::keepdims"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::keepdims"], [0, 1, 1, "_CPPv43allRK5array14StreamOrDevice", "all::s"], [0, 1, 1, "_CPPv43allRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "all::s"], [0, 1, 1, "_CPPv43allRK5arrayb14StreamOrDevice", "all::s"], [0, 1, 1, "_CPPv43allRK5arrayib14StreamOrDevice", "all::s"], [0, 0, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::a"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::atol"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::b"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::equal_nan"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::rtol"], [0, 1, 1, "_CPPv48allcloseRK5arrayRK5arrayddb14StreamOrDevice", "allclose::s"], [0, 0, 1, "_CPPv43anyRK5array14StreamOrDevice", "any"], [0, 0, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any"], [0, 0, 1, 
"_CPPv43anyRK5arrayb14StreamOrDevice", "any"], [0, 0, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any"], [0, 1, 1, "_CPPv43anyRK5array14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::a"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::axes"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::axis"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::keepdims"], [0, 1, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any::keepdims"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::keepdims"], [0, 1, 1, "_CPPv43anyRK5array14StreamOrDevice", "any::s"], [0, 1, 1, "_CPPv43anyRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "any::s"], [0, 1, 1, "_CPPv43anyRK5arrayb14StreamOrDevice", "any::s"], [0, 1, 1, "_CPPv43anyRK5arrayib14StreamOrDevice", "any::s"], [0, 0, 1, "_CPPv46aranged14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangedd14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeddd14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangei14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeii14StreamOrDevice", "arange"], [0, 0, 1, "_CPPv46arangeiii14StreamOrDevice", "arange"], [0, 1, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange::dtype"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::dtype"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::dtype"], [0, 1, 1, "_CPPv46aranged14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangedd14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangei14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeii14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::s"], [0, 1, 1, "_CPPv46arangedd14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeii14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::start"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::step"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::step"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::step"], [0, 1, 1, "_CPPv46aranged14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46aranged5Dtype14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangedd14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangedd5Dtype14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeddd14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeddd5Dtype14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangei14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeii14StreamOrDevice", "arange::stop"], [0, 1, 1, "_CPPv46arangeiii14StreamOrDevice", "arange::stop"], [0, 0, 1, "_CPPv46arccosRK5array14StreamOrDevice", "arccos"], [0, 1, 1, "_CPPv46arccosRK5array14StreamOrDevice", "arccos::a"], [0, 1, 
1, "_CPPv46arccosRK5array14StreamOrDevice", "arccos::s"], [0, 0, 1, "_CPPv47arccoshRK5array14StreamOrDevice", "arccosh"], [0, 1, 1, "_CPPv47arccoshRK5array14StreamOrDevice", "arccosh::a"], [0, 1, 1, "_CPPv47arccoshRK5array14StreamOrDevice", "arccosh::s"], [0, 0, 1, "_CPPv46arcsinRK5array14StreamOrDevice", "arcsin"], [0, 1, 1, "_CPPv46arcsinRK5array14StreamOrDevice", "arcsin::a"], [0, 1, 1, "_CPPv46arcsinRK5array14StreamOrDevice", "arcsin::s"], [0, 0, 1, "_CPPv47arcsinhRK5array14StreamOrDevice", "arcsinh"], [0, 1, 1, "_CPPv47arcsinhRK5array14StreamOrDevice", "arcsinh::a"], [0, 1, 1, "_CPPv47arcsinhRK5array14StreamOrDevice", "arcsinh::s"], [0, 0, 1, "_CPPv46arctanRK5array14StreamOrDevice", "arctan"], [0, 0, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2"], [0, 1, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2::a"], [0, 1, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2::b"], [0, 1, 1, "_CPPv47arctan2RK5arrayRK5array14StreamOrDevice", "arctan2::s"], [0, 1, 1, "_CPPv46arctanRK5array14StreamOrDevice", "arctan::a"], [0, 1, 1, "_CPPv46arctanRK5array14StreamOrDevice", "arctan::s"], [0, 0, 1, "_CPPv47arctanhRK5array14StreamOrDevice", "arctanh"], [0, 1, 1, "_CPPv47arctanhRK5array14StreamOrDevice", "arctanh::a"], [0, 1, 1, "_CPPv47arctanhRK5array14StreamOrDevice", "arctanh::s"], [0, 0, 1, "_CPPv46argmaxRK5array14StreamOrDevice", "argmax"], [0, 0, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax"], [0, 0, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax"], [0, 1, 1, "_CPPv46argmaxRK5array14StreamOrDevice", "argmax::a"], [0, 1, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax::a"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::a"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::axis"], [0, 1, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax::keepdims"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::keepdims"], [0, 1, 1, "_CPPv46argmaxRK5array14StreamOrDevice", "argmax::s"], [0, 1, 1, "_CPPv46argmaxRK5arrayb14StreamOrDevice", "argmax::s"], [0, 1, 1, "_CPPv46argmaxRK5arrayib14StreamOrDevice", "argmax::s"], [0, 0, 1, "_CPPv46argminRK5array14StreamOrDevice", "argmin"], [0, 0, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin"], [0, 0, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin"], [0, 1, 1, "_CPPv46argminRK5array14StreamOrDevice", "argmin::a"], [0, 1, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin::a"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::a"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::axis"], [0, 1, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin::keepdims"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::keepdims"], [0, 1, 1, "_CPPv46argminRK5array14StreamOrDevice", "argmin::s"], [0, 1, 1, "_CPPv46argminRK5arrayb14StreamOrDevice", "argmin::s"], [0, 1, 1, "_CPPv46argminRK5arrayib14StreamOrDevice", "argmin::s"], [0, 0, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition"], [0, 0, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition"], [0, 1, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition::a"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::a"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::axis"], [0, 1, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", "argpartition::kth"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::kth"], [0, 1, 1, "_CPPv412argpartitionRK5arrayi14StreamOrDevice", 
"argpartition::s"], [0, 1, 1, "_CPPv412argpartitionRK5arrayii14StreamOrDevice", "argpartition::s"], [0, 0, 1, "_CPPv47argsortRK5array14StreamOrDevice", "argsort"], [0, 0, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort"], [0, 1, 1, "_CPPv47argsortRK5array14StreamOrDevice", "argsort::a"], [0, 1, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort::a"], [0, 1, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort::axis"], [0, 1, 1, "_CPPv47argsortRK5array14StreamOrDevice", "argsort::s"], [0, 1, 1, "_CPPv47argsortRK5arrayi14StreamOrDevice", "argsort::s"], [0, 0, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal"], [0, 0, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal::a"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::a"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal::b"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::b"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::equal_nan"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5array14StreamOrDevice", "array_equal::s"], [0, 1, 1, "_CPPv411array_equalRK5arrayRK5arrayb14StreamOrDevice", "array_equal::s"], [0, 0, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::a"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::offset"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::s"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::shape"], [0, 1, 1, "_CPPv410as_strided5array5Shape7Strides6size_t14StreamOrDevice", "as_strided::strides"], [0, 0, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype"], [0, 1, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype::a"], [0, 1, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype::dtype"], [0, 1, 1, "_CPPv46astype5array5Dtype14StreamOrDevice", "astype::s"], [0, 0, 1, "_CPPv410atleast_1dRK5array14StreamOrDevice", "atleast_1d"], [0, 0, 1, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_1d"], [0, 1, 1, "_CPPv410atleast_1dRK5array14StreamOrDevice", "atleast_1d::a"], [0, 1, 1, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_1d::a"], [0, 1, 1, "_CPPv410atleast_1dRK5array14StreamOrDevice", "atleast_1d::s"], [0, 1, 1, "_CPPv410atleast_1dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_1d::s"], [0, 0, 1, "_CPPv410atleast_2dRK5array14StreamOrDevice", "atleast_2d"], [0, 0, 1, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_2d"], [0, 1, 1, "_CPPv410atleast_2dRK5array14StreamOrDevice", "atleast_2d::a"], [0, 1, 1, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_2d::a"], [0, 1, 1, "_CPPv410atleast_2dRK5array14StreamOrDevice", "atleast_2d::s"], [0, 1, 1, "_CPPv410atleast_2dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_2d::s"], [0, 0, 1, "_CPPv410atleast_3dRK5array14StreamOrDevice", "atleast_3d"], [0, 0, 1, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_3d"], [0, 1, 1, "_CPPv410atleast_3dRK5array14StreamOrDevice", "atleast_3d::a"], [0, 1, 1, "_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_3d::a"], [0, 1, 1, "_CPPv410atleast_3dRK5array14StreamOrDevice", "atleast_3d::s"], [0, 1, 1, 
"_CPPv410atleast_3dRKNSt6vectorI5arrayEE14StreamOrDevice", "atleast_3d::s"], [0, 0, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and"], [0, 1, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and::a"], [0, 1, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and::b"], [0, 1, 1, "_CPPv411bitwise_andRK5arrayRK5array14StreamOrDevice", "bitwise_and::s"], [0, 0, 1, "_CPPv414bitwise_invertRK5array14StreamOrDevice", "bitwise_invert"], [0, 1, 1, "_CPPv414bitwise_invertRK5array14StreamOrDevice", "bitwise_invert::a"], [0, 1, 1, "_CPPv414bitwise_invertRK5array14StreamOrDevice", "bitwise_invert::s"], [0, 0, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or"], [0, 1, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or::a"], [0, 1, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or::b"], [0, 1, 1, "_CPPv410bitwise_orRK5arrayRK5array14StreamOrDevice", "bitwise_or::s"], [0, 0, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor"], [0, 1, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor::a"], [0, 1, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor::b"], [0, 1, 1, "_CPPv411bitwise_xorRK5arrayRK5array14StreamOrDevice", "bitwise_xor::s"], [0, 0, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::a"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::b"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::block_size"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::mask_lhs"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::mask_out"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::mask_rhs"], [0, 1, 1, "_CPPv415block_masked_mm5array5arrayiNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEE14StreamOrDevice", "block_masked_mm::s"], [0, 0, 1, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", "broadcast_arrays"], [0, 1, 1, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", "broadcast_arrays::inputs"], [0, 1, 1, "_CPPv416broadcast_arraysRKNSt6vectorI5arrayEE14StreamOrDevice", "broadcast_arrays::s"], [0, 0, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to"], [0, 1, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to::a"], [0, 1, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to::s"], [0, 1, 1, "_CPPv412broadcast_toRK5arrayRK5Shape14StreamOrDevice", "broadcast_to::shape"], [0, 0, 1, "_CPPv44ceilRK5array14StreamOrDevice", "ceil"], [0, 1, 1, "_CPPv44ceilRK5array14StreamOrDevice", "ceil::a"], [0, 1, 1, "_CPPv44ceilRK5array14StreamOrDevice", "ceil::s"], [0, 0, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip"], [0, 1, 1, 
"_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::a"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::a_max"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::a_min"], [0, 1, 1, "_CPPv44clipRK5arrayRKNSt8optionalI5arrayEERKNSt8optionalI5arrayEE14StreamOrDevice", "clip::s"], [0, 0, 1, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", "concatenate"], [0, 0, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", "concatenate::arrays"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate::arrays"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate::axis"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEE14StreamOrDevice", "concatenate::s"], [0, 1, 1, "_CPPv411concatenateNSt6vectorI5arrayEEi14StreamOrDevice", "concatenate::s"], [0, 0, 1, "_CPPv49conjugateRK5array14StreamOrDevice", "conjugate"], [0, 1, 1, "_CPPv49conjugateRK5array14StreamOrDevice", "conjugate::a"], [0, 1, 1, "_CPPv49conjugateRK5array14StreamOrDevice", "conjugate::s"], [0, 0, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous"], [0, 1, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous::a"], [0, 1, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous::allow_col_major"], [0, 1, 1, "_CPPv410contiguousRK5arrayb14StreamOrDevice", "contiguous::s"], [0, 0, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::dilation"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::groups"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::input"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::padding"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::s"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::stride"], [0, 1, 1, "_CPPv46conv1dRK5arrayRK5arrayiiii14StreamOrDevice", "conv1d::weight"], [0, 0, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::dilation"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::groups"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::input"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::padding"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::s"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::stride"], [0, 1, 1, "_CPPv46conv2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv2d::weight"], [0, 0, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::dilation"], [0, 1, 1, 
"_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::groups"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::input"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::padding"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::s"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::stride"], [0, 1, 1, "_CPPv46conv3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv3d::weight"], [0, 0, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general"], [0, 0, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::flip"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::flip"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::groups"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::groups"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input_dilation"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::input_dilation"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::kernel_dilation"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::kernel_dilation"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::padding"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::padding_hi"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::padding_lo"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::s"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::s"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", 
"conv_general::stride"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::stride"], [0, 1, 1, "_CPPv412conv_general5array5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::weight"], [0, 1, 1, "_CPPv412conv_generalRK5arrayRK5arrayNSt6vectorIiEENSt6vectorIiEENSt6vectorIiEENSt6vectorIiEEib14StreamOrDevice", "conv_general::weight"], [0, 0, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::dilation"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::groups"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::input"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::output_padding"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::padding"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::s"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::stride"], [0, 1, 1, "_CPPv416conv_transpose1dRK5arrayRK5arrayiiiii14StreamOrDevice", "conv_transpose1d::weight"], [0, 0, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::dilation"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::groups"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::input"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::output_padding"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::padding"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::s"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::stride"], [0, 1, 1, "_CPPv416conv_transpose2dRK5arrayRK5arrayRKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEERKNSt4pairIiiEEi14StreamOrDevice", "conv_transpose2d::weight"], [0, 0, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::dilation"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::groups"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::input"], [0, 1, 1, 
"_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::output_padding"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::padding"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::s"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::stride"], [0, 1, 1, "_CPPv416conv_transpose3dRK5arrayRK5arrayRKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEERKNSt5tupleIiiiEEi14StreamOrDevice", "conv_transpose3d::weight"], [0, 0, 1, "_CPPv44copy5array14StreamOrDevice", "copy"], [0, 1, 1, "_CPPv44copy5array14StreamOrDevice", "copy::a"], [0, 1, 1, "_CPPv44copy5array14StreamOrDevice", "copy::s"], [0, 0, 1, "_CPPv43cosRK5array14StreamOrDevice", "cos"], [0, 1, 1, "_CPPv43cosRK5array14StreamOrDevice", "cos::a"], [0, 1, 1, "_CPPv43cosRK5array14StreamOrDevice", "cos::s"], [0, 0, 1, "_CPPv44coshRK5array14StreamOrDevice", "cosh"], [0, 1, 1, "_CPPv44coshRK5array14StreamOrDevice", "cosh::a"], [0, 1, 1, "_CPPv44coshRK5array14StreamOrDevice", "cosh::s"], [0, 0, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::a"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::axis"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::inclusive"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::reverse"], [0, 1, 1, "_CPPv46cummaxRK5arrayibb14StreamOrDevice", "cummax::s"], [0, 0, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::a"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::axis"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::inclusive"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::reverse"], [0, 1, 1, "_CPPv46cumminRK5arrayibb14StreamOrDevice", "cummin::s"], [0, 0, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::a"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::axis"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::inclusive"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::reverse"], [0, 1, 1, "_CPPv47cumprodRK5arrayibb14StreamOrDevice", "cumprod::s"], [0, 0, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::a"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::axis"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::inclusive"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::reverse"], [0, 1, 1, "_CPPv46cumsumRK5arrayibb14StreamOrDevice", "cumsum::s"], [0, 0, 1, "_CPPv47degreesRK5array14StreamOrDevice", "degrees"], [0, 1, 1, "_CPPv47degreesRK5array14StreamOrDevice", "degrees::a"], [0, 1, 1, "_CPPv47degreesRK5array14StreamOrDevice", "degrees::s"], [0, 0, 1, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", "depends"], [0, 1, 1, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", "depends::dependencies"], [0, 1, 1, "_CPPv47dependsRKNSt6vectorI5arrayEERKNSt6vectorI5arrayEE", "depends::inputs"], [0, 0, 1, 
"_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::biases"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::bits"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::group_size"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::mode"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::s"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::scales"], [0, 1, 1, "_CPPv410dequantizeRK5arrayRK5arrayRKNSt8optionalI5arrayEEiiRKNSt6stringE14StreamOrDevice", "dequantize::w"], [0, 0, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag"], [0, 1, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag::a"], [0, 1, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag::k"], [0, 1, 1, "_CPPv44diagRK5arrayi14StreamOrDevice", "diag::s"], [0, 0, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::a"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::axis1"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::axis2"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::offset"], [0, 1, 1, "_CPPv48diagonalRK5arrayiii14StreamOrDevice", "diagonal::s"], [0, 0, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide"], [0, 1, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide::a"], [0, 1, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide::b"], [0, 1, 1, "_CPPv46divideRK5arrayRK5array14StreamOrDevice", "divide::s"], [0, 0, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod"], [0, 1, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod::a"], [0, 1, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod::b"], [0, 1, 1, "_CPPv46divmodRK5arrayRK5array14StreamOrDevice", "divmod::s"], [0, 0, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal"], [0, 1, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal::a"], [0, 1, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal::b"], [0, 1, 1, "_CPPv45equalRK5arrayRK5array14StreamOrDevice", "equal::s"], [0, 0, 1, "_CPPv43erfRK5array14StreamOrDevice", "erf"], [0, 1, 1, "_CPPv43erfRK5array14StreamOrDevice", "erf::a"], [0, 1, 1, "_CPPv43erfRK5array14StreamOrDevice", "erf::s"], [0, 0, 1, "_CPPv46erfinvRK5array14StreamOrDevice", "erfinv"], [0, 1, 1, "_CPPv46erfinvRK5array14StreamOrDevice", "erfinv::a"], [0, 1, 1, "_CPPv46erfinvRK5array14StreamOrDevice", "erfinv::s"], [0, 0, 1, "_CPPv43expRK5array14StreamOrDevice", "exp"], [0, 1, 1, "_CPPv43expRK5array14StreamOrDevice", "exp::a"], [0, 1, 1, "_CPPv43expRK5array14StreamOrDevice", "exp::s"], [0, 0, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims"], [0, 0, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims::a"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims::a"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims::axes"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", 
"expand_dims::axis"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "expand_dims::s"], [0, 1, 1, "_CPPv411expand_dimsRK5arrayi14StreamOrDevice", "expand_dims::s"], [0, 0, 1, "_CPPv45expm1RK5array14StreamOrDevice", "expm1"], [0, 1, 1, "_CPPv45expm1RK5array14StreamOrDevice", "expm1::a"], [0, 1, 1, "_CPPv45expm1RK5array14StreamOrDevice", "expm1::s"], [0, 0, 1, "_CPPv43eyei14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyeii14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyeiii14StreamOrDevice", "eye"], [0, 0, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye"], [0, 1, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye::dtype"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::dtype"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::k"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::k"], [0, 1, 1, "_CPPv43eyeii14StreamOrDevice", "eye::m"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::m"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::m"], [0, 1, 1, "_CPPv43eyei14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyeii14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::n"], [0, 1, 1, "_CPPv43eyei14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyei5Dtype14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyeii14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyeiii14StreamOrDevice", "eye::s"], [0, 1, 1, "_CPPv43eyeiii5Dtype14StreamOrDevice", "eye::s"], [0, 0, 1, "_CPPv47flattenRK5array14StreamOrDevice", "flatten"], [0, 0, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten"], [0, 1, 1, "_CPPv47flattenRK5array14StreamOrDevice", "flatten::a"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::a"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::end_axis"], [0, 1, 1, "_CPPv47flattenRK5array14StreamOrDevice", "flatten::s"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::s"], [0, 1, 1, "_CPPv47flattenRK5arrayii14StreamOrDevice", "flatten::start_axis"], [0, 0, 1, "_CPPv45floorRK5array14StreamOrDevice", "floor"], [0, 1, 1, "_CPPv45floorRK5array14StreamOrDevice", "floor::a"], [0, 1, 1, "_CPPv45floorRK5array14StreamOrDevice", "floor::s"], [0, 0, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide"], [0, 1, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide::a"], [0, 1, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide::b"], [0, 1, 1, "_CPPv412floor_divideRK5arrayRK5array14StreamOrDevice", "floor_divide::s"], [0, 0, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full"], [0, 0, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full"], [0, 0, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full"], [0, 0, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full"], [0, 2, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::T"], [0, 2, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::T"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::dtype"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::dtype"], [0, 1, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full::s"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::s"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::s"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::s"], [0, 1, 1, 
"_CPPv44full5Shape5array14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::shape"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T14StreamOrDevice", "full::val"], [0, 1, 1, "_CPPv4I0E4full5array5Shape1T5Dtype14StreamOrDevice", "full::val"], [0, 1, 1, "_CPPv44full5Shape5array14StreamOrDevice", "full::vals"], [0, 1, 1, "_CPPv44full5Shape5array5Dtype14StreamOrDevice", "full::vals"], [0, 0, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather"], [0, 0, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::a"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::a"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::axes"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::axis"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::indices"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::indices"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::s"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::s"], [0, 1, 1, "_CPPv46gatherRK5arrayRK5arrayiRK5Shape14StreamOrDevice", "gather::slice_sizes"], [0, 1, 1, "_CPPv46gatherRK5arrayRKNSt6vectorI5arrayEERKNSt6vectorIiEERK5Shape14StreamOrDevice", "gather::slice_sizes"], [0, 0, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::a"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::b"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::lhs_indices"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::rhs_indices"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::s"], [0, 1, 1, "_CPPv49gather_mm5array5arrayNSt8optionalI5arrayEENSt8optionalI5arrayEEb14StreamOrDevice", "gather_mm::sorted_indices"], [0, 0, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::biases"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::bits"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::group_size"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::lhs_indices"], [0, 1, 1, 
"_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::mode"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::rhs_indices"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::s"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::scales"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::sorted_indices"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::transpose"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::w"], [0, 1, 1, "_CPPv410gather_qmmRK5arrayRK5arrayRK5arrayRKNSt8optionalI5arrayEENSt8optionalI5arrayEENSt8optionalI5arrayEEbiiRKNSt6stringEb14StreamOrDevice", "gather_qmm::x"], [0, 0, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater"], [0, 1, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater::a"], [0, 1, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater::b"], [0, 1, 1, "_CPPv47greaterRK5arrayRK5array14StreamOrDevice", "greater::s"], [0, 0, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal"], [0, 1, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal::a"], [0, 1, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal::b"], [0, 1, 1, "_CPPv413greater_equalRK5arrayRK5array14StreamOrDevice", "greater_equal::s"], [0, 0, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform"], [0, 1, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform::a"], [0, 1, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform::s"], [0, 1, 1, "_CPPv418hadamard_transformRK5arrayNSt8optionalIfEE14StreamOrDevice", "hadamard_transform::scale"], [0, 0, 1, "_CPPv48identityi14StreamOrDevice", "identity"], [0, 0, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity"], [0, 1, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity::dtype"], [0, 1, 1, "_CPPv48identityi14StreamOrDevice", "identity::n"], [0, 1, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity::n"], [0, 1, 1, "_CPPv48identityi14StreamOrDevice", "identity::s"], [0, 1, 1, "_CPPv48identityi5Dtype14StreamOrDevice", "identity::s"], [0, 0, 1, "_CPPv44imagRK5array14StreamOrDevice", "imag"], [0, 1, 1, "_CPPv44imagRK5array14StreamOrDevice", "imag::a"], [0, 1, 1, "_CPPv44imagRK5array14StreamOrDevice", "imag::s"], [0, 0, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner"], [0, 1, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner::a"], [0, 1, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner::b"], [0, 1, 1, "_CPPv45innerRK5arrayRK5array14StreamOrDevice", "inner::s"], [0, 0, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::a"], [0, 1, 
1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::atol"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::b"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::equal_nan"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::rtol"], [0, 1, 1, "_CPPv47iscloseRK5arrayRK5arrayddb14StreamOrDevice", "isclose::s"], [0, 0, 1, "_CPPv48isfiniteRK5array14StreamOrDevice", "isfinite"], [0, 1, 1, "_CPPv48isfiniteRK5array14StreamOrDevice", "isfinite::a"], [0, 1, 1, "_CPPv48isfiniteRK5array14StreamOrDevice", "isfinite::s"], [0, 0, 1, "_CPPv45isinfRK5array14StreamOrDevice", "isinf"], [0, 1, 1, "_CPPv45isinfRK5array14StreamOrDevice", "isinf::a"], [0, 1, 1, "_CPPv45isinfRK5array14StreamOrDevice", "isinf::s"], [0, 0, 1, "_CPPv45isnanRK5array14StreamOrDevice", "isnan"], [0, 1, 1, "_CPPv45isnanRK5array14StreamOrDevice", "isnan::a"], [0, 1, 1, "_CPPv45isnanRK5array14StreamOrDevice", "isnan::s"], [0, 0, 1, "_CPPv48isneginfRK5array14StreamOrDevice", "isneginf"], [0, 1, 1, "_CPPv48isneginfRK5array14StreamOrDevice", "isneginf::a"], [0, 1, 1, "_CPPv48isneginfRK5array14StreamOrDevice", "isneginf::s"], [0, 0, 1, "_CPPv48isposinfRK5array14StreamOrDevice", "isposinf"], [0, 1, 1, "_CPPv48isposinfRK5array14StreamOrDevice", "isposinf::a"], [0, 1, 1, "_CPPv48isposinfRK5array14StreamOrDevice", "isposinf::s"], [0, 0, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron"], [0, 1, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron::a"], [0, 1, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron::b"], [0, 1, 1, "_CPPv44kronRK5arrayRK5array14StreamOrDevice", "kron::s"], [0, 0, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift"], [0, 1, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift::a"], [0, 1, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift::b"], [0, 1, 1, "_CPPv410left_shiftRK5arrayRK5array14StreamOrDevice", "left_shift::s"], [0, 0, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less"], [0, 1, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less::a"], [0, 1, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less::b"], [0, 1, 1, "_CPPv44lessRK5arrayRK5array14StreamOrDevice", "less::s"], [0, 0, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal"], [0, 1, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal::a"], [0, 1, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal::b"], [0, 1, 1, "_CPPv410less_equalRK5arrayRK5array14StreamOrDevice", "less_equal::s"], [0, 0, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::dtype"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::num"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::s"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::start"], [0, 1, 1, "_CPPv48linspaceddi5Dtype14StreamOrDevice", "linspace::stop"], [0, 0, 1, "_CPPv43logRK5array14StreamOrDevice", "log"], [0, 0, 1, "_CPPv45log10RK5array14StreamOrDevice", "log10"], [0, 1, 1, "_CPPv45log10RK5array14StreamOrDevice", "log10::a"], [0, 1, 1, "_CPPv45log10RK5array14StreamOrDevice", "log10::s"], [0, 0, 1, "_CPPv45log1pRK5array14StreamOrDevice", "log1p"], [0, 1, 1, "_CPPv45log1pRK5array14StreamOrDevice", "log1p::a"], [0, 1, 1, "_CPPv45log1pRK5array14StreamOrDevice", "log1p::s"], [0, 0, 1, "_CPPv44log2RK5array14StreamOrDevice", "log2"], [0, 1, 1, "_CPPv44log2RK5array14StreamOrDevice", "log2::a"], 
[0, 1, 1, "_CPPv44log2RK5array14StreamOrDevice", "log2::s"], [0, 1, 1, "_CPPv43logRK5array14StreamOrDevice", "log::a"], [0, 1, 1, "_CPPv43logRK5array14StreamOrDevice", "log::s"], [0, 0, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp"], [0, 1, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp::a"], [0, 1, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp::b"], [0, 1, 1, "_CPPv49logaddexpRK5arrayRK5array14StreamOrDevice", "logaddexp::s"], [0, 0, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::a"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::axis"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::inclusive"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::reverse"], [0, 1, 1, "_CPPv412logcumsumexpRK5arrayibb14StreamOrDevice", "logcumsumexp::s"], [0, 0, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and"], [0, 1, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and::a"], [0, 1, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and::b"], [0, 1, 1, "_CPPv411logical_andRK5arrayRK5array14StreamOrDevice", "logical_and::s"], [0, 0, 1, "_CPPv411logical_notRK5array14StreamOrDevice", "logical_not"], [0, 1, 1, "_CPPv411logical_notRK5array14StreamOrDevice", "logical_not::a"], [0, 1, 1, "_CPPv411logical_notRK5array14StreamOrDevice", "logical_not::s"], [0, 0, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or"], [0, 1, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or::a"], [0, 1, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or::b"], [0, 1, 1, "_CPPv410logical_orRK5arrayRK5array14StreamOrDevice", "logical_or::s"], [0, 0, 1, "_CPPv49logsumexpRK5array14StreamOrDevice", "logsumexp"], [0, 0, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp"], [0, 0, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp"], [0, 0, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp"], [0, 1, 1, "_CPPv49logsumexpRK5array14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::a"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::axes"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::axis"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::keepdims"], [0, 1, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp::keepdims"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::keepdims"], [0, 1, 1, "_CPPv49logsumexpRK5array14StreamOrDevice", "logsumexp::s"], [0, 1, 1, "_CPPv49logsumexpRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "logsumexp::s"], [0, 1, 1, "_CPPv49logsumexpRK5arrayb14StreamOrDevice", "logsumexp::s"], [0, 1, 1, "_CPPv49logsumexpRK5arrayib14StreamOrDevice", "logsumexp::s"], [0, 0, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul"], [0, 1, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul::a"], [0, 1, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul::b"], [0, 1, 1, "_CPPv46matmulRK5arrayRK5array14StreamOrDevice", "matmul::s"], [0, 0, 1, "_CPPv43maxRK5array14StreamOrDevice", "max"], [0, 0, 1, 
"_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max"], [0, 0, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max"], [0, 0, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max"], [0, 1, 1, "_CPPv43maxRK5array14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::a"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::axes"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::axis"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::keepdims"], [0, 1, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max::keepdims"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::keepdims"], [0, 1, 1, "_CPPv43maxRK5array14StreamOrDevice", "max::s"], [0, 1, 1, "_CPPv43maxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "max::s"], [0, 1, 1, "_CPPv43maxRK5arrayb14StreamOrDevice", "max::s"], [0, 1, 1, "_CPPv43maxRK5arrayib14StreamOrDevice", "max::s"], [0, 0, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum"], [0, 1, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum::a"], [0, 1, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum::b"], [0, 1, 1, "_CPPv47maximumRK5arrayRK5array14StreamOrDevice", "maximum::s"], [0, 0, 1, "_CPPv44meanRK5array14StreamOrDevice", "mean"], [0, 0, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean"], [0, 0, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean"], [0, 0, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean"], [0, 1, 1, "_CPPv44meanRK5array14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::a"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::axes"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::axis"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::keepdims"], [0, 1, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean::keepdims"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::keepdims"], [0, 1, 1, "_CPPv44meanRK5array14StreamOrDevice", "mean::s"], [0, 1, 1, "_CPPv44meanRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "mean::s"], [0, 1, 1, "_CPPv44meanRK5arrayb14StreamOrDevice", "mean::s"], [0, 1, 1, "_CPPv44meanRK5arrayib14StreamOrDevice", "mean::s"], [0, 0, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::arrays"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::indexing"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::s"], [0, 1, 1, "_CPPv48meshgridRKNSt6vectorI5arrayEEbRKNSt6stringE14StreamOrDevice", "meshgrid::sparse"], [0, 0, 1, "_CPPv43minRK5array14StreamOrDevice", "min"], [0, 0, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min"], [0, 0, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min"], [0, 0, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min"], [0, 1, 1, "_CPPv43minRK5array14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min::a"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::a"], [0, 1, 1, 
"_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::axes"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::axis"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::keepdims"], [0, 1, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min::keepdims"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::keepdims"], [0, 1, 1, "_CPPv43minRK5array14StreamOrDevice", "min::s"], [0, 1, 1, "_CPPv43minRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "min::s"], [0, 1, 1, "_CPPv43minRK5arrayb14StreamOrDevice", "min::s"], [0, 1, 1, "_CPPv43minRK5arrayib14StreamOrDevice", "min::s"], [0, 0, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum"], [0, 1, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum::a"], [0, 1, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum::b"], [0, 1, 1, "_CPPv47minimumRK5arrayRK5array14StreamOrDevice", "minimum::s"], [0, 0, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::a"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::destination"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::s"], [0, 1, 1, "_CPPv48moveaxisRK5arrayii14StreamOrDevice", "moveaxis::source"], [0, 0, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply"], [0, 1, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply::a"], [0, 1, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply::b"], [0, 1, 1, "_CPPv48multiplyRK5arrayRK5array14StreamOrDevice", "multiply::s"], [0, 0, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::a"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::nan"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::neginf"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::posinf"], [0, 1, 1, "_CPPv410nan_to_numRK5arrayfKNSt8optionalIfEEKNSt8optionalIfEE14StreamOrDevice", "nan_to_num::s"], [0, 0, 1, "_CPPv48negativeRK5array14StreamOrDevice", "negative"], [0, 1, 1, "_CPPv48negativeRK5array14StreamOrDevice", "negative::a"], [0, 1, 1, "_CPPv48negativeRK5array14StreamOrDevice", "negative::s"], [0, 0, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal"], [0, 1, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal::a"], [0, 1, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal::b"], [0, 1, 1, "_CPPv49not_equalRK5arrayRK5array14StreamOrDevice", "not_equal::s"], [0, 0, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::a"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::axes"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::dtype"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::inverted"], [0, 1, 1, "_CPPv418number_of_elementsRK5arrayNSt6vectorIiEEb5Dtype14StreamOrDevice", "number_of_elements::s"], [0, 0, 1, "_CPPv44onesRK5Shape14StreamOrDevice", "ones"], [0, 0, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones"], 
[0, 1, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones::dtype"], [0, 1, 1, "_CPPv44onesRK5Shape14StreamOrDevice", "ones::s"], [0, 1, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones::s"], [0, 1, 1, "_CPPv44onesRK5Shape14StreamOrDevice", "ones::shape"], [0, 1, 1, "_CPPv44onesRK5Shape5Dtype14StreamOrDevice", "ones::shape"], [0, 0, 1, "_CPPv49ones_likeRK5array14StreamOrDevice", "ones_like"], [0, 1, 1, "_CPPv49ones_likeRK5array14StreamOrDevice", "ones_like::a"], [0, 1, 1, "_CPPv49ones_likeRK5array14StreamOrDevice", "ones_like::s"], [0, 0, 1, "_CPPv4I0Ene5array1TRK5array", "operator!="], [0, 0, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!="], [0, 0, 1, "_CPPv4neRK5arrayRK5array", "operator!="], [0, 2, 1, "_CPPv4I0Ene5array1TRK5array", "operator!=::T"], [0, 2, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!=::T"], [0, 1, 1, "_CPPv4I0Ene5array1TRK5array", "operator!=::a"], [0, 1, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!=::a"], [0, 1, 1, "_CPPv4neRK5arrayRK5array", "operator!=::a"], [0, 1, 1, "_CPPv4I0Ene5array1TRK5array", "operator!=::b"], [0, 1, 1, "_CPPv4I0Ene5arrayRK5array1T", "operator!=::b"], [0, 1, 1, "_CPPv4neRK5arrayRK5array", "operator!=::b"], [0, 0, 1, "_CPPv4I0Erm5array1TRK5array", "operator%"], [0, 0, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%"], [0, 0, 1, "_CPPv4rmRK5arrayRK5array", "operator%"], [0, 2, 1, "_CPPv4I0Erm5array1TRK5array", "operator%::T"], [0, 2, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%::T"], [0, 1, 1, "_CPPv4I0Erm5array1TRK5array", "operator%::a"], [0, 1, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%::a"], [0, 1, 1, "_CPPv4rmRK5arrayRK5array", "operator%::a"], [0, 1, 1, "_CPPv4I0Erm5array1TRK5array", "operator%::b"], [0, 1, 1, "_CPPv4I0Erm5arrayRK5array1T", "operator%::b"], [0, 1, 1, "_CPPv4rmRK5arrayRK5array", "operator%::b"], [0, 0, 1, "_CPPv4anRK5arrayRK5array", "operator&"], [0, 0, 1, "_CPPv4aaRK5arrayRK5array", "operator&&"], [0, 1, 1, "_CPPv4aaRK5arrayRK5array", "operator&&::a"], [0, 1, 1, "_CPPv4aaRK5arrayRK5array", "operator&&::b"], [0, 1, 1, "_CPPv4anRK5arrayRK5array", "operator&::a"], [0, 1, 1, "_CPPv4anRK5arrayRK5array", "operator&::b"], [0, 0, 1, "_CPPv4I0Eml5array1TRK5array", "operator*"], [0, 0, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*"], [0, 0, 1, "_CPPv4mlRK5arrayRK5array", "operator*"], [0, 2, 1, "_CPPv4I0Eml5array1TRK5array", "operator*::T"], [0, 2, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*::T"], [0, 1, 1, "_CPPv4I0Eml5array1TRK5array", "operator*::a"], [0, 1, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*::a"], [0, 1, 1, "_CPPv4mlRK5arrayRK5array", "operator*::a"], [0, 1, 1, "_CPPv4I0Eml5array1TRK5array", "operator*::b"], [0, 1, 1, "_CPPv4I0Eml5arrayRK5array1T", "operator*::b"], [0, 1, 1, "_CPPv4mlRK5arrayRK5array", "operator*::b"], [0, 0, 1, "_CPPv4I0Epl5array1TRK5array", "operator+"], [0, 0, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+"], [0, 0, 1, "_CPPv4plRK5arrayRK5array", "operator+"], [0, 2, 1, "_CPPv4I0Epl5array1TRK5array", "operator+::T"], [0, 2, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+::T"], [0, 1, 1, "_CPPv4I0Epl5array1TRK5array", "operator+::a"], [0, 1, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+::a"], [0, 1, 1, "_CPPv4plRK5arrayRK5array", "operator+::a"], [0, 1, 1, "_CPPv4I0Epl5array1TRK5array", "operator+::b"], [0, 1, 1, "_CPPv4I0Epl5arrayRK5array1T", "operator+::b"], [0, 1, 1, "_CPPv4plRK5arrayRK5array", "operator+::b"], [0, 0, 1, "_CPPv4I0Emi5array1TRK5array", "operator-"], [0, 0, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-"], [0, 0, 1, "_CPPv4miRK5array", "operator-"], [0, 0, 1, "_CPPv4miRK5arrayRK5array", 
"operator-"], [0, 2, 1, "_CPPv4I0Emi5array1TRK5array", "operator-::T"], [0, 2, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-::T"], [0, 1, 1, "_CPPv4I0Emi5array1TRK5array", "operator-::a"], [0, 1, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-::a"], [0, 1, 1, "_CPPv4miRK5array", "operator-::a"], [0, 1, 1, "_CPPv4miRK5arrayRK5array", "operator-::a"], [0, 1, 1, "_CPPv4I0Emi5array1TRK5array", "operator-::b"], [0, 1, 1, "_CPPv4I0Emi5arrayRK5array1T", "operator-::b"], [0, 1, 1, "_CPPv4miRK5arrayRK5array", "operator-::b"], [0, 0, 1, "_CPPv4dvRK5arrayRK5array", "operator/"], [0, 0, 1, "_CPPv4dvRK5arrayd", "operator/"], [0, 0, 1, "_CPPv4dvdRK5array", "operator/"], [0, 1, 1, "_CPPv4dvRK5arrayRK5array", "operator/::a"], [0, 1, 1, "_CPPv4dvRK5arrayd", "operator/::a"], [0, 1, 1, "_CPPv4dvdRK5array", "operator/::a"], [0, 1, 1, "_CPPv4dvRK5arrayRK5array", "operator/::b"], [0, 1, 1, "_CPPv4dvRK5arrayd", "operator/::b"], [0, 1, 1, "_CPPv4dvdRK5array", "operator/::b"], [0, 0, 1, "_CPPv4I0Elt5array1TRK5array", "operator<"], [0, 0, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<"], [0, 0, 1, "_CPPv4ltRK5arrayRK5array", "operator<"], [0, 2, 1, "_CPPv4I0Elt5array1TRK5array", "operator<::T"], [0, 2, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<::T"], [0, 1, 1, "_CPPv4I0Elt5array1TRK5array", "operator<::a"], [0, 1, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<::a"], [0, 1, 1, "_CPPv4ltRK5arrayRK5array", "operator<::a"], [0, 1, 1, "_CPPv4I0Elt5array1TRK5array", "operator<::b"], [0, 1, 1, "_CPPv4I0Elt5arrayRK5array1T", "operator<::b"], [0, 1, 1, "_CPPv4ltRK5arrayRK5array", "operator<::b"], [0, 0, 1, "_CPPv4lsRK5arrayRK5array", "operator<<"], [0, 1, 1, "_CPPv4lsRK5arrayRK5array", "operator<<::a"], [0, 1, 1, "_CPPv4lsRK5arrayRK5array", "operator<<::b"], [0, 0, 1, "_CPPv4I0Ele5array1TRK5array", "operator<="], [0, 0, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<="], [0, 0, 1, "_CPPv4leRK5arrayRK5array", "operator<="], [0, 2, 1, "_CPPv4I0Ele5array1TRK5array", "operator<=::T"], [0, 2, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<=::T"], [0, 1, 1, "_CPPv4I0Ele5array1TRK5array", "operator<=::a"], [0, 1, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<=::a"], [0, 1, 1, "_CPPv4leRK5arrayRK5array", "operator<=::a"], [0, 1, 1, "_CPPv4I0Ele5array1TRK5array", "operator<=::b"], [0, 1, 1, "_CPPv4I0Ele5arrayRK5array1T", "operator<=::b"], [0, 1, 1, "_CPPv4leRK5arrayRK5array", "operator<=::b"], [0, 0, 1, "_CPPv4I0Eeq5array1TRK5array", "operator=="], [0, 0, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator=="], [0, 0, 1, "_CPPv4eqRK5arrayRK5array", "operator=="], [0, 2, 1, "_CPPv4I0Eeq5array1TRK5array", "operator==::T"], [0, 2, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator==::T"], [0, 1, 1, "_CPPv4I0Eeq5array1TRK5array", "operator==::a"], [0, 1, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator==::a"], [0, 1, 1, "_CPPv4eqRK5arrayRK5array", "operator==::a"], [0, 1, 1, "_CPPv4I0Eeq5array1TRK5array", "operator==::b"], [0, 1, 1, "_CPPv4I0Eeq5arrayRK5array1T", "operator==::b"], [0, 1, 1, "_CPPv4eqRK5arrayRK5array", "operator==::b"], [0, 0, 1, "_CPPv4I0Egt5array1TRK5array", "operator>"], [0, 0, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>"], [0, 0, 1, "_CPPv4gtRK5arrayRK5array", "operator>"], [0, 2, 1, "_CPPv4I0Egt5array1TRK5array", "operator>::T"], [0, 2, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>::T"], [0, 1, 1, "_CPPv4I0Egt5array1TRK5array", "operator>::a"], [0, 1, 1, "_CPPv4I0Egt5arrayRK5array1T", "operator>::a"], [0, 1, 1, "_CPPv4gtRK5arrayRK5array", "operator>::a"], [0, 1, 1, "_CPPv4I0Egt5array1TRK5array", "operator>::b"], [0, 1, 1, 
"_CPPv4I0Egt5arrayRK5array1T", "operator>::b"], [0, 1, 1, "_CPPv4gtRK5arrayRK5array", "operator>::b"], [0, 0, 1, "_CPPv4I0Ege5array1TRK5array", "operator>="], [0, 0, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>="], [0, 0, 1, "_CPPv4geRK5arrayRK5array", "operator>="], [0, 2, 1, "_CPPv4I0Ege5array1TRK5array", "operator>=::T"], [0, 2, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>=::T"], [0, 1, 1, "_CPPv4I0Ege5array1TRK5array", "operator>=::a"], [0, 1, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>=::a"], [0, 1, 1, "_CPPv4geRK5arrayRK5array", "operator>=::a"], [0, 1, 1, "_CPPv4I0Ege5array1TRK5array", "operator>=::b"], [0, 1, 1, "_CPPv4I0Ege5arrayRK5array1T", "operator>=::b"], [0, 1, 1, "_CPPv4geRK5arrayRK5array", "operator>=::b"], [0, 0, 1, "_CPPv4rsRK5arrayRK5array", "operator>>"], [0, 1, 1, "_CPPv4rsRK5arrayRK5array", "operator>>::a"], [0, 1, 1, "_CPPv4rsRK5arrayRK5array", "operator>>::b"], [0, 0, 1, "_CPPv4eoRK5arrayRK5array", "operator^"], [0, 1, 1, "_CPPv4eoRK5arrayRK5array", "operator^::a"], [0, 1, 1, "_CPPv4eoRK5arrayRK5array", "operator^::b"], [0, 0, 1, "_CPPv4orRK5arrayRK5array", "operator|"], [0, 1, 1, "_CPPv4orRK5arrayRK5array", "operator|::a"], [0, 1, 1, "_CPPv4orRK5arrayRK5array", "operator|::b"], [0, 0, 1, "_CPPv4ooRK5arrayRK5array", "operator||"], [0, 1, 1, "_CPPv4ooRK5arrayRK5array", "operator||::a"], [0, 1, 1, "_CPPv4ooRK5arrayRK5array", "operator||::b"], [0, 0, 1, "_CPPv4coRK5array", "operator~"], [0, 1, 1, "_CPPv4coRK5array", "operator~::a"], [0, 0, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer"], [0, 1, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer::a"], [0, 1, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer::b"], [0, 1, 1, "_CPPv45outerRK5arrayRK5array14StreamOrDevice", "outer::s"], [0, 0, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 0, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 0, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 0, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::a"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::axes"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::high_pad_size"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::low_pad_size"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::mode"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, 
"_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_value"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_width"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_width"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::pad_width"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt4pairIiiEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorINSt4pairIiiEEEERK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 1, 1, "_CPPv43padRK5arrayRKNSt6vectorIiEERK5ShapeRK5ShapeRK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 1, 1, "_CPPv43padRK5arrayiRK5arrayRKNSt6stringE14StreamOrDevice", "pad::s"], [0, 0, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition"], [0, 0, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition"], [0, 1, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition::a"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::a"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::axis"], [0, 1, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition::kth"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::kth"], [0, 1, 1, "_CPPv49partitionRK5arrayi14StreamOrDevice", "partition::s"], [0, 1, 1, "_CPPv49partitionRK5arrayii14StreamOrDevice", "partition::s"], [0, 0, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power"], [0, 1, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power::a"], [0, 1, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power::b"], [0, 1, 1, "_CPPv45powerRK5arrayRK5array14StreamOrDevice", "power::s"], [0, 0, 1, "_CPPv44prodRK5array14StreamOrDevice", "prod"], [0, 0, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod"], [0, 0, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod"], [0, 0, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod"], [0, 1, 1, "_CPPv44prodRK5array14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::a"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::axes"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::axis"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::keepdims"], [0, 1, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod::keepdims"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::keepdims"], [0, 1, 1, "_CPPv44prodRK5array14StreamOrDevice", "prod::s"], [0, 1, 1, "_CPPv44prodRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "prod::s"], [0, 1, 1, "_CPPv44prodRK5arrayb14StreamOrDevice", "prod::s"], [0, 1, 1, "_CPPv44prodRK5arrayib14StreamOrDevice", "prod::s"], [0, 0, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::a"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::axis"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::indices"], [0, 1, 1, "_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::s"], [0, 1, 1, 
"_CPPv414put_along_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "put_along_axis::values"], [0, 0, 1, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", "quantize"], [0, 1, 1, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", "quantize::bits"], [0, 1, 1, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", "quantize::group_size"], [0, 1, 1, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", "quantize::mode"], [0, 1, 1, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", "quantize::s"], [0, 1, 1, "_CPPv48quantizeRK5arrayiiRKNSt6stringE14StreamOrDevice", "quantize::w"], [0, 0, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::biases"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::bits"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::group_size"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::mode"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::s"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::scales"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::transpose"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::w"], [0, 1, 1, "_CPPv416quantized_matmul5array5array5arrayNSt8optionalI5arrayEEbiiRKNSt6stringE14StreamOrDevice", "quantized_matmul::x"], [0, 0, 1, "_CPPv47radiansRK5array14StreamOrDevice", "radians"], [0, 1, 1, "_CPPv47radiansRK5array14StreamOrDevice", "radians::a"], [0, 1, 1, "_CPPv47radiansRK5array14StreamOrDevice", "radians::s"], [0, 0, 1, "_CPPv44realRK5array14StreamOrDevice", "real"], [0, 1, 1, "_CPPv44realRK5array14StreamOrDevice", "real::a"], [0, 1, 1, "_CPPv44realRK5array14StreamOrDevice", "real::s"], [0, 0, 1, "_CPPv410reciprocalRK5array14StreamOrDevice", "reciprocal"], [0, 1, 1, "_CPPv410reciprocalRK5array14StreamOrDevice", "reciprocal::a"], [0, 1, 1, "_CPPv410reciprocalRK5array14StreamOrDevice", "reciprocal::s"], [0, 0, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder"], [0, 1, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder::a"], [0, 1, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder::b"], [0, 1, 1, "_CPPv49remainderRK5arrayRK5array14StreamOrDevice", "remainder::s"], [0, 0, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat"], [0, 0, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat"], [0, 1, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat::arr"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::arr"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::axis"], [0, 1, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat::repeats"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::repeats"], [0, 1, 1, "_CPPv46repeatRK5arrayi14StreamOrDevice", "repeat::s"], [0, 1, 1, "_CPPv46repeatRK5arrayii14StreamOrDevice", "repeat::s"], [0, 0, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", 
"reshape"], [0, 1, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape::a"], [0, 1, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape::s"], [0, 1, 1, "_CPPv47reshapeRK5array5Shape14StreamOrDevice", "reshape::shape"], [0, 0, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift"], [0, 1, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift::a"], [0, 1, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift::b"], [0, 1, 1, "_CPPv411right_shiftRK5arrayRK5array14StreamOrDevice", "right_shift::s"], [0, 0, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll"], [0, 0, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::a"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::axes"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::axes"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::axis"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::axis"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::s"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shape14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayRK5ShapeRKNSt6vectorIiEE14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayRK5Shapei14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayi14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayiRKNSt6vectorIiEE14StreamOrDevice", "roll::shift"], [0, 1, 1, "_CPPv44rollRK5arrayii14StreamOrDevice", "roll::shift"], [0, 0, 1, "_CPPv45roundRK5array14StreamOrDevice", "round"], [0, 0, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round"], [0, 1, 1, "_CPPv45roundRK5array14StreamOrDevice", "round::a"], [0, 1, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round::a"], [0, 1, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round::decimals"], [0, 1, 1, "_CPPv45roundRK5array14StreamOrDevice", "round::s"], [0, 1, 1, "_CPPv45roundRK5arrayi14StreamOrDevice", "round::s"], [0, 0, 1, "_CPPv45rsqrtRK5array14StreamOrDevice", "rsqrt"], [0, 1, 1, "_CPPv45rsqrtRK5array14StreamOrDevice", "rsqrt::a"], [0, 1, 1, "_CPPv45rsqrtRK5array14StreamOrDevice", "rsqrt::s"], [0, 0, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter"], [0, 0, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::a"], [0, 1, 1, 
"_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::a"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::axes"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::axis"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::indices"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::indices"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::s"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::s"], [0, 1, 1, "_CPPv47scatterRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter::updates"], [0, 1, 1, "_CPPv47scatterRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter::updates"], [0, 0, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add"], [0, 0, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::a"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::a"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::axes"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::axis"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::indices"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::indices"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::s"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::s"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add::updates"], [0, 1, 1, "_CPPv411scatter_addRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_add::updates"], [0, 0, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::a"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::axis"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::indices"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::s"], [0, 1, 1, "_CPPv416scatter_add_axisRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_add_axis::values"], [0, 0, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max"], [0, 0, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::a"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::a"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::axes"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::axis"], [0, 1, 1, 
"_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::indices"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::indices"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::s"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::s"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_max::updates"], [0, 1, 1, "_CPPv411scatter_maxRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_max::updates"], [0, 0, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min"], [0, 0, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::a"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::a"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::axes"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::axis"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::indices"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::indices"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::s"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::s"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_min::updates"], [0, 1, 1, "_CPPv411scatter_minRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_min::updates"], [0, 0, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod"], [0, 0, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::a"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::a"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::axes"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::axis"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::indices"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::indices"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::s"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::s"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRK5arrayRK5arrayi14StreamOrDevice", "scatter_prod::updates"], [0, 1, 1, "_CPPv412scatter_prodRK5arrayRKNSt6vectorI5arrayEERK5arrayRKNSt6vectorIiEE14StreamOrDevice", "scatter_prod::updates"], [0, 0, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::a"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", 
"segmented_mm::b"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::s"], [0, 1, 1, "_CPPv412segmented_mm5array5array5array14StreamOrDevice", "segmented_mm::segments"], [0, 0, 1, "_CPPv47sigmoidRK5array14StreamOrDevice", "sigmoid"], [0, 1, 1, "_CPPv47sigmoidRK5array14StreamOrDevice", "sigmoid::a"], [0, 1, 1, "_CPPv47sigmoidRK5array14StreamOrDevice", "sigmoid::s"], [0, 0, 1, "_CPPv44signRK5array14StreamOrDevice", "sign"], [0, 1, 1, "_CPPv44signRK5array14StreamOrDevice", "sign::a"], [0, 1, 1, "_CPPv44signRK5array14StreamOrDevice", "sign::s"], [0, 0, 1, "_CPPv43sinRK5array14StreamOrDevice", "sin"], [0, 1, 1, "_CPPv43sinRK5array14StreamOrDevice", "sin::a"], [0, 1, 1, "_CPPv43sinRK5array14StreamOrDevice", "sin::s"], [0, 0, 1, "_CPPv44sinhRK5array14StreamOrDevice", "sinh"], [0, 1, 1, "_CPPv44sinhRK5array14StreamOrDevice", "sinh::a"], [0, 1, 1, "_CPPv44sinhRK5array14StreamOrDevice", "sinh::s"], [0, 0, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice"], [0, 0, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice"], [0, 0, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice"], [0, 0, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::a"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::axes"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::s"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::slice_size"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5arrayRK5arrayNSt6vectorIiEE5Shape14StreamOrDevice", "slice::start"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape14StreamOrDevice", "slice::stop"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::stop"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::stop"], [0, 1, 1, "_CPPv45sliceRK5array5Shape5Shape5Shape14StreamOrDevice", "slice::strides"], [0, 1, 1, "_CPPv45sliceRK5arrayNSt16initializer_listIiEE5Shape5Shape14StreamOrDevice", "slice::strides"], [0, 0, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update"], [0, 0, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update"], [0, 0, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::axes"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::s"], [0, 1, 1, 
"_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::s"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::s"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::src"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::src"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::src"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::start"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::start"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::start"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::stop"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::stop"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::strides"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape14StreamOrDevice", "slice_update::update"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5array5Shape5Shape5Shape14StreamOrDevice", "slice_update::update"], [0, 1, 1, "_CPPv412slice_updateRK5arrayRK5arrayRK5arrayNSt6vectorIiEE14StreamOrDevice", "slice_update::update"], [0, 0, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax"], [0, 0, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax"], [0, 0, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::a"], [0, 1, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax::a"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::a"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::axes"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::axis"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::precise"], [0, 1, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax::precise"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::precise"], [0, 1, 1, "_CPPv47softmaxRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "softmax::s"], [0, 1, 1, "_CPPv47softmaxRK5arrayb14StreamOrDevice", "softmax::s"], [0, 1, 1, "_CPPv47softmaxRK5arrayib14StreamOrDevice", "softmax::s"], [0, 0, 1, "_CPPv44sortRK5array14StreamOrDevice", "sort"], [0, 0, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort"], [0, 1, 1, "_CPPv44sortRK5array14StreamOrDevice", "sort::a"], [0, 1, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort::a"], [0, 1, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort::axis"], [0, 1, 1, "_CPPv44sortRK5array14StreamOrDevice", "sort::s"], [0, 1, 1, "_CPPv44sortRK5arrayi14StreamOrDevice", "sort::s"], [0, 0, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split"], [0, 0, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split"], [0, 0, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split"], [0, 0, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::a"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", 
"split::axis"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::axis"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split::indices"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::indices"], [0, 1, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split::num_splits"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::num_splits"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shape14StreamOrDevice", "split::s"], [0, 1, 1, "_CPPv45splitRK5arrayRK5Shapei14StreamOrDevice", "split::s"], [0, 1, 1, "_CPPv45splitRK5arrayi14StreamOrDevice", "split::s"], [0, 1, 1, "_CPPv45splitRK5arrayii14StreamOrDevice", "split::s"], [0, 0, 1, "_CPPv44sqrtRK5array14StreamOrDevice", "sqrt"], [0, 1, 1, "_CPPv44sqrtRK5array14StreamOrDevice", "sqrt::a"], [0, 1, 1, "_CPPv44sqrtRK5array14StreamOrDevice", "sqrt::s"], [0, 0, 1, "_CPPv46squareRK5array14StreamOrDevice", "square"], [0, 1, 1, "_CPPv46squareRK5array14StreamOrDevice", "square::a"], [0, 1, 1, "_CPPv46squareRK5array14StreamOrDevice", "square::s"], [0, 0, 1, "_CPPv47squeezeRK5array14StreamOrDevice", "squeeze"], [0, 0, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze"], [0, 0, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze"], [0, 1, 1, "_CPPv47squeezeRK5array14StreamOrDevice", "squeeze::a"], [0, 1, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze::a"], [0, 1, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze::a"], [0, 1, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze::axes"], [0, 1, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze::axis"], [0, 1, 1, "_CPPv47squeezeRK5array14StreamOrDevice", "squeeze::s"], [0, 1, 1, "_CPPv47squeezeRK5arrayRKNSt6vectorIiEE14StreamOrDevice", "squeeze::s"], [0, 1, 1, "_CPPv47squeezeRK5arrayi14StreamOrDevice", "squeeze::s"], [0, 0, 1, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", "stack"], [0, 0, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", "stack::arrays"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack::arrays"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack::axis"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEE14StreamOrDevice", "stack::s"], [0, 1, 1, "_CPPv45stackRKNSt6vectorI5arrayEEi14StreamOrDevice", "stack::s"], [0, 0, 1, "_CPPv4StRK5array14StreamOrDevice", "std"], [0, 0, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std"], [0, 0, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std"], [0, 0, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std"], [0, 1, 1, "_CPPv4StRK5array14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::a"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::axes"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::axis"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::ddof"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::ddof"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::ddof"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "std::keepdims"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::keepdims"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::keepdims"], [0, 1, 1, "_CPPv4StRK5array14StreamOrDevice", "std::s"], [0, 1, 1, "_CPPv4StRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", 
"std::s"], [0, 1, 1, "_CPPv4StRK5arraybi14StreamOrDevice", "std::s"], [0, 1, 1, "_CPPv4StRK5arrayibi14StreamOrDevice", "std::s"], [0, 0, 1, "_CPPv413stop_gradientRK5array14StreamOrDevice", "stop_gradient"], [0, 1, 1, "_CPPv413stop_gradientRK5array14StreamOrDevice", "stop_gradient::a"], [0, 1, 1, "_CPPv413stop_gradientRK5array14StreamOrDevice", "stop_gradient::s"], [0, 0, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract"], [0, 1, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract::a"], [0, 1, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract::b"], [0, 1, 1, "_CPPv48subtractRK5arrayRK5array14StreamOrDevice", "subtract::s"], [0, 0, 1, "_CPPv43sumRK5array14StreamOrDevice", "sum"], [0, 0, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum"], [0, 0, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum"], [0, 0, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum"], [0, 1, 1, "_CPPv43sumRK5array14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::a"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::axes"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::axis"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::keepdims"], [0, 1, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum::keepdims"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::keepdims"], [0, 1, 1, "_CPPv43sumRK5array14StreamOrDevice", "sum::s"], [0, 1, 1, "_CPPv43sumRK5arrayRKNSt6vectorIiEEb14StreamOrDevice", "sum::s"], [0, 1, 1, "_CPPv43sumRK5arrayb14StreamOrDevice", "sum::s"], [0, 1, 1, "_CPPv43sumRK5arrayib14StreamOrDevice", "sum::s"], [0, 0, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::a"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::axis1"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::axis2"], [0, 1, 1, "_CPPv48swapaxesRK5arrayii14StreamOrDevice", "swapaxes::s"], [0, 0, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take"], [0, 0, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take"], [0, 0, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take"], [0, 0, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take"], [0, 1, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::a"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::axis"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::axis"], [0, 1, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take::index"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::index"], [0, 1, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take::indices"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::indices"], [0, 1, 1, "_CPPv44takeRK5arrayRK5array14StreamOrDevice", "take::s"], [0, 1, 1, "_CPPv44takeRK5arrayRK5arrayi14StreamOrDevice", "take::s"], [0, 1, 1, "_CPPv44takeRK5arrayi14StreamOrDevice", "take::s"], [0, 1, 1, "_CPPv44takeRK5arrayii14StreamOrDevice", "take::s"], [0, 0, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::a"], [0, 1, 1, 
"_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::axis"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::indices"], [0, 1, 1, "_CPPv415take_along_axisRK5arrayRK5arrayi14StreamOrDevice", "take_along_axis::s"], [0, 0, 1, "_CPPv43tanRK5array14StreamOrDevice", "tan"], [0, 1, 1, "_CPPv43tanRK5array14StreamOrDevice", "tan::a"], [0, 1, 1, "_CPPv43tanRK5array14StreamOrDevice", "tan::s"], [0, 0, 1, "_CPPv44tanhRK5array14StreamOrDevice", "tanh"], [0, 1, 1, "_CPPv44tanhRK5array14StreamOrDevice", "tanh::a"], [0, 1, 1, "_CPPv44tanhRK5array14StreamOrDevice", "tanh::s"], [0, 0, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot"], [0, 0, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::a"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::a"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::axes_a"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::axes_b"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::axis"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::b"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::b"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayKi14StreamOrDevice", "tensordot::s"], [0, 1, 1, "_CPPv49tensordotRK5arrayRK5arrayRKNSt6vectorIiEERKNSt6vectorIiEE14StreamOrDevice", "tensordot::s"], [0, 0, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile"], [0, 1, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile::arr"], [0, 1, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile::reps"], [0, 1, 1, "_CPPv44tileRK5arrayNSt6vectorIiEE14StreamOrDevice", "tile::s"], [0, 0, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk"], [0, 0, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk"], [0, 1, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk::a"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::a"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::axis"], [0, 1, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk::k"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::k"], [0, 1, 1, "_CPPv44topkRK5arrayi14StreamOrDevice", "topk::s"], [0, 1, 1, "_CPPv44topkRK5arrayii14StreamOrDevice", "topk::s"], [0, 0, 1, "_CPPv45traceRK5array14StreamOrDevice", "trace"], [0, 0, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace"], [0, 0, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace"], [0, 1, 1, "_CPPv45traceRK5array14StreamOrDevice", "trace::a"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::a"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::a"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::axis1"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::axis1"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::axis2"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::axis2"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::dtype"], [0, 1, 1, "_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::offset"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::offset"], [0, 1, 1, "_CPPv45traceRK5array14StreamOrDevice", "trace::s"], [0, 1, 1, 
"_CPPv45traceRK5arrayiii14StreamOrDevice", "trace::s"], [0, 1, 1, "_CPPv45traceRK5arrayiii5Dtype14StreamOrDevice", "trace::s"], [0, 0, 1, "_CPPv49transposeRK5array14StreamOrDevice", "transpose"], [0, 0, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose"], [0, 0, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose"], [0, 1, 1, "_CPPv49transposeRK5array14StreamOrDevice", "transpose::a"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose::a"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose::a"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose::axes"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose::axes"], [0, 1, 1, "_CPPv49transposeRK5array14StreamOrDevice", "transpose::s"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt16initializer_listIiEE14StreamOrDevice", "transpose::s"], [0, 1, 1, "_CPPv49transposeRK5arrayNSt6vectorIiEE14StreamOrDevice", "transpose::s"], [0, 0, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri"], [0, 0, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::k"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::m"], [0, 1, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri::n"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::n"], [0, 1, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri::s"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::s"], [0, 1, 1, "_CPPv43trii5Dtype14StreamOrDevice", "tri::type"], [0, 1, 1, "_CPPv43triiii5Dtype14StreamOrDevice", "tri::type"], [0, 0, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril"], [0, 1, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril::k"], [0, 1, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril::s"], [0, 1, 1, "_CPPv44tril5arrayi14StreamOrDevice", "tril::x"], [0, 0, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu"], [0, 1, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu::k"], [0, 1, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu::s"], [0, 1, 1, "_CPPv44triu5arrayi14StreamOrDevice", "triu::x"], [0, 0, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::a"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::axis"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::s"], [0, 1, 1, "_CPPv49unflattenRK5arrayi5Shape14StreamOrDevice", "unflatten::shape"], [0, 0, 1, "_CPPv43varRK5array14StreamOrDevice", "var"], [0, 0, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var"], [0, 0, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var"], [0, 0, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var"], [0, 1, 1, "_CPPv43varRK5array14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::a"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::axes"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::axis"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::ddof"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::ddof"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::ddof"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::keepdims"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::keepdims"], [0, 1, 1, 
"_CPPv43varRK5arrayibi14StreamOrDevice", "var::keepdims"], [0, 1, 1, "_CPPv43varRK5array14StreamOrDevice", "var::s"], [0, 1, 1, "_CPPv43varRK5arrayRKNSt6vectorIiEEbi14StreamOrDevice", "var::s"], [0, 1, 1, "_CPPv43varRK5arraybi14StreamOrDevice", "var::s"], [0, 1, 1, "_CPPv43varRK5arrayibi14StreamOrDevice", "var::s"], [0, 0, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view"], [0, 1, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view::a"], [0, 1, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view::dtype"], [0, 1, 1, "_CPPv44viewRK5arrayRK5Dtype14StreamOrDevice", "view::s"], [0, 0, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::condition"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::s"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::x"], [0, 1, 1, "_CPPv45whereRK5arrayRK5arrayRK5array14StreamOrDevice", "where::y"], [0, 0, 1, "_CPPv45zerosRK5Shape14StreamOrDevice", "zeros"], [0, 0, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros"], [0, 1, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros::dtype"], [0, 1, 1, "_CPPv45zerosRK5Shape14StreamOrDevice", "zeros::s"], [0, 1, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros::s"], [0, 1, 1, "_CPPv45zerosRK5Shape14StreamOrDevice", "zeros::shape"], [0, 1, 1, "_CPPv45zerosRK5Shape5Dtype14StreamOrDevice", "zeros::shape"], [0, 0, 1, "_CPPv410zeros_likeRK5array14StreamOrDevice", "zeros_like"], [0, 1, 1, "_CPPv410zeros_likeRK5array14StreamOrDevice", "zeros_like::a"], [0, 1, 1, "_CPPv410zeros_likeRK5array14StreamOrDevice", "zeros_like::s"]], "mlx.core": [[10, 3, 1, "", "Device"], [11, 3, 1, "", "Dtype"], [12, 3, 1, "", "DtypeCategory"], [344, 3, 1, "", "Stream"], [13, 5, 1, "", "abs"], [14, 5, 1, "", "add"], [15, 5, 1, "", "addmm"], [16, 5, 1, "", "all"], [17, 5, 1, "", "allclose"], [18, 5, 1, "", "any"], [19, 5, 1, "", "arange"], [20, 5, 1, "", "arccos"], [21, 5, 1, "", "arccosh"], [22, 5, 1, "", "arcsin"], [23, 5, 1, "", "arcsinh"], [24, 5, 1, "", "arctan"], [25, 5, 1, "", "arctan2"], [26, 5, 1, "", "arctanh"], [27, 5, 1, "", "argmax"], [28, 5, 1, "", "argmin"], [29, 5, 1, "", "argpartition"], [30, 5, 1, "", "argsort"], [31, 3, 1, "", "array"], [86, 5, 1, "", "array_equal"], [87, 5, 1, "", "as_strided"], [88, 5, 1, "", "async_eval"], [89, 5, 1, "", "atleast_1d"], [90, 5, 1, "", "atleast_2d"], [91, 5, 1, "", "atleast_3d"], [92, 5, 1, "", "bitwise_and"], [93, 5, 1, "", "bitwise_invert"], [94, 5, 1, "", "bitwise_or"], [95, 5, 1, "", "bitwise_xor"], [96, 5, 1, "", "block_masked_mm"], [97, 5, 1, "", "broadcast_arrays"], [98, 5, 1, "", "broadcast_to"], [99, 5, 1, "", "ceil"], [100, 5, 1, "", "clear_cache"], [101, 5, 1, "", "clip"], [102, 5, 1, "", "compile"], [103, 5, 1, "", "concatenate"], [104, 5, 1, "", "conj"], [105, 5, 1, "", "conjugate"], [106, 5, 1, "", "contiguous"], [107, 5, 1, "", "conv1d"], [108, 5, 1, "", "conv2d"], [109, 5, 1, "", "conv3d"], [110, 5, 1, "", "conv_general"], [111, 5, 1, "", "conv_transpose1d"], [112, 5, 1, "", "conv_transpose2d"], [113, 5, 1, "", "conv_transpose3d"], [114, 5, 1, "", "convolve"], [115, 5, 1, "", "cos"], [116, 5, 1, "", "cosh"], [118, 5, 1, "", "cummax"], [119, 5, 1, "", "cummin"], [120, 5, 1, "", "cumprod"], [121, 5, 1, "", "cumsum"], [122, 3, 1, "", "custom_function"], [123, 5, 1, "", "default_device"], [124, 5, 1, "", "default_stream"], [125, 5, 1, "", "degrees"], [126, 5, 1, "", "dequantize"], [127, 5, 1, "", "diag"], [128, 5, 1, 
"", "diagonal"], [129, 5, 1, "", "disable_compile"], [138, 5, 1, "", "divide"], [139, 5, 1, "", "divmod"], [140, 5, 1, "", "einsum"], [141, 5, 1, "", "einsum_path"], [142, 5, 1, "", "enable_compile"], [143, 5, 1, "", "equal"], [144, 5, 1, "", "erf"], [145, 5, 1, "", "erfinv"], [146, 5, 1, "", "eval"], [147, 5, 1, "", "exp"], [148, 5, 1, "", "expand_dims"], [149, 5, 1, "", "expm1"], [150, 5, 1, "", "export_function"], [151, 5, 1, "", "export_to_dot"], [152, 5, 1, "", "exporter"], [153, 5, 1, "", "eye"], [174, 3, 1, "", "finfo"], [175, 5, 1, "", "flatten"], [176, 5, 1, "", "floor"], [177, 5, 1, "", "floor_divide"], [178, 5, 1, "", "full"], [179, 5, 1, "", "gather_mm"], [180, 5, 1, "", "gather_qmm"], [181, 5, 1, "", "get_active_memory"], [182, 5, 1, "", "get_cache_memory"], [183, 5, 1, "", "get_peak_memory"], [184, 5, 1, "", "grad"], [185, 5, 1, "", "greater"], [186, 5, 1, "", "greater_equal"], [187, 5, 1, "", "hadamard_transform"], [188, 5, 1, "", "identity"], [189, 5, 1, "", "imag"], [190, 5, 1, "", "import_function"], [191, 5, 1, "", "inner"], [192, 5, 1, "", "isclose"], [193, 5, 1, "", "isfinite"], [194, 5, 1, "", "isinf"], [195, 5, 1, "", "isnan"], [196, 5, 1, "", "isneginf"], [197, 5, 1, "", "isposinf"], [198, 5, 1, "", "issubdtype"], [199, 5, 1, "", "jvp"], [200, 5, 1, "", "kron"], [201, 5, 1, "", "left_shift"], [202, 5, 1, "", "less"], [203, 5, 1, "", "less_equal"], [221, 5, 1, "", "linspace"], [222, 5, 1, "", "load"], [223, 5, 1, "", "log"], [224, 5, 1, "", "log10"], [225, 5, 1, "", "log1p"], [226, 5, 1, "", "log2"], [227, 5, 1, "", "logaddexp"], [228, 5, 1, "", "logcumsumexp"], [229, 5, 1, "", "logical_and"], [230, 5, 1, "", "logical_not"], [231, 5, 1, "", "logical_or"], [232, 5, 1, "", "logsumexp"], [233, 5, 1, "", "matmul"], [234, 5, 1, "", "max"], [235, 5, 1, "", "maximum"], [236, 5, 1, "", "mean"], [237, 5, 1, "", "meshgrid"], [242, 5, 1, "", "min"], [243, 5, 1, "", "minimum"], [244, 5, 1, "", "moveaxis"], [245, 5, 1, "", "multiply"], [246, 5, 1, "", "nan_to_num"], [247, 5, 1, "", "negative"], [248, 5, 1, "", "new_stream"], [249, 5, 1, "", "not_equal"], [250, 5, 1, "", "ones"], [251, 5, 1, "", "ones_like"], [252, 5, 1, "", "outer"], [253, 5, 1, "", "pad"], [254, 5, 1, "", "partition"], [255, 5, 1, "", "power"], [256, 5, 1, "", "prod"], [257, 5, 1, "", "put_along_axis"], [258, 5, 1, "", "quantize"], [259, 5, 1, "", "quantized_matmul"], [260, 5, 1, "", "radians"], [274, 5, 1, "", "real"], [275, 5, 1, "", "reciprocal"], [276, 5, 1, "", "remainder"], [277, 5, 1, "", "repeat"], [278, 5, 1, "", "reset_peak_memory"], [279, 5, 1, "", "reshape"], [280, 5, 1, "", "right_shift"], [281, 5, 1, "", "roll"], [282, 5, 1, "", "round"], [283, 5, 1, "", "rsqrt"], [284, 5, 1, "", "save"], [285, 5, 1, "", "save_gguf"], [286, 5, 1, "", "save_safetensors"], [287, 5, 1, "", "savez"], [288, 5, 1, "", "savez_compressed"], [289, 5, 1, "", "set_cache_limit"], [290, 5, 1, "", "set_default_device"], [291, 5, 1, "", "set_default_stream"], [292, 5, 1, "", "set_memory_limit"], [293, 5, 1, "", "set_wired_limit"], [294, 5, 1, "", "sigmoid"], [295, 5, 1, "", "sign"], [296, 5, 1, "", "sin"], [297, 5, 1, "", "sinh"], [298, 5, 1, "", "slice"], [299, 5, 1, "", "slice_update"], [300, 5, 1, "", "softmax"], [301, 5, 1, "", "sort"], [302, 5, 1, "", "split"], [303, 5, 1, "", "sqrt"], [304, 5, 1, "", "square"], [305, 5, 1, "", "squeeze"], [306, 5, 1, "", "stack"], [307, 5, 1, "", "std"], [308, 5, 1, "", "stop_gradient"], [309, 5, 1, "", "stream"], [310, 5, 1, "", "subtract"], [311, 5, 1, "", "sum"], [312, 5, 1, "", 
"swapaxes"], [313, 5, 1, "", "synchronize"], [314, 5, 1, "", "take"], [315, 5, 1, "", "take_along_axis"], [316, 5, 1, "", "tan"], [317, 5, 1, "", "tanh"], [318, 5, 1, "", "tensordot"], [319, 5, 1, "", "tile"], [320, 5, 1, "", "topk"], [321, 5, 1, "", "trace"], [322, 5, 1, "", "transpose"], [323, 5, 1, "", "tri"], [324, 5, 1, "", "tril"], [325, 5, 1, "", "triu"], [326, 5, 1, "", "unflatten"], [327, 5, 1, "", "value_and_grad"], [328, 5, 1, "", "var"], [329, 5, 1, "", "view"], [330, 5, 1, "", "vjp"], [331, 5, 1, "", "vmap"], [332, 5, 1, "", "where"], [333, 5, 1, "", "zeros"], [334, 5, 1, "", "zeros_like"]], "mlx.core.Device": [[10, 4, 1, "", "__init__"]], "mlx.core.Dtype": [[11, 4, 1, "", "__init__"]], "mlx.core.DtypeCategory": [[12, 4, 1, "", "__init__"]], "mlx.core.Stream": [[344, 4, 1, "", "__init__"]], "mlx.core.array": [[32, 6, 1, "", "T"], [31, 4, 1, "", "__init__"], [33, 4, 1, "", "abs"], [34, 4, 1, "", "all"], [35, 4, 1, "", "any"], [36, 4, 1, "", "argmax"], [37, 4, 1, "", "argmin"], [38, 4, 1, "", "astype"], [39, 6, 1, "", "at"], [40, 4, 1, "", "conj"], [41, 4, 1, "", "cos"], [42, 4, 1, "", "cummax"], [43, 4, 1, "", "cummin"], [44, 4, 1, "", "cumprod"], [45, 4, 1, "", "cumsum"], [46, 4, 1, "", "diag"], [47, 4, 1, "", "diagonal"], [48, 6, 1, "", "dtype"], [49, 4, 1, "", "exp"], [50, 4, 1, "", "flatten"], [51, 6, 1, "", "imag"], [52, 4, 1, "", "item"], [53, 6, 1, "", "itemsize"], [54, 4, 1, "", "log"], [55, 4, 1, "", "log10"], [56, 4, 1, "", "log1p"], [57, 4, 1, "", "log2"], [58, 4, 1, "", "logcumsumexp"], [59, 4, 1, "", "logsumexp"], [60, 4, 1, "", "max"], [61, 4, 1, "", "mean"], [62, 4, 1, "", "min"], [63, 4, 1, "", "moveaxis"], [64, 6, 1, "", "nbytes"], [65, 6, 1, "", "ndim"], [66, 4, 1, "", "prod"], [67, 6, 1, "", "real"], [68, 4, 1, "", "reciprocal"], [69, 4, 1, "", "reshape"], [70, 4, 1, "", "round"], [71, 4, 1, "", "rsqrt"], [72, 6, 1, "", "shape"], [73, 4, 1, "", "sin"], [74, 6, 1, "", "size"], [75, 4, 1, "", "split"], [76, 4, 1, "", "sqrt"], [77, 4, 1, "", "square"], [78, 4, 1, "", "squeeze"], [79, 4, 1, "", "std"], [80, 4, 1, "", "sum"], [81, 4, 1, "", "swapaxes"], [82, 4, 1, "", "tolist"], [83, 4, 1, "", "transpose"], [84, 4, 1, "", "var"], [85, 4, 1, "", "view"]], "mlx.core.cuda": [[117, 5, 1, "", "is_available"]], "mlx.core.custom_function": [[122, 4, 1, "", "__init__"]], "mlx.core.distributed": [[130, 3, 1, "", "Group"], [131, 5, 1, "", "all_gather"], [132, 5, 1, "", "all_sum"], [133, 5, 1, "", "init"], [134, 5, 1, "", "is_available"], [135, 5, 1, "", "recv"], [136, 5, 1, "", "recv_like"], [137, 5, 1, "", "send"]], "mlx.core.distributed.Group": [[130, 4, 1, "", "__init__"]], "mlx.core.fast": [[154, 5, 1, "", "cuda_kernel"], [155, 5, 1, "", "layer_norm"], [156, 5, 1, "", "metal_kernel"], [157, 5, 1, "", "rms_norm"], [158, 5, 1, "", "rope"], [159, 5, 1, "", "scaled_dot_product_attention"]], "mlx.core.fft": [[160, 5, 1, "", "fft"], [161, 5, 1, "", "fft2"], [162, 5, 1, "", "fftn"], [163, 5, 1, "", "fftshift"], [164, 5, 1, "", "ifft"], [165, 5, 1, "", "ifft2"], [166, 5, 1, "", "ifftn"], [167, 5, 1, "", "ifftshift"], [168, 5, 1, "", "irfft"], [169, 5, 1, "", "irfft2"], [170, 5, 1, "", "irfftn"], [171, 5, 1, "", "rfft"], [172, 5, 1, "", "rfft2"], [173, 5, 1, "", "rfftn"]], "mlx.core.finfo": [[174, 4, 1, "", "__init__"]], "mlx.core.linalg": [[204, 5, 1, "", "cholesky"], [205, 5, 1, "", "cholesky_inv"], [206, 5, 1, "", "cross"], [207, 5, 1, "", "eig"], [208, 5, 1, "", "eigh"], [209, 5, 1, "", "eigvals"], [210, 5, 1, "", "eigvalsh"], [211, 5, 1, "", "inv"], [212, 5, 1, "", "lu"], 
[213, 5, 1, "", "lu_factor"], [214, 5, 1, "", "norm"], [215, 5, 1, "", "pinv"], [216, 5, 1, "", "qr"], [217, 5, 1, "", "solve"], [218, 5, 1, "", "solve_triangular"], [219, 5, 1, "", "svd"], [220, 5, 1, "", "tri_inv"]], "mlx.core.metal": [[238, 5, 1, "", "device_info"], [239, 5, 1, "", "is_available"], [240, 5, 1, "", "start_capture"], [241, 5, 1, "", "stop_capture"]], "mlx.core.random": [[261, 5, 1, "", "bernoulli"], [262, 5, 1, "", "categorical"], [263, 5, 1, "", "gumbel"], [264, 5, 1, "", "key"], [265, 5, 1, "", "laplace"], [266, 5, 1, "", "multivariate_normal"], [267, 5, 1, "", "normal"], [268, 5, 1, "", "permutation"], [269, 5, 1, "", "randint"], [270, 5, 1, "", "seed"], [271, 5, 1, "", "split"], [272, 5, 1, "", "truncated_normal"], [273, 5, 1, "", "uniform"]], "mlx.nn": [[357, 3, 1, "", "ALiBi"], [358, 3, 1, "", "AvgPool1d"], [359, 3, 1, "", "AvgPool2d"], [360, 3, 1, "", "AvgPool3d"], [361, 3, 1, "", "BatchNorm"], [362, 3, 1, "", "CELU"], [363, 3, 1, "", "Conv1d"], [364, 3, 1, "", "Conv2d"], [365, 3, 1, "", "Conv3d"], [366, 3, 1, "", "ConvTranspose1d"], [367, 3, 1, "", "ConvTranspose2d"], [368, 3, 1, "", "ConvTranspose3d"], [369, 3, 1, "", "Dropout"], [370, 3, 1, "", "Dropout2d"], [371, 3, 1, "", "Dropout3d"], [372, 3, 1, "", "ELU"], [373, 3, 1, "", "Embedding"], [374, 3, 1, "", "GELU"], [375, 3, 1, "", "GLU"], [376, 3, 1, "", "GRU"], [377, 3, 1, "", "GroupNorm"], [378, 3, 1, "", "HardShrink"], [379, 3, 1, "", "HardTanh"], [380, 3, 1, "", "Hardswish"], [381, 3, 1, "", "InstanceNorm"], [382, 3, 1, "", "LSTM"], [383, 3, 1, "", "LayerNorm"], [384, 3, 1, "", "LeakyReLU"], [385, 3, 1, "", "Linear"], [386, 3, 1, "", "LogSigmoid"], [387, 3, 1, "", "LogSoftmax"], [388, 3, 1, "", "MaxPool1d"], [389, 3, 1, "", "MaxPool2d"], [390, 3, 1, "", "MaxPool3d"], [391, 3, 1, "", "Mish"], [486, 3, 1, "", "Module"], [412, 3, 1, "", "MultiHeadAttention"], [413, 3, 1, "", "PReLU"], [414, 3, 1, "", "QuantizedEmbedding"], [415, 3, 1, "", "QuantizedLinear"], [416, 3, 1, "", "RMSNorm"], [417, 3, 1, "", "RNN"], [418, 3, 1, "", "ReLU"], [419, 3, 1, "", "ReLU6"], [420, 3, 1, "", "RoPE"], [421, 3, 1, "", "SELU"], [422, 3, 1, "", "Sequential"], [423, 3, 1, "", "SiLU"], [424, 3, 1, "", "Sigmoid"], [425, 3, 1, "", "SinusoidalPositionalEncoding"], [426, 3, 1, "", "Softmax"], [427, 3, 1, "", "Softmin"], [428, 3, 1, "", "Softplus"], [429, 3, 1, "", "Softshrink"], [430, 3, 1, "", "Softsign"], [431, 3, 1, "", "Step"], [432, 3, 1, "", "Tanh"], [433, 3, 1, "", "Transformer"], [434, 3, 1, "", "Upsample"], [335, 5, 1, "", "average_gradients"], [443, 3, 1, "", "celu"], [444, 3, 1, "", "elu"], [445, 3, 1, "", "gelu"], [446, 3, 1, "", "gelu_approx"], [447, 3, 1, "", "gelu_fast_approx"], [448, 3, 1, "", "glu"], [449, 3, 1, "", "hard_shrink"], [450, 3, 1, "", "hard_tanh"], [451, 3, 1, "", "hardswish"], [452, 3, 1, "", "leaky_relu"], [453, 3, 1, "", "log_sigmoid"], [454, 3, 1, "", "log_softmax"], [469, 3, 1, "", "mish"], [470, 3, 1, "", "prelu"], [336, 5, 1, "", "quantize"], [471, 3, 1, "", "relu"], [472, 3, 1, "", "relu6"], [473, 3, 1, "", "selu"], [474, 3, 1, "", "sigmoid"], [475, 3, 1, "", "silu"], [476, 3, 1, "", "softmax"], [477, 3, 1, "", "softmin"], [478, 3, 1, "", "softplus"], [479, 3, 1, "", "softshrink"], [480, 3, 1, "", "step"], [481, 3, 1, "", "tanh"], [337, 5, 1, "", "value_and_grad"]], "mlx.nn.Module": [[392, 4, 1, "", "apply"], [393, 4, 1, "", "apply_to_modules"], [394, 4, 1, "", "children"], [395, 4, 1, "", "eval"], [396, 4, 1, "", "filter_and_map"], [397, 4, 1, "", "freeze"], [398, 4, 1, "", "leaf_modules"], [399, 4, 
1, "", "load_weights"], [400, 4, 1, "", "modules"], [401, 4, 1, "", "named_modules"], [402, 4, 1, "", "parameters"], [403, 4, 1, "", "save_weights"], [404, 4, 1, "", "set_dtype"], [405, 6, 1, "", "state"], [406, 4, 1, "", "train"], [407, 4, 1, "", "trainable_parameters"], [408, 6, 1, "", "training"], [409, 4, 1, "", "unfreeze"], [410, 4, 1, "", "update"], [411, 4, 1, "", "update_modules"]], "mlx.nn.init": [[435, 5, 1, "", "constant"], [436, 5, 1, "", "glorot_normal"], [437, 5, 1, "", "glorot_uniform"], [438, 5, 1, "", "he_normal"], [439, 5, 1, "", "he_uniform"], [440, 5, 1, "", "identity"], [441, 5, 1, "", "normal"], [442, 5, 1, "", "uniform"]], "mlx.nn.losses": [[455, 3, 1, "", "binary_cross_entropy"], [456, 3, 1, "", "cosine_similarity_loss"], [457, 3, 1, "", "cross_entropy"], [458, 3, 1, "", "gaussian_nll_loss"], [459, 3, 1, "", "hinge_loss"], [460, 3, 1, "", "huber_loss"], [461, 3, 1, "", "kl_div_loss"], [462, 3, 1, "", "l1_loss"], [463, 3, 1, "", "log_cosh_loss"], [464, 3, 1, "", "margin_ranking_loss"], [465, 3, 1, "", "mse_loss"], [466, 3, 1, "", "nll_loss"], [467, 3, 1, "", "smooth_l1_loss"], [468, 3, 1, "", "triplet_loss"]], "mlx.optimizers": [[489, 3, 1, "", "AdaDelta"], [490, 3, 1, "", "Adafactor"], [491, 3, 1, "", "Adagrad"], [492, 3, 1, "", "Adam"], [493, 3, 1, "", "AdamW"], [494, 3, 1, "", "Adamax"], [495, 3, 1, "", "Lion"], [496, 3, 1, "", "MultiOptimizer"], [497, 3, 1, "", "Muon"], [510, 3, 1, "", "Optimizer"], [502, 3, 1, "", "RMSprop"], [503, 3, 1, "", "SGD"], [338, 5, 1, "", "clip_grad_norm"], [504, 5, 1, "", "cosine_decay"], [505, 5, 1, "", "exponential_decay"], [506, 5, 1, "", "join_schedules"], [507, 5, 1, "", "linear_schedule"], [508, 5, 1, "", "step_decay"]], "mlx.optimizers.Optimizer": [[498, 4, 1, "", "apply_gradients"], [499, 4, 1, "", "init"], [500, 6, 1, "", "state"], [501, 4, 1, "", "update"]], "mlx.utils": [[339, 5, 1, "", "tree_flatten"], [340, 5, 1, "", "tree_map"], [341, 5, 1, "", "tree_map_with_path"], [342, 5, 1, "", "tree_reduce"], [343, 5, 1, "", "tree_unflatten"]]}, "objnames": {"0": ["cpp", "function", "C++ function"], "1": ["cpp", "functionParam", "C++ function parameter"], "2": ["cpp", "templateParam", "C++ template parameter"], "3": ["py", "class", "Python class"], "4": ["py", "method", "Python method"], "5": ["py", "function", "Python function"], "6": ["py", "property", "Python property"]}, "objtypes": {"0": "cpp:function", "1": "cpp:functionParam", "2": "cpp:templateParam", "3": "py:class", "4": "py:method", "5": "py:function", "6": "py:property"}, "terms": {"": [0, 1, 2, 5, 6, 7, 48, 53, 65, 102, 124, 126, 161, 162, 165, 166, 169, 170, 172, 173, 184, 205, 214, 219, 222, 236, 252, 258, 262, 282, 285, 286, 307, 309, 327, 328, 329, 331, 337, 356, 359, 360, 376, 382, 389, 390, 396, 397, 399, 403, 404, 405, 409, 410, 411, 417, 488, 499, 500, 512, 515, 517, 518, 522, 523, 524, 525], "0": [0, 1, 2, 4, 5, 6, 7, 9, 10, 15, 19, 39, 46, 47, 50, 70, 75, 79, 84, 87, 88, 100, 103, 107, 108, 109, 110, 111, 112, 113, 122, 127, 128, 153, 154, 156, 159, 175, 179, 184, 190, 200, 207, 208, 211, 212, 214, 216, 220, 246, 253, 261, 265, 267, 268, 273, 277, 282, 289, 293, 298, 299, 302, 306, 307, 321, 323, 324, 325, 326, 327, 328, 331, 335, 338, 339, 341, 342, 356, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 374, 377, 378, 381, 383, 384, 388, 389, 390, 413, 418, 420, 425, 429, 431, 433, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 446, 447, 449, 450, 451, 452, 455, 457, 459, 460, 464, 467, 468, 470, 471, 472, 473, 479, 480, 483, 486, 
489, 490, 492, 493, 494, 495, 497, 499, 502, 503, 504, 505, 506, 507, 508, 512, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524], "00005": 5, "0001": 425, "0005": 446, "001": 490, "00364": 5, "01": [5, 384, 452, 493, 497], "0137595": 438, "015": 447, "0184009": 439, "02264": 437, "024": 518, "02765": 438, "0300242": 439, "044715": [374, 446], "0485873": 457, "05": [17, 192, 361, 377, 381, 383, 416], "0507": 473, "05202": 6, "06": [458, 468, 489], "0638": 464, "06450": 383, "0645099": 441, "06561": 505, "06675": 495, "07467": 416, "08": [17, 192, 456, 491, 492, 493, 494, 502], "08022": 381, "081": 508, "08415": 447, "08494": 377, "08619": 439, "08681": [391, 469], "09864": 6, "0999938": 506, "0999961": 504, "0d": 497, "0f": 0, "0j": [207, 209], "1": [0, 1, 2, 3, 4, 6, 7, 9, 15, 19, 29, 30, 39, 47, 50, 88, 107, 108, 109, 110, 111, 112, 113, 122, 127, 128, 149, 150, 151, 152, 154, 156, 159, 160, 161, 164, 165, 168, 169, 170, 171, 172, 173, 175, 187, 191, 198, 200, 205, 206, 207, 208, 209, 210, 212, 214, 216, 233, 237, 252, 254, 258, 262, 265, 266, 267, 273, 292, 294, 298, 299, 301, 314, 320, 321, 326, 327, 338, 339, 341, 342, 347, 356, 358, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 374, 375, 376, 377, 381, 382, 383, 385, 388, 413, 416, 417, 420, 424, 425, 431, 434, 436, 437, 438, 439, 440, 441, 442, 443, 444, 446, 447, 448, 450, 453, 454, 455, 456, 457, 458, 459, 460, 461, 463, 464, 466, 467, 468, 473, 474, 476, 477, 478, 480, 483, 486, 488, 489, 490, 491, 492, 493, 494, 495, 499, 502, 503, 504, 505, 506, 507, 508, 515, 516, 517, 518, 519, 520, 522, 523, 524, 525], "10": [0, 3, 6, 7, 200, 224, 282, 287, 340, 356, 399, 483, 506, 508, 515, 516, 519], "100": [2, 5, 6, 455, 507, 515, 518, 521, 525], "1000": [2, 159, 504, 515], "10000": 420, "101": 507, "1024": [1, 6], "105361": 455, "10_000": 5, "10x": 495, "11": 214, "12": [6, 9, 187, 200, 506], "1212": 489, "123": [516, 520], "12451": 437, "128": [159, 287, 356], "13": 9, "14": [9, 200], "15": [1, 9, 200, 214, 293, 342, 515], "150594": 436, "15268": 438, "16": [1, 154, 156, 347, 358, 360, 381, 388, 390, 392, 486], "1606": 447, "1607": [381, 383], "16384": 187, "16506": 439, "168": 516, "17": [4, 9], "177208": 438, "18": 200, "1803": 377, "1908": [391, 469], "1910": 416, "191107": 436, "192": 516, "1985": 214, "1_000": 5, "1_all": 9, "1d": [0, 107, 111, 114, 285, 315, 497], "1e": [0, 5, 7, 17, 192, 361, 377, 381, 383, 384, 416, 456, 458, 468, 488, 489, 490, 491, 492, 493, 494, 499, 502, 504, 505, 506, 507, 508], "1e3": 515, "1st": 258, "2": [0, 1, 2, 4, 5, 6, 7, 9, 39, 108, 112, 122, 127, 128, 144, 150, 151, 152, 159, 161, 165, 168, 169, 170, 171, 172, 173, 175, 187, 198, 200, 204, 205, 206, 207, 208, 209, 210, 211, 212, 214, 215, 216, 219, 220, 226, 233, 258, 266, 267, 271, 298, 299, 318, 321, 323, 324, 325, 326, 338, 342, 347, 356, 358, 359, 360, 364, 367, 374, 384, 388, 389, 390, 416, 425, 434, 435, 436, 437, 438, 439, 440, 441, 442, 446, 457, 458, 460, 467, 468, 483, 486, 488, 489, 491, 492, 493, 499, 502, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525], "20": [187, 200, 214], "200": [6, 506, 518], "2002": 6, "2011": 491, "2012": [489, 502], "2015": [370, 492, 494], "2019": [6, 493], "2020": 6, "2021": 6, "20397": 455, "20_000": 6, "21": [6, 200, 508], "2104": 6, "223144": 455, "223404": 437, "225": 214, "225763": 464, "2302": 495, "23607": [214, 216], "24": 200, "24264": 214, "247": 6, "25": [9, 413, 434], "25211": 438, "256": [1, 7, 154, 156], "256995": 464, "27": 4, "28": [187, 200], "2d": [0, 108, 112, 128, 258, 
361, 370], "2nd": 258, "2x": 522, "3": [0, 1, 2, 4, 6, 9, 88, 109, 113, 122, 150, 152, 175, 198, 200, 206, 207, 208, 209, 210, 214, 216, 298, 299, 326, 338, 342, 360, 365, 368, 374, 390, 434, 437, 439, 446, 451, 490, 495, 512, 515, 516, 517, 519, 522, 523], "30": 490, "3118": 522, "32": [1, 6, 7, 96, 159, 258, 259, 347, 359, 360, 389, 390, 416, 515], "32mib": 335, "330": 6, "33333": 434, "33554432": 335, "348587": 457, "35": 9, "363207": 436, "36788": 515, "379159": 437, "380709": 441, "39": 6, "3d": [0, 2, 109, 113, 361, 371, 434], "3f": [2, 7, 515], "3x": 2, "4": [0, 1, 2, 6, 126, 156, 159, 175, 180, 200, 214, 258, 259, 287, 298, 326, 336, 342, 347, 358, 359, 360, 361, 381, 388, 389, 390, 414, 415, 433, 434, 436, 437, 438, 455, 515, 516, 517, 519, 523, 525], "4096": [2, 515, 518, 525], "40x": 1, "41421": 214, "417497": 442, "42": 343, "437": 6, "44": 6, "447214": 216, "458835": 438, "475": 6, "48095": 436, "4d": [1, 434, 497], "4m": 1, "5": [0, 1, 2, 5, 6, 9, 159, 200, 214, 261, 292, 298, 342, 358, 361, 369, 370, 371, 374, 378, 381, 388, 429, 434, 435, 438, 439, 446, 449, 467, 479, 483, 488, 497, 502, 504, 505, 515, 518, 519], "50": [0, 221], "500": [6, 525], "510826": 455, "512": [3, 6, 433, 525], "534422": 441, "539245": 455, "53947": 436, "54": 9, "55": 1, "550": 9, "559": 2, "5701": 489, "573409": 464, "57771": 216, "579": 6, "5f": 5, "6": [1, 2, 6, 122, 200, 214, 287, 298, 419, 433, 437, 446, 447, 451, 458, 468, 472, 502, 515, 519, 523], "61278": 436, "617261": 442, "628": 6, "633": 6, "639": 518, "64": [0, 1, 96, 126, 180, 258, 259, 336, 347, 414, 415], "64331": 439, "666329": 439, "66667": 434, "67326": 473, "676": 1, "690": 6, "6967": 438, "7": [2, 6, 9, 200, 214, 258, 519], "702": [374, 447], "707107": [207, 208], "71828": 515, "74166": 214, "74597": 214, "75": 434, "75596": 464, "75787": 438, "765166": 464, "773433": 464, "774": 2, "776856": 437, "793615": 439, "79854": 439, "7b": 6, "7m": 1, "8": [0, 1, 2, 6, 214, 258, 347, 359, 360, 381, 389, 390, 433, 456, 489, 490, 491, 492, 493, 494, 502, 515, 519, 523, 525], "8192": [6, 187], "84804": 214, "863726": 442, "883935": 442, "890597": 437, "894427": 216, "89613": 436, "8gb": 6, "8x": 1, "9": [4, 9, 214, 457, 489, 492, 493, 494, 495, 499, 505, 508, 522], "90041": 437, "912766": 437, "916291": 455, "95": [7, 497], "982273": 441, "99": [495, 502], "995016": 436, "999": [492, 493, 494], "A": [0, 2, 6, 8, 9, 10, 72, 86, 102, 150, 151, 154, 155, 156, 157, 159, 184, 198, 199, 205, 207, 208, 209, 210, 212, 214, 216, 219, 222, 232, 233, 234, 238, 242, 258, 261, 262, 263, 265, 266, 267, 268, 269, 272, 273, 302, 306, 309, 327, 330, 331, 336, 337, 338, 339, 340, 341, 342, 343, 344, 356, 361, 370, 376, 377, 381, 383, 396, 400, 401, 404, 410, 411, 416, 422, 425, 433, 436, 437, 439, 447, 468, 469, 486, 488, 492, 494, 496, 498, 499, 501, 506, 515, 516, 517, 518, 520, 521, 522], "AS": 179, "And": [4, 6, 434], "As": [7, 39, 314, 356, 516], "At": [101, 326, 516], "But": [517, 525], "By": [6, 336, 404, 455, 516, 518, 522], "For": [0, 1, 2, 4, 6, 9, 39, 122, 126, 159, 179, 198, 214, 258, 343, 356, 361, 370, 374, 392, 397, 406, 409, 415, 420, 425, 434, 436, 437, 438, 439, 455, 483, 488, 497, 512, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525], "If": [0, 1, 2, 4, 6, 9, 16, 17, 18, 19, 27, 28, 29, 30, 82, 86, 87, 88, 101, 103, 114, 118, 119, 120, 121, 127, 128, 131, 132, 133, 135, 136, 137, 146, 155, 157, 158, 159, 163, 167, 171, 172, 173, 177, 178, 179, 184, 192, 204, 205, 206, 214, 219, 222, 228, 232, 233, 234, 236, 237, 242, 246, 250, 253, 
254, 256, 257, 262, 266, 267, 268, 277, 281, 289, 292, 300, 301, 302, 307, 311, 313, 314, 315, 318, 320, 321, 327, 328, 331, 333, 335, 336, 339, 340, 342, 361, 363, 364, 365, 366, 367, 368, 377, 383, 385, 397, 399, 409, 410, 411, 415, 417, 420, 422, 425, 434, 455, 457, 468, 490, 492, 493, 515, 516, 517, 518, 520, 521, 524, 525, 526], "In": [0, 1, 2, 6, 7, 39, 159, 233, 258, 340, 356, 370, 377, 486, 489, 491, 492, 494, 495, 498, 514, 515, 516, 517, 518, 520, 521, 524, 525], "It": [2, 6, 9, 136, 184, 291, 327, 338, 342, 356, 411, 415, 498, 510, 516, 520, 522, 524], "Its": [356, 517], "No": [2, 6, 208, 210, 516], "Not": [102, 249, 515], "ON": [3, 4, 9], "Of": 518, "On": [1, 515, 518, 521], "One": [160, 164, 171, 253, 283, 515, 517, 518, 520], "THE": 9, "That": [6, 267], "The": [0, 1, 2, 3, 4, 6, 7, 8, 9, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 48, 51, 53, 64, 65, 67, 72, 82, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 125, 126, 127, 128, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 143, 144, 145, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 182, 183, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 199, 200, 201, 202, 203, 206, 207, 208, 209, 210, 212, 213, 214, 216, 217, 218, 219, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 240, 242, 243, 244, 245, 247, 249, 250, 251, 252, 253, 254, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 271, 272, 273, 274, 275, 276, 277, 279, 280, 281, 285, 286, 289, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 347, 349, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 369, 370, 371, 373, 375, 376, 377, 381, 382, 383, 385, 388, 389, 390, 392, 393, 397, 399, 403, 404, 405, 406, 409, 410, 411, 412, 414, 415, 416, 417, 420, 422, 425, 431, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 448, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 480, 483, 486, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 500, 502, 503, 504, 507, 510, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526], "Then": [5, 9], "There": [1, 356, 434, 515], "These": [1, 2, 102, 257, 315, 457, 525], "To": [0, 1, 2, 3, 5, 6, 7, 9, 212, 258, 289, 356, 483, 488, 515, 516, 517, 518, 523], "With": 517, "_": [1, 3, 5, 6, 341, 356, 504, 505, 506, 507, 508, 512, 515, 521, 525], "__call__": [1, 6, 7, 356, 486, 517], "__init__": [2, 6, 7, 10, 11, 12, 31, 122, 130, 174, 344, 356, 486], "__main__": [2, 6], "__name__": [2, 6], "_a": 2, "_ext": 2, "_f": 214, "_in": [436, 437], "_out": [436, 437], "_p": 468, "_val": 450, "a1": 179, "a2": 179, "a_": 214, "a_max": [0, 101], "a_min": [0, 101], "a_ndim": 1, "a_shap": 1, "a_strid": 1, "a_view": 522, "ab": [0, 17, 192, 214, 327, 377, 381, 383, 391, 416, 447, 469, 515, 517], "abil": 516, "abl": [2, 4, 520], "abort": 122, "about": [1, 2, 6, 7, 141, 238, 521, 525], "abov": [1, 2, 6, 258, 324, 356, 434, 516, 517, 518, 519, 520, 521, 525], "absolut": [0, 13, 17, 192, 446, 447, 467, 516], "acc": 342, "acceler": [4, 361], "accept": [516, 520], "access": [0, 6, 52, 356, 486, 499, 516, 521, 525], "accord": 
[0, 263, 332, 336, 412, 436, 437, 438, 439], "accordingli": 2, "accumul": [342, 416], "accuraci": 7, "accustom": 6, "achiev": [356, 516], "across": [1, 2, 9, 335, 377, 516], "act": [2, 463], "action": 356, "activ": [2, 9, 181, 370, 431, 433, 449, 469, 479, 480, 482, 515], "actual": [6, 19, 399, 486, 521], "ad": [0, 1, 2, 5, 9, 155, 341, 366, 367, 368, 381, 486, 489, 490, 491, 492, 493, 494, 502, 516, 521, 524], "adadelta": 488, "adafactor": 488, "adagrad": 488, "adam": [488, 494, 495, 506, 507], "adamax": 488, "adamw": [488, 495, 497], "adapt": [489, 490, 491, 516], "add": [0, 1, 2, 3, 4, 6, 15, 39, 148, 227, 253, 363, 364, 365, 366, 367, 368, 517, 518, 520, 525], "add_argu": 6, "add_depend": 2, "add_execut": 4, "add_fun": 517, "add_librari": 2, "addit": [0, 2, 4, 6, 9, 14, 15, 150, 155, 157, 159, 222, 361, 366, 367, 368, 377, 383, 412, 416, 486, 518], "addmm": 0, "address": 2, "adjac": 370, "advanc": [6, 515], "advantag": 525, "advis": 522, "affin": [0, 126, 180, 258, 259, 336, 361, 377, 381, 383, 385, 414, 415], "after": [2, 6, 7, 29, 100, 175, 177, 180, 254, 258, 361, 377, 383, 392, 393, 397, 399, 406, 409, 410, 411, 412, 433, 467, 515, 516, 525], "after_1": 253, "after_2": 253, "after_i": 253, "after_n": 253, "afternoon": 6, "again": [6, 9, 356, 515], "against": [0, 4, 97], "aggreg": [412, 516], "ago": 6, "ai": 122, "aim": 516, "ainv": [211, 220], "albeit": 525, "algebra": 8, "algorithm": [434, 495], "alia": [104, 105, 374], "alibi": 356, "align": [205, 258, 376, 382], "align_corn": 434, "all": [0, 1, 2, 3, 7, 9, 17, 29, 39, 89, 90, 91, 102, 108, 109, 110, 112, 113, 122, 131, 132, 133, 151, 153, 162, 163, 166, 167, 170, 173, 179, 180, 207, 209, 219, 233, 253, 254, 281, 305, 335, 336, 356, 392, 393, 397, 400, 401, 402, 407, 409, 412, 425, 433, 434, 483, 486, 510, 512, 515, 519, 520, 521, 523, 526], "all_avg": 516, "all_reduce_grad": 516, "all_reduce_s": 335, "all_sum": 516, "allclos": [0, 1, 154, 156], "alloc": [2, 182, 289, 292, 486], "allow": [0, 1, 2, 150, 152, 179, 180, 198, 338, 356, 411, 486, 510, 516, 519, 520, 523], "allow_col_major": [0, 106], "almost": [6, 516], "alon": [2, 522], "along": [0, 2, 27, 28, 102, 103, 118, 119, 120, 121, 131, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 179, 180, 187, 206, 214, 228, 257, 268, 277, 281, 300, 302, 306, 314, 315, 318, 319, 320, 321, 329, 356, 375, 417, 448], "alpha": [0, 2, 15, 258, 362, 372, 443, 444, 468, 470, 473, 493, 502], "alpha_": 2, "alreadi": [2, 3, 6, 516], "also": [0, 1, 2, 4, 6, 7, 8, 9, 12, 14, 92, 94, 95, 129, 138, 139, 143, 162, 166, 170, 173, 185, 186, 201, 202, 203, 227, 235, 243, 245, 249, 255, 258, 276, 280, 310, 336, 337, 349, 356, 396, 410, 412, 414, 415, 423, 445, 473, 475, 482, 488, 515, 516, 517, 518, 519, 520, 521, 522, 523, 526], "altern": 512, "although": 516, "alwai": [1, 87, 181, 190, 207, 209, 339, 516, 517, 518], "am": 6, "among": 2, "amount": [6, 183, 292, 358, 388, 517], "amus": 6, "an": [0, 1, 2, 3, 4, 6, 7, 9, 11, 16, 18, 31, 88, 89, 90, 91, 98, 106, 107, 108, 109, 110, 111, 112, 113, 130, 135, 136, 137, 146, 150, 152, 153, 155, 159, 175, 178, 188, 190, 193, 204, 214, 222, 244, 250, 251, 253, 256, 257, 258, 259, 268, 277, 279, 281, 282, 292, 293, 302, 305, 312, 314, 315, 318, 319, 323, 326, 331, 333, 334, 339, 340, 341, 342, 347, 356, 369, 374, 377, 382, 383, 385, 392, 412, 413, 415, 417, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 446, 470, 483, 488, 489, 497, 501, 505, 510, 512, 514, 515, 516, 517, 518, 519, 521, 522, 523, 524, 525, 526], "anaconda": 516, "anchor": 468, "angl": 
[125, 260, 384], "angular": [158, 420], "ani": [0, 1, 2, 6, 8, 19, 102, 122, 133, 335, 339, 340, 341, 342, 343, 356, 374, 392, 393, 396, 405, 415, 433, 434, 483, 497, 507, 514, 515, 516, 518, 521, 523, 524, 525], "anonym": 515, "anoth": [0, 97, 101, 198, 233, 310, 332, 347, 356, 392, 515, 517, 518, 519, 525], "anwywher": 9, "anyhow": 6, "anymor": 6, "anyth": [6, 327, 516, 521], "anytim": 521, "api": [1, 2, 88, 150, 152, 190, 374, 516, 517, 518], "aplu": 215, "app": 9, "append": [6, 233, 515, 521], "appl": [2, 6, 8, 9, 525], "appli": [0, 39, 158, 159, 179, 219, 340, 341, 342, 356, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 370, 371, 372, 374, 375, 377, 378, 379, 380, 381, 383, 384, 385, 386, 387, 388, 389, 390, 391, 393, 406, 413, 415, 416, 417, 418, 419, 421, 423, 424, 426, 427, 428, 429, 430, 431, 432, 434, 443, 444, 445, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 483, 492, 493, 498, 501, 507, 510, 515, 516], "applic": [3, 9], "apply_fn": 393, "apply_gradi": 488, "apply_to_modul": [356, 397], "approach": [463, 518], "appropri": 515, "approx": 374, "approxim": [17, 374, 445, 446, 447], "apt": 9, "ar": [0, 1, 2, 5, 6, 7, 8, 9, 17, 19, 86, 88, 96, 97, 98, 101, 102, 110, 114, 122, 126, 128, 133, 135, 136, 146, 153, 154, 156, 159, 161, 162, 165, 166, 169, 170, 172, 173, 175, 179, 180, 184, 192, 193, 194, 195, 196, 197, 198, 199, 207, 208, 209, 210, 212, 214, 216, 222, 233, 252, 253, 254, 258, 259, 261, 262, 263, 267, 268, 269, 272, 273, 281, 287, 288, 305, 306, 314, 327, 330, 331, 336, 339, 340, 347, 361, 363, 364, 365, 366, 367, 368, 369, 370, 371, 377, 381, 383, 385, 399, 412, 415, 434, 455, 457, 458, 482, 486, 488, 495, 499, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525], "arang": [0, 1, 214, 268, 347, 434, 519, 522], "arbitrari": [339, 486, 516], "arbitrarili": [1, 102, 356, 514, 518, 523], "arc": 0, "arcco": 0, "arccosh": 0, "architectur": [6, 9, 238, 356, 411, 525], "archiv": 524, "arcsin": 0, "arcsinh": 0, "arctan": 0, "arctan2": 0, "arctanh": 0, "arg": [2, 6, 11, 19, 88, 130, 146, 150, 151, 152, 174, 190, 287, 288, 344, 520], "arg1": 198, "arg2": 198, "argmax": [0, 7], "argmin": 0, "argnam": [184, 327], "argnum": [2, 122, 184, 327, 518], "argpars": 6, "argpartit": 0, "argsort": 0, "argument": [1, 32, 69, 83, 88, 102, 146, 184, 327, 340, 341, 342, 356, 434, 512, 516, 517, 518, 520, 524, 525, 526], "argumentpars": 6, "ari": [89, 90, 91], "aris": 522, "arm": 9, "arm64": 9, "around": 6, "arr": [0, 284, 519], "arr_0": 524, "arrai": [0, 1, 2, 4, 6, 7, 8, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 125, 126, 127, 128, 131, 132, 135, 136, 137, 138, 139, 140, 141, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 242, 243, 244, 245, 246, 247, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 271, 272, 
273, 274, 275, 276, 277, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 310, 311, 312, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 338, 347, 356, 361, 382, 392, 399, 402, 407, 413, 434, 435, 436, 437, 438, 439, 440, 441, 442, 448, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 480, 483, 486, 489, 490, 491, 492, 493, 494, 495, 496, 497, 502, 503, 504, 505, 506, 507, 508, 515, 516, 517, 518, 521, 522, 523, 525], "array_equ": [0, 17, 192], "arrayfir": 8, "arxiv": [6, 377, 381, 383, 391, 416, 447, 469, 489, 495], "as_strid": 0, "ascend": [208, 210], "ask": [6, 516, 520], "assert": [1, 2, 154, 156], "assign": [0, 2, 39, 486, 516], "associ": [2, 287, 288, 521], "assum": [0, 6, 96, 206, 208, 210, 216, 340, 356, 377, 516], "ast": 1, "astyp": [0, 1, 2, 6, 154, 156, 392, 522], "asynchron": 88, "atleast": 0, "atleast_1d": 0, "atleast_2d": 0, "atleast_3d": 0, "atol": [0, 17, 192], "atom": [1, 156], "atomic_fetch_add_explicit": 1, "atomic_output": [1, 156], "attach": 2, "attempt": [102, 516], "attend": 412, "attent": [159, 397, 412, 425, 433], "attention_norm": 6, "attribut": [1, 10, 11, 12, 31, 174, 344, 405, 486, 510], "audio": 434, "auto": [0, 2, 4, 9, 154, 516, 517], "autom": 518, "automat": [1, 2, 8, 154, 156, 222, 516, 523, 524, 525], "autoregress": 6, "avail": [2, 5, 6, 7, 9, 11, 117, 133, 134, 239, 292, 349, 516, 520, 525], "averag": [335, 358, 359, 360, 489, 490, 492, 493, 494, 516], "avgpool1d": 356, "avgpool2d": 356, "avgpool3d": 356, "avoid": [1, 2, 404, 515, 516], "awai": [2, 6], "awar": [515, 521], "ax": [0, 2, 16, 18, 27, 28, 83, 122, 148, 161, 162, 163, 165, 166, 167, 169, 170, 172, 173, 175, 191, 214, 217, 218, 232, 234, 236, 242, 253, 256, 281, 298, 299, 300, 305, 307, 311, 312, 318, 322, 328, 518], "axes_a": 0, "axes_b": 0, "axi": [0, 2, 6, 7, 16, 18, 27, 28, 29, 30, 34, 35, 36, 37, 42, 43, 44, 45, 58, 59, 60, 61, 62, 66, 75, 78, 79, 80, 84, 103, 118, 119, 120, 121, 128, 131, 148, 155, 157, 160, 164, 168, 169, 170, 171, 172, 173, 175, 187, 206, 212, 214, 228, 232, 234, 236, 242, 244, 253, 254, 256, 257, 262, 268, 277, 281, 300, 301, 302, 305, 306, 307, 311, 312, 314, 315, 319, 320, 321, 322, 326, 328, 329, 331, 358, 359, 360, 375, 388, 389, 390, 417, 448, 454, 456, 457, 461, 466, 468, 476, 477, 519], "axis1": [0, 47, 81, 128, 312, 321], "axis2": [0, 47, 81, 128, 312, 321], "axpbi": 2, "axpby_gener": 2, "axpby_general_": 2, "axpby_general_bfloat16": 2, "axpby_general_complex64": 2, "axpby_general_float16": 2, "axpby_general_float32": 2, "axpby_impl": 2, "b": [0, 1, 2, 3, 4, 6, 14, 15, 17, 25, 86, 92, 94, 95, 96, 138, 139, 143, 154, 156, 159, 177, 179, 185, 186, 190, 191, 192, 200, 201, 202, 203, 206, 214, 217, 218, 227, 229, 231, 233, 235, 243, 245, 249, 252, 255, 258, 265, 276, 280, 310, 318, 327, 339, 341, 342, 375, 385, 417, 434, 448, 518, 519, 521, 522, 523, 524, 525], "b1": 179, "b2": 179, "b_": [376, 382], "b_stride": 1, "ba": [492, 494], "back": [6, 117, 122, 239, 335, 522], "backend": [1, 9, 133, 134, 520], "backward": [1, 515, 518], "bad": 521, "balanc": 463, "baltimor": 214, "bandwidth": [515, 516], "base": [0, 2, 4, 158, 224, 226, 255, 420, 433, 486, 488, 494, 510, 512, 515, 519], "base_idx": 1, "basi": 510, "basic": [5, 282, 518], "batch": [6, 15, 96, 159, 179, 180, 233, 266, 361, 363, 364, 365, 366, 367, 368, 370, 371, 376, 382, 412, 417, 434, 521], "batch_idx": 1, "batch_iter": [7, 488], 
"batch_siz": [7, 488], "batchnorm": 356, "becaus": [6, 181, 356, 515, 516, 517, 521], "becom": 133, "been": [0, 2, 6, 182, 521], "befor": [1, 2, 6, 9, 29, 154, 156, 254, 335, 396, 433, 499, 516, 519, 521], "before_1": 253, "before_2": 253, "before_i": 253, "before_n": 253, "beforehand": 252, "beggin": 281, "begin": [87, 183, 205, 258, 376, 382, 431, 449, 460, 467, 473, 479, 480, 516], "behav": 122, "behavior": [167, 212, 266, 463, 519, 521], "behaviour": [122, 204, 205], "behind": 518, "being": [308, 356], "bell": 2, "below": [2, 9, 214, 258, 323, 325, 347, 434, 516, 521], "bench": 2, "benchmark": [2, 515], "benefici": [370, 371, 521], "benefit": 516, "best": 516, "beta": [0, 2, 15, 126, 258, 361, 377, 381, 383, 467, 488, 492, 493, 494, 495], "beta_": 2, "beta_1": [490, 492, 493, 494, 495], "beta_2": [492, 493, 494, 495], "better": [335, 497, 518, 525], "between": [0, 2, 8, 101, 175, 433, 456, 459, 460, 463, 506, 516, 520, 521, 522, 525], "beyond": [281, 504, 507], "bfloat16": [2, 12, 187, 347, 522], "bfloat16_t": 2, "bia": [6, 126, 155, 180, 258, 259, 340, 356, 363, 364, 365, 366, 367, 368, 376, 382, 383, 385, 397, 399, 409, 412, 415, 417, 492, 493, 494, 499, 518], "bias": [0, 126, 180, 258, 259, 376, 382, 397, 409, 412], "bias_correct": [492, 493], "bicub": 434, "big": [1, 335, 515], "bigger": [6, 490], "bilinear": [1, 434], "binari": [222, 284, 285, 286, 287, 288, 329, 431, 455, 480, 515, 520], "binary_cross_entropi": [356, 515], "bind": 520, "bit": [0, 126, 180, 201, 258, 259, 280, 336, 347, 392, 414, 415, 416], "bitwis": [0, 92, 93, 94, 95, 201, 280], "bitwise_and": 0, "bitwise_invert": 0, "bitwise_or": 0, "bitwise_xor": 0, "bla": 9, "block": [0, 2, 6, 96, 433], "block_masked_mm": 0, "block_siz": [0, 96], "bn": 361, "bodi": [1, 154, 156], "bool": [0, 1, 2, 16, 17, 18, 27, 28, 34, 35, 36, 37, 42, 43, 44, 45, 58, 59, 60, 61, 62, 66, 79, 80, 82, 84, 86, 102, 106, 110, 117, 118, 119, 120, 121, 133, 134, 150, 152, 154, 156, 158, 179, 180, 192, 198, 204, 205, 214, 218, 219, 220, 222, 228, 232, 234, 236, 237, 239, 242, 256, 259, 307, 311, 328, 336, 361, 363, 364, 365, 366, 367, 368, 376, 377, 381, 382, 383, 385, 392, 396, 397, 399, 404, 406, 409, 410, 411, 412, 415, 417, 420, 425, 433, 434, 455, 458, 490, 492, 493, 496, 497, 503], "bool_": [12, 347], "boolean": [0, 17, 86, 159, 192, 193, 194, 195, 196, 197, 198, 229, 230, 231, 347, 408, 519], "both": [1, 2, 14, 92, 94, 95, 138, 139, 143, 185, 186, 198, 201, 202, 203, 214, 227, 235, 243, 245, 249, 255, 262, 276, 280, 310, 336, 358, 359, 360, 381, 382, 388, 389, 390, 488, 515, 516, 517, 518, 523, 525], "bottom": 434, "bound": [0, 269, 272, 273, 374, 442, 515, 519, 525], "boundari": 506, "bracket": 6, "brain": 347, "break": 522, "bregler": 370, "bridg": 516, "broadcast": [0, 2, 14, 17, 92, 94, 95, 97, 98, 101, 138, 139, 143, 159, 178, 185, 186, 192, 201, 202, 203, 227, 233, 235, 243, 245, 249, 255, 257, 261, 262, 266, 269, 272, 273, 276, 280, 310, 315, 332, 412], "broadcast_arrai": [0, 2], "broadcast_to": 0, "broadcasted_input": 2, "brought": 8, "btl_tcp_if_includ": [516, 520], "btl_tcp_link": [516, 520], "buffer": [1, 2, 181, 522], "bui": 6, "build": [1, 3, 4, 6, 8, 438, 486, 515, 517], "build_ext": [2, 9], "build_shared_lib": [2, 9], "built": [1, 2, 4, 9, 521], "bundl": 6, "byte": [53, 64, 181, 182, 183, 289, 292, 293, 335, 347, 520], "c": [0, 1, 2, 6, 15, 214, 361, 363, 364, 365, 366, 367, 368, 370, 371, 381, 382, 522, 523, 525], "c_": [382, 495], "c_in": [107, 108, 109, 110, 111, 112, 113], "c_out": [107, 108, 109, 110, 111, 112, 113], 
"c_pad": 1, "c_t": [382, 495], "cabl": 516, "cach": [6, 9, 100, 181, 182, 289, 515], "calcul": [214, 215, 455, 458, 464, 490], "call": [2, 3, 6, 7, 32, 100, 133, 136, 177, 183, 190, 335, 356, 373, 397, 409, 414, 422, 486, 488, 499, 515, 516, 517, 518, 520, 521], "callabl": [102, 122, 150, 152, 154, 156, 184, 190, 199, 327, 330, 331, 336, 337, 339, 340, 341, 342, 392, 393, 396, 404, 417, 422, 433, 435, 436, 437, 438, 439, 440, 441, 442, 489, 490, 491, 492, 493, 494, 495, 496, 497, 502, 503, 504, 505, 506, 507, 508], "can": [1, 2, 3, 4, 6, 8, 9, 14, 19, 69, 83, 87, 88, 92, 94, 95, 102, 128, 129, 130, 138, 139, 143, 146, 150, 151, 159, 179, 185, 186, 190, 201, 202, 203, 214, 227, 235, 243, 245, 249, 255, 258, 261, 262, 269, 272, 273, 276, 280, 285, 293, 310, 321, 326, 327, 335, 342, 356, 359, 360, 373, 374, 389, 390, 396, 409, 414, 422, 434, 457, 483, 486, 488, 498, 499, 512, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526], "cannot": [6, 101, 519, 522], "captur": [2, 3, 102, 122, 240, 241, 356, 515], "care": [6, 516, 517, 520, 521], "carefulli": [515, 517], "carri": 2, "cartesian": 237, "case": [2, 6, 131, 132, 133, 135, 136, 137, 162, 166, 168, 170, 171, 172, 173, 175, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 215, 216, 217, 218, 219, 220, 233, 279, 305, 326, 359, 360, 370, 389, 390, 431, 449, 467, 473, 479, 480, 498, 499, 515, 516, 517, 518, 520, 523, 524, 525, 526], "cast": [2, 38, 171, 172, 173, 222, 335, 392, 404, 522], "caster": 2, "categor": 6, "categori": [12, 198, 347], "caus": [356, 515, 521], "causal": [6, 159], "caution": 87, "cd": [3, 9], "cdf": [263, 374, 445], "cdot": [447, 456, 459, 475], "ceil": 0, "ceildiv": 1, "cell": 382, "celu": 356, "center": 163, "certain": [406, 515], "chang": [87, 88, 102, 150, 152, 190, 291, 329, 410, 415, 434, 460, 467, 515, 522], "channel": [1, 107, 108, 109, 110, 111, 112, 113, 361, 363, 364, 365, 366, 367, 368, 370, 371], "channel_idx": 1, "char": 2, "charact": 339, "check": [0, 2, 9, 86, 117, 134, 198, 208, 210, 239, 399, 410, 411, 516, 517, 518, 519], "checklist": [516, 520], "checkout": [3, 515], "checkpoint": [433, 488], "chen": 495, "child": 411, "children": 356, "chip": 9, "choleski": 205, "choos": [6, 158, 420, 520], "chosen": 141, "clamp": 175, "clang": 9, "clarifi": 516, "clariti": 518, "class": [2, 6, 7, 10, 11, 12, 31, 122, 130, 174, 344, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 486, 489, 490, 491, 492, 493, 494, 495, 496, 497, 502, 503, 510], "class_pred": 336, "classif": [438, 439], "classifi": 7, "classmethod": [414, 415], "clear": 100, "click": 9, "clip": [0, 338, 455, 490], "clip_threshold": 490, "clipped_grad": 338, "clone": 9, "close": [5, 8, 9, 17, 192], "closer": 340, "cmake": [3, 4, 9], "cmake_arg": [3, 9], "cmake_build_typ": 9, "cmake_current_list_dir": 2, "cmake_cxx_standard": 4, "cmake_cxx_standard_requir": 4, "cmake_host_system_processor": 9, "cmake_library_output_directori": 2, "cmake_minimum_requir": 4, "cmakebuild": 2, "cmakeextens": 2, "cmakelist": [2, 4], "cmdclass": 2, "co": [0, 2, 122, 425, 518], "code": [1, 154, 156, 515, 516, 517, 521], "coeffici": [2, 
489, 490, 492, 493, 494, 495], "col": [106, 323], "cold": 9, "collect": [340, 341, 514], "column": [106, 153, 188, 207, 208, 258], "com": 9, "combin": [6, 219, 342], "come": [2, 6, 516, 518], "command": [2, 3, 4, 9, 516, 520], "command_buff": 2, "common": [488, 515, 521], "commonli": [7, 410, 483, 515], "commun": [8, 130, 133, 134, 335, 520], "communication_stream": 335, "communication_typ": 335, "compact": 213, "compar": [2, 86, 515], "comparison": [17, 143, 185, 186, 202, 203, 249], "compat": [6, 150, 152, 159, 190, 262, 266, 374, 524], "compil": [0, 1, 3, 4, 8, 9, 129, 142, 154, 156, 516, 517, 518, 521], "compiled_fun": [515, 517], "compiled_grad_fn": 515, "complement": 93, "complet": [5, 6, 9, 410, 411, 517, 518, 525], "complex": [2, 51, 67, 104, 105, 169, 170, 171, 172, 173, 189, 207, 208, 209, 210, 267, 274, 339, 347, 356, 411, 515, 517, 518], "complex64": [2, 12, 207, 209, 347], "complex64_t": 2, "complexflo": 12, "compon": [2, 4, 6, 163, 219], "compos": [8, 356, 515, 518, 523], "composit": 523, "compress": 288, "compromis": 6, "comput": [0, 1, 2, 5, 6, 7, 8, 9, 118, 119, 120, 121, 122, 126, 141, 149, 158, 184, 199, 200, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 217, 218, 220, 227, 228, 236, 252, 258, 276, 300, 307, 308, 318, 327, 328, 330, 337, 356, 361, 376, 377, 381, 382, 383, 397, 410, 415, 416, 420, 433, 436, 437, 438, 439, 446, 447, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 488, 489, 490, 492, 493, 494, 495, 501, 515, 516, 517, 518, 523, 525], "computation": 521, "compute_encod": 2, "compute_uv": 219, "concaten": [0, 6, 131, 335], "concept": 486, "concis": 6, "concret": [2, 376, 382, 385, 417, 521, 525], "conda": [9, 516], "condit": [0, 332, 515, 525], "config": [2, 4, 516], "configu": 488, "configur": 516, "confirm": [516, 520], "confus": 7, "conj": 105, "conjug": [0, 104], "connect": [497, 516, 520], "consecut": [158, 258, 420], "consequ": 6, "consid": [6, 17, 86, 106, 192, 339, 340, 341, 377, 496, 514, 516], "consider": 515, "const": [0, 1, 2, 458], "constant": [0, 2, 6, 9, 122, 155, 157, 253, 356, 361, 377, 383, 416, 458, 468, 502, 504, 515, 517, 522], "constant_valu": 253, "constitut": 340, "construct": [0, 2, 7, 46, 127, 178, 212, 250, 319, 333], "consult": 516, "consum": 521, "contain": [2, 6, 9, 29, 30, 72, 102, 128, 141, 168, 169, 170, 179, 180, 207, 208, 214, 229, 230, 231, 258, 302, 332, 335, 338, 356, 396, 398, 399, 405, 433, 464, 483, 486, 515, 518], "content": [9, 396, 515], "context": [309, 517], "contigu": [0, 1, 2, 87, 154, 156], "continu": [362, 443, 516, 518], "contract": [0, 141], "contribut": 2, "contriv": [518, 525], "control": [0, 384, 512, 521], "conv": 114, "conv1d": [0, 356], "conv2d": [0, 356], "conv3d": [0, 356], "conv_gener": 0, "conv_transpose1d": 0, "conv_transpose2d": 0, "conv_transpose3d": 0, "conveni": [1, 2, 7, 198], "convent": [19, 114, 140, 141, 434], "convers": 8, "convert": [0, 1, 2, 82, 89, 90, 91, 125, 175, 260, 414, 415, 521, 522, 523], "convolut": [0, 107, 108, 109, 110, 111, 112, 113, 114, 363, 364, 365, 366, 367, 368, 370, 371, 497], "convolv": [107, 108, 109, 110, 111, 112, 113], "convtranspose1d": 356, "convtranspose2d": 356, "convtranspose3d": 356, "cooperative_group": 154, "coordin": [0, 237], "copi": [0, 1, 2, 6, 8, 106, 254, 301, 522], "core": [1, 2, 3, 4, 5, 6, 7, 336, 356, 358, 359, 360, 361, 381, 388, 389, 390, 399, 402, 404, 407, 414, 415, 434, 435, 436, 437, 438, 439, 440, 441, 442, 455, 457, 464, 483, 486, 488, 515, 516, 522, 523], "corner": 434, "correct": [2, 9, 492, 493, 
494, 519, 521], "correctli": [39, 516], "correl": [110, 370], "correspond": [0, 1, 2, 16, 18, 82, 101, 126, 128, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 207, 208, 232, 234, 242, 256, 298, 299, 311, 318, 326, 331, 340, 496, 516, 518, 520], "cos_first": 425, "cosh": [0, 463], "cosin": [0, 20, 21, 115, 116, 456, 504, 506, 518], "cosine_decai": [488, 506], "cosine_similarity_loss": 356, "cost": [9, 490, 516, 521], "costli": 521, "cot": 1, "cot_index": 1, "cotan": 122, "cotang": [1, 2, 122, 330], "could": [6, 356, 519], "count": [356, 506], "counter": 512, "cours": 518, "coursera": 502, "cout": [4, 517], "cov": 266, "covari": [266, 361], "cover": 2, "cpp": [2, 4], "cpu": [8, 207, 208, 209, 210, 216, 347, 525], "cpython": 2, "crash": [87, 515], "creat": [0, 1, 2, 6, 9, 87, 133, 153, 188, 309, 356, 486, 488, 506, 515, 516, 517, 519, 520, 522], "create_additive_causal_mask": 6, "cross": [7, 110, 455, 457], "cross_entropi": [7, 356], "crowd": 6, "cry": 6, "cubic": 434, "cuda": [8, 154], "cummax": 0, "cummin": 0, "cumprod": 0, "cumsum": 0, "cumul": [0, 87, 118, 119, 120, 121, 228], "current": [6, 8, 9, 87, 96, 109, 112, 113, 126, 137, 182, 238, 258, 342, 356, 490, 516, 521], "current_binary_dir": 2, "custom": [8, 122, 154, 156, 433], "custom_decod": 433, "custom_encod": 433, "custom_funct": 1, "custom_kernel_myexp_float": 1, "custom_tim": 2, "cvpr": 370, "cxx": 4, "cycl": 514, "d": [0, 1, 2, 6, 109, 113, 127, 128, 159, 191, 214, 233, 237, 252, 314, 321, 323, 324, 325, 343, 365, 368, 371, 376, 382, 417, 489, 492, 494, 525], "d1": 525, "d2": 525, "d2fdx2": 518, "d_i": 385, "dampen": 503, "darwin": 2, "data": [0, 2, 7, 8, 11, 19, 135, 153, 171, 172, 178, 188, 221, 246, 250, 263, 272, 321, 323, 329, 333, 371, 435, 436, 437, 438, 439, 440, 441, 442, 515, 516, 517, 519, 522], "dataset": [5, 516, 521], "datatyp": 53, "dbuild_shared_lib": 9, "dcmake_build_typ": [4, 9], "ddof": [0, 79, 84, 307, 328], "deal": 515, "deb": 9, "debug": [1, 3, 516, 520], "debugg": 8, "decai": [490, 493, 495, 497, 503, 504, 505, 508], "decay_r": [490, 505, 508], "decay_step": 504, "decent": 7, "decid": [340, 396], "decim": [0, 70, 282], "declar": 2, "decltyp": 1, "decod": 433, "decomposit": [204, 205, 215, 219], "decor": [1, 122], "decoupl": 493, "dedic": 516, "deep": [361, 436, 437, 438, 439], "def": [1, 2, 5, 6, 7, 122, 150, 152, 154, 156, 327, 356, 486, 515, 516, 517, 518, 519, 521, 522, 525], "default": [1, 2, 9, 15, 16, 17, 18, 19, 27, 28, 29, 30, 86, 87, 96, 102, 103, 107, 108, 109, 110, 111, 112, 113, 122, 123, 124, 126, 127, 128, 131, 132, 133, 135, 136, 137, 150, 152, 153, 154, 156, 158, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 175, 179, 180, 184, 187, 188, 192, 200, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 232, 234, 236, 237, 242, 246, 250, 253, 254, 256, 258, 259, 261, 262, 263, 265, 266, 267, 268, 269, 271, 272, 273, 277, 279, 282, 289, 290, 291, 292, 293, 301, 302, 305, 306, 307, 309, 311, 313, 318, 320, 321, 322, 323, 324, 325, 326, 327, 328, 331, 333, 335, 336, 339, 347, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 372, 375, 376, 378, 381, 382, 384, 385, 388, 389, 390, 392, 397, 399, 404, 406, 409, 410, 411, 412, 413, 414, 415, 417, 420, 425, 429, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 448, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 486, 489, 490, 491, 492, 493, 494, 495, 497, 502, 503, 504, 512, 514, 515, 516, 517, 518, 520, 522, 524, 526], "default_devic": 526, 
"default_stream": 526, "defin": [1, 2, 5, 6, 7, 9, 122, 136, 154, 156, 180, 206, 214, 259, 336, 339, 520, 522], "definit": [122, 204, 205, 266], "degre": [0, 260, 468], "deleg": 496, "delta": [460, 489], "delv": [438, 439], "demonstr": 522, "denomin": [381, 456, 489, 491, 492, 493, 494, 502], "dens": [237, 525], "depend": [0, 2, 3, 4, 5, 9, 82, 214, 376, 382, 417, 515, 516, 519, 524, 525], "depth": [339, 360, 365, 368, 371, 390, 518], "dequant": [0, 258], "deriv": [2, 517, 518, 521], "descend": 394, "descent": [503, 515, 521], "describ": [2, 258, 521], "descript": [2, 4, 6, 347], "design": [1, 5, 8, 512, 525], "destin": [0, 2, 63, 137, 244, 257, 339, 488, 517], "destroi": 515, "detach": 518, "detail": [1, 2, 11, 258, 289, 356, 370, 420, 425, 434, 436, 437, 438, 439, 489, 491, 492, 494, 495, 516, 519, 523], "detect": 515, "determin": [0, 2, 128, 266, 342, 347, 403, 524], "dev": [2, 9], "develop": [2, 4, 9], "developer_dir": 9, "deviat": [0, 267, 307, 436, 438, 441], "devic": [1, 2, 8, 9, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 123, 124, 125, 126, 127, 128, 131, 132, 135, 136, 137, 138, 139, 140, 143, 144, 145, 147, 148, 149, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 265, 266, 267, 268, 269, 271, 272, 273, 274, 275, 276, 277, 279, 280, 281, 282, 283, 290, 291, 292, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 328, 329, 332, 333, 334, 344, 525, 526], "device_info": 293, "devicetyp": 10, "df": 522, "dfdx": [517, 518, 519], "dft": [160, 161, 162, 164, 165, 166, 171, 172, 173], "dhwc": 371, "diag": [0, 219], "diagon": [0, 46, 127, 153, 321, 323, 324, 325], "dict": [88, 102, 146, 151, 222, 238, 285, 286, 287, 336, 338, 339, 343, 402, 407, 410, 411, 486, 498, 499, 501, 514, 518, 524], "dict_kei": [340, 499], "dictionari": [6, 102, 150, 190, 222, 238, 285, 286, 338, 339, 342, 356, 396, 405, 410, 411, 500, 514, 524], "did": 6, "diff": 2, "differ": [8, 167, 198, 207, 209, 310, 329, 467, 496, 497, 515, 516, 517, 518, 520], "differenti": [1, 8, 362, 443], "difficult": 518, "difficulti": [436, 437], "dilat": [0, 107, 108, 109, 110, 111, 112, 113, 363, 364, 365, 366, 367, 368], "dim": [1, 6, 158, 159, 373, 377, 381, 383, 412, 414, 416, 420, 425, 433], "dimens": [0, 1, 2, 6, 16, 18, 27, 28, 65, 72, 82, 89, 90, 91, 102, 108, 109, 110, 112, 113, 128, 148, 158, 159, 169, 170, 172, 173, 175, 179, 180, 191, 204, 205, 207, 208, 209, 210, 211, 212, 214, 215, 216, 219, 220, 232, 233, 234, 236, 242, 256, 257, 258, 262, 271, 307, 311, 315, 318, 322, 328, 361, 363, 364, 365, 366, 367, 368, 370, 371, 375, 376, 377, 381, 382, 383, 412, 416, 417, 420, 433, 434, 448, 457, 497, 515, 518], "dimension": [31, 155, 157, 
160, 161, 162, 164, 165, 166, 171, 172, 173, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 373, 385, 388, 389, 390, 414, 415, 425, 519, 522], "dir": 4, "direct": [6, 394, 495, 525], "directli": [2, 6, 87], "directori": [2, 4, 6, 9], "disabl": [129, 289, 335, 515, 516], "disable_compil": 515, "disappoint": 6, "discard": [6, 339], "discov": [9, 516], "discoveri": 495, "discret": [114, 160, 161, 162, 164, 165, 166, 171, 172, 173, 373, 414], "discuss": 2, "disk": 6, "dispatch": 2, "dispatch_thread": 2, "dispatchthread": 1, "displai": 356, "distanc": [6, 468], "distribut": [8, 9, 261, 262, 263, 265, 266, 267, 272, 273, 335, 385, 436, 437, 438, 439, 441, 442, 458, 461, 466, 468, 483], "distributed_config": [516, 520], "diverg": 461, "divid": [0, 2, 39, 177, 258, 276, 516], "divis": [0, 138, 177, 258, 276], "divisor": [307, 328], "divmod": 0, "dloss_dw": 518, "dloss_dx": 518, "dlpack": 522, "dlvalu": 327, "dmlx_build_cpu": 9, "dmlx_build_cuda": 9, "dmlx_build_gguf": 9, "dmlx_build_safetensor": 9, "dmlx_metal_debug": 3, "dmlx_metal_jit": 9, "do": [0, 2, 6, 9, 212, 329, 356, 398, 409, 483, 486, 515, 516, 517, 518, 521], "doc": [2, 7, 516, 520], "document": [2, 3, 4, 69, 83, 156, 285, 286, 347, 515, 516, 517, 518, 519], "doe": [0, 2, 3, 6, 9, 181, 258, 329, 338, 356, 515, 516, 519, 520, 521, 522], "doesn": [2, 356, 517], "domain": 272, "don": [1, 9, 106, 515, 525], "done": [356, 369, 416, 515, 516, 521, 522], "dot": [151, 211, 220, 318, 339, 401, 412, 516], "doubl": [0, 6, 347], "doubt": 6, "down": [6, 338], "download": 9, "downsampl": [358, 359, 360, 388, 389, 390], "dparam": 327, "dpkg": 9, "draw": 262, "driver": 9, "drop": 396, "dropout": [356, 370, 371, 406, 433, 515], "dropout2d": 356, "dropout3d": 356, "dst": 137, "dt": 144, "dtype": [0, 1, 2, 6, 12, 19, 31, 38, 39, 82, 85, 135, 136, 153, 154, 156, 174, 175, 178, 188, 198, 200, 207, 208, 209, 210, 214, 216, 221, 250, 263, 265, 266, 267, 269, 272, 273, 298, 299, 321, 323, 326, 329, 333, 335, 347, 404, 434, 435, 436, 437, 438, 439, 440, 441, 442, 455, 457, 464, 504, 505, 506, 507, 508, 515, 516, 517, 518, 519, 522, 523, 524], "dtypecategori": [198, 347], "dual": 463, "duchi": 491, "duplic": 517, "dure": [3, 102, 292, 369, 370, 371, 434, 522], "dx": 122, "dy": 122, "dyld": 516, "dyld_library_path": 516, "dylib": 2, "dynam": [0, 154, 517, 521], "e": [2, 7, 9, 122, 144, 156, 179, 180, 199, 294, 361, 363, 364, 365, 366, 367, 368, 370, 371, 377, 381, 383, 397, 416, 453, 454, 476, 477, 482, 488, 491, 497, 515, 517, 521, 526], "e2m1": 258, "e5": 347, "e8": 347, "each": [0, 1, 2, 72, 88, 126, 146, 158, 180, 198, 204, 205, 207, 208, 209, 210, 211, 215, 219, 220, 233, 237, 253, 258, 259, 262, 277, 287, 288, 302, 319, 322, 329, 331, 332, 370, 371, 373, 376, 377, 382, 417, 420, 433, 455, 457, 512, 515, 516, 517, 520, 521], "eager": 521, "earli": 370, "eas": 6, "easi": [2, 356, 496], "easier": [1, 151, 521], "easiest": 516, "edg": [101, 253, 434, 515], "edit": [9, 411], "effect": [370, 515, 521], "effici": [6, 8, 179, 370, 420, 516, 521, 523], "eigenvalu": [207, 208, 209, 210], "eigenvector": [207, 208], "einstein": [140, 141], "einsum": 141, "either": [9, 14, 69, 82, 83, 92, 94, 95, 101, 138, 139, 143, 177, 185, 186, 190, 201, 202, 203, 214, 227, 233, 235, 243, 245, 249, 255, 258, 276, 280, 310, 327, 359, 360, 389, 390, 422, 434, 438, 439, 516, 520, 522], "elem": [1, 154, 156], "elem_to_loc": [1, 2], "element": [0, 1, 2, 13, 14, 20, 21, 22, 23, 24, 25, 26, 29, 74, 87, 92, 93, 94, 95, 99, 115, 116, 118, 119, 120, 121, 126, 138, 139, 143, 144, 145, 147, 
149, 153, 176, 177, 180, 185, 186, 192, 193, 194, 195, 196, 197, 201, 202, 203, 223, 224, 225, 226, 227, 228, 229, 230, 231, 235, 237, 243, 245, 247, 249, 254, 255, 258, 259, 275, 276, 277, 280, 281, 283, 294, 295, 296, 297, 303, 304, 310, 314, 316, 317, 320, 327, 329, 332, 362, 369, 370, 371, 376, 380, 382, 391, 413, 417, 420, 424, 443, 450, 451, 453, 454, 469, 470, 472, 475, 476, 477, 478, 515, 518, 519], "elementwis": [1, 104, 105], "elif": 6, "ellipsi": 519, "elman": 417, "els": [0, 2, 6, 356, 397, 516, 521], "elsewher": [323, 519], "elu": [356, 473], "emb": [6, 373, 414, 425], "embed": [6, 336, 356, 414, 420, 425, 456, 497], "empti": [266, 339], "en0": 520, "en2": 516, "enabl": [3, 6, 9, 102, 142, 335, 497, 503], "enclos": 517, "encod": [2, 158, 420, 425, 433, 457], "encount": [2, 518], "end": [117, 128, 205, 239, 258, 281, 335, 376, 382, 431, 449, 460, 467, 473, 479, 480, 504, 507, 517], "end_axi": [0, 50, 175], "end_encod": 2, "endif": 2, "endl": [4, 517], "endswith": 397, "enhanc": [6, 420, 521], "enough": [2, 521], "ensur": [0, 1, 2, 9, 154, 156, 338, 463, 516, 517], "ensure_row_contigu": [1, 154, 156], "enter": 6, "entir": [16, 18, 27, 28, 232, 234, 236, 242, 256, 307, 311, 328, 370, 371], "entri": [0, 268, 326, 370, 371], "entropi": [7, 455, 457], "enumer": 356, "environ": [9, 129, 142, 516], "ep": [5, 155, 157, 361, 377, 381, 383, 416, 456, 458, 468, 488, 489, 490, 491, 492, 493, 494, 502], "epoch": 7, "epsilon": [361, 377, 381, 383, 416, 456, 458, 489, 491, 492, 493, 494, 502], "epsilon_1": 490, "epsilon_2": 490, "equal": [0, 1, 17, 29, 86, 153, 186, 192, 203, 249, 254, 269, 302, 335, 381, 385], "equal_nan": [0, 17, 86, 192], "equat": [140, 141, 217, 218], "equival": [0, 2, 32, 69, 83, 136, 139, 154, 177, 180, 187, 314, 362, 372, 374, 378, 379, 380, 386, 387, 411, 413, 415, 418, 419, 421, 423, 426, 427, 428, 429, 430, 432, 516], "erf": [0, 145, 515], "erfinv": 0, "error": [0, 2, 9, 133, 144, 145, 293, 302, 374, 445, 446, 447, 463, 465, 515, 518, 520, 522], "error_norm": 5, "estim": 494, "eta": 495, "etc": [2, 258, 356, 434, 516], "ethernet": [516, 520], "eval": [2, 3, 5, 6, 7, 356, 486, 488, 515, 516, 517, 518, 521, 523], "eval_cpu": 2, "eval_fn": 7, "eval_gpu": 2, "evalu": [2, 6, 7, 8, 88, 137, 146, 199, 292, 330, 356, 395, 406, 486, 488, 515, 517, 523], "even": [1, 2, 6, 102, 167, 207, 209, 515, 516, 517, 521, 522], "evenli": [0, 221], "everi": [1, 258, 340, 488, 508, 518, 520], "everyth": [6, 516], "everywher": 0, "exact": [446, 447], "exactli": [2, 6, 158, 399, 518], "exampl": [0, 3, 4, 5, 6, 7, 9, 19, 39, 88, 122, 133, 150, 151, 152, 154, 156, 159, 175, 190, 198, 200, 207, 208, 209, 210, 214, 216, 298, 299, 309, 314, 326, 338, 341, 342, 356, 358, 359, 360, 361, 381, 388, 389, 390, 397, 399, 406, 409, 434, 435, 436, 437, 438, 439, 440, 441, 442, 455, 457, 464, 483, 488, 499, 504, 505, 506, 507, 508, 512, 518, 519, 520, 521, 522, 523, 524], "exce": [335, 338], "exceed": 292, "except": [8, 122, 153, 168, 169, 171, 172, 173, 292, 347, 377, 399, 517, 519, 522], "exclud": [257, 315], "exclus": [0, 87, 95], "execut": [2, 4, 9, 89, 90, 91, 183, 200, 516, 522, 525], "execute_process": 4, "exist": [2, 3, 6, 397, 409, 516], "exp": [0, 1, 88, 149, 154, 156, 227, 232, 263, 300, 362, 372, 424, 443, 444, 461, 473, 474, 478, 515, 517, 525], "exp_elementwis": [1, 154, 156], "expand_dim": 0, "expect": [6, 363, 364, 365, 366, 367, 368, 369, 370, 371, 425, 433, 458, 515, 516, 519], "expens": 433, "expensive_fun": 521, "experiment": [88, 150, 152, 190, 522], "explain": 2, "explicit": [2, 499, 
512, 522], "explicitli": [179, 356, 512, 520], "explor": 9, "expm1": 0, "exponenti": [0, 147, 149, 362, 372, 421, 443, 444, 473, 505], "exponential_decai": 488, "export": [8, 9, 150, 151, 190], "export_funct": 517, "ext_modul": 2, "extend": [2, 253], "extens": [8, 222, 240, 403, 524], "extern": 522, "extra": [1, 340, 341, 517], "extract": [0, 6, 46, 127, 128, 298, 356, 396, 486], "extras_requir": 2, "extrem": [519, 521], "ey": [0, 6, 211, 220], "f": [0, 2, 5, 7, 122, 214, 356, 382, 493, 515, 522], "f_jvp": 122, "f_t": 382, "f_vjp": 122, "f_vmap": 122, "face": 6, "factor": [2, 15, 187, 204, 205, 212, 213, 216, 434, 457, 505, 508], "fail": [515, 516, 520], "fall": [2, 122], "fallback": 496, "fals": [0, 1, 2, 6, 16, 17, 18, 27, 28, 34, 35, 36, 37, 42, 43, 44, 45, 58, 59, 60, 61, 62, 66, 79, 80, 84, 86, 102, 106, 110, 118, 119, 120, 121, 133, 150, 152, 156, 179, 180, 192, 198, 204, 205, 214, 218, 219, 220, 222, 228, 232, 234, 236, 237, 242, 256, 307, 311, 328, 332, 336, 339, 340, 341, 342, 347, 377, 381, 383, 385, 397, 399, 409, 412, 415, 420, 425, 433, 434, 455, 458, 490, 492, 493, 503, 517, 522], "famili": 6, "fan": [436, 437, 438, 439], "fan_in": [436, 437, 438, 439], "fan_out": [436, 437, 438, 439], "far": 488, "fast": [1, 8, 374, 447, 516, 525], "faster": [1, 2, 9, 139, 179, 180, 445, 455, 515, 516, 518], "featur": [1, 8, 107, 108, 109, 110, 111, 112, 113, 158, 361, 376, 377, 381, 382, 383, 385, 415, 416, 417, 420, 433, 434, 515, 516, 521], "feed": 6, "feed_forward": 6, "feedforward": [436, 437], "feel": 6, "fetch": 1, "few": [1, 2, 6, 7, 8, 9, 517, 521, 523], "fewer": 516, "ffn": 6, "ffn_norm": 6, "fft": 8, "fftshift": 167, "fi": 516, "figur": 516, "file": [4, 6, 9, 150, 151, 152, 190, 222, 284, 285, 286, 287, 288, 399, 403, 516, 517, 518, 524], "file_or_weight": 399, "fill": [0, 178, 251, 323, 334, 435, 436, 437, 438, 439, 441, 442], "filter": [0, 114, 363, 364, 365, 366, 367, 368, 392, 396, 496, 497], "filter_and_map": 356, "filter_fn": [392, 396], "final": [2, 4, 5, 6, 7, 187, 497, 504, 507, 516, 520], "find": [2, 4, 5, 9, 516], "find_packag": [2, 4], "finder": 9, "fine": [512, 517, 521], "finetun": 356, "finit": [0, 193, 246], "first": [0, 1, 2, 3, 4, 5, 6, 7, 9, 128, 131, 133, 175, 184, 198, 200, 201, 219, 229, 231, 233, 254, 271, 280, 312, 318, 321, 327, 339, 341, 342, 356, 359, 360, 377, 389, 390, 434, 456, 464, 490, 494, 499, 515, 516, 517, 518, 519, 520, 522, 525], "first_lay": 521, "firt": 515, "fit": [258, 525], "five": 515, "fix": [2, 6, 9, 515, 521], "flag": [4, 9, 179, 515, 522], "flat": [179, 180, 339, 343], "flat_param": 287, "flatten": [0, 29, 30, 118, 119, 120, 121, 214, 228, 252, 254, 257, 277, 281, 301, 314, 315, 320, 339, 497, 515], "flexibl": 8, "flexibli": 411, "flip": [0, 110, 114], "float": [0, 1, 2, 12, 15, 17, 19, 82, 155, 156, 157, 158, 159, 174, 177, 178, 187, 192, 198, 214, 246, 258, 259, 261, 265, 335, 338, 347, 361, 369, 370, 371, 377, 381, 383, 392, 404, 416, 420, 425, 431, 433, 434, 435, 436, 437, 438, 439, 441, 442, 456, 457, 458, 460, 464, 467, 468, 479, 480, 489, 490, 491, 492, 493, 494, 495, 497, 502, 503, 504, 505, 507, 508], "float16": [1, 2, 12, 154, 156, 187, 222, 347, 392, 521, 522], "float16_t": [1, 2], "float32": [0, 1, 2, 12, 19, 153, 154, 156, 159, 187, 188, 198, 208, 210, 214, 216, 221, 250, 263, 265, 266, 267, 272, 273, 299, 323, 333, 347, 434, 435, 436, 437, 438, 439, 440, 441, 442, 455, 457, 464, 504, 505, 506, 507, 508, 515, 516, 517, 518, 519, 521, 522, 523, 524], "float64": [12, 198, 347, 522], "floor": [0, 1, 177], "floor_divid": 0, 
"flow": [0, 308, 521], "flush": 2, "fn": [190, 337, 340, 341, 342, 523], "follow": [1, 2, 4, 6, 7, 8, 9, 19, 114, 126, 159, 179, 214, 253, 258, 341, 356, 446, 447, 461, 489, 490, 491, 494, 495, 497, 503, 512, 515, 516, 517, 518, 520, 525], "food": 6, "forc": [6, 7, 106, 356, 516, 523], "forg": 516, "format": [6, 151, 222, 258, 284, 285, 286, 287, 288, 516, 522], "formul": [362, 372], "formula": 467, "forth": [434, 516], "forward": [1, 2, 327, 515, 520, 521], "found": [4, 258, 396], "four": 361, "fourier": [160, 161, 162, 164, 165, 166, 171, 172, 173], "fourth": 517, "frac": [144, 258, 267, 294, 361, 369, 370, 371, 377, 381, 383, 385, 416, 424, 436, 437, 438, 439, 456, 458, 460, 463, 474, 476, 477, 489, 491, 492, 493, 494, 502], "fraction": 19, "framework": 8, "free": 289, "freez": [356, 409, 486], "freq": 158, "frequenc": [158, 163, 420, 425], "frequent": [515, 521], "friend": 6, "fro": 214, "frobeniu": 214, "from": [0, 1, 2, 4, 6, 7, 8, 87, 125, 126, 128, 131, 132, 135, 136, 137, 154, 156, 169, 170, 172, 173, 178, 179, 183, 187, 190, 207, 209, 214, 222, 233, 237, 251, 258, 260, 261, 262, 263, 264, 265, 269, 272, 287, 289, 298, 305, 308, 310, 314, 315, 320, 321, 332, 334, 339, 340, 341, 342, 343, 356, 385, 397, 399, 412, 436, 437, 438, 439, 441, 442, 458, 467, 483, 488, 514, 515, 516, 517, 518, 521, 522, 523, 524, 525], "from_embed": 414, "from_linear": 415, "front": [2, 517], "frozen": [356, 397, 407, 409, 415, 486], "fuction": 139, "full": [0, 1, 2, 7, 69, 83, 114, 156, 212, 300, 410, 411, 458, 496, 515, 516, 517, 521], "full_turn": 425, "fulli": [2, 8, 497, 520, 522, 525], "fun": [102, 150, 152, 184, 199, 327, 330, 331, 515, 517, 519, 521, 525], "fun1": 521, "func": 417, "function": [0, 1, 2, 3, 5, 6, 7, 8, 17, 19, 87, 102, 122, 139, 144, 145, 150, 152, 154, 156, 184, 190, 192, 199, 204, 205, 207, 208, 209, 210, 211, 214, 215, 216, 219, 220, 233, 293, 294, 327, 330, 331, 337, 338, 340, 341, 342, 356, 362, 372, 374, 375, 378, 379, 380, 386, 387, 391, 393, 397, 404, 409, 413, 417, 418, 419, 421, 422, 423, 424, 426, 427, 428, 429, 430, 431, 432, 433, 445, 446, 447, 448, 449, 450, 451, 453, 454, 455, 469, 474, 476, 477, 478, 479, 480, 481, 483, 488, 499, 512, 514, 516, 519, 521, 522, 524], "functionexport": 152, "functool": 515, "further": [2, 9, 518], "fuse": [1, 515], "fusibl": 515, "futur": [6, 88, 150, 152, 190, 415, 519, 521], "fx": 122, "g": [3, 9, 122, 156, 214, 258, 382, 482, 497, 502, 503, 517, 521, 526], "g_t": [382, 489, 491, 492, 493, 494, 495, 502, 503], "gain": [436, 437, 438, 439], "gamma": [361, 377, 381, 383, 416, 436, 437, 438, 439], "gap": 1, "gate": [375, 376, 448], "gather": [0, 131, 179, 180], "gather_mm": [0, 180], "gather_qmm": 0, "gaurante": 329, "gaussian": [5, 374, 445, 446, 447, 458], "gaussian_nll_loss": 356, "gc_func": 433, "gelu": [356, 446, 447, 515], "gelu_approx": [356, 374, 445], "gelu_fast_approx": [356, 374, 445], "geluapprox": 374, "gelufast": 374, "gener": [0, 1, 3, 5, 12, 19, 110, 153, 154, 156, 169, 170, 215, 221, 237, 261, 266, 267, 268, 269, 272, 273, 433, 512, 515, 519, 521, 526], "generate_stub": 9, "geq": [431, 480], "get": [2, 5, 7, 9, 108, 109, 110, 112, 113, 123, 124, 174, 181, 182, 183, 238, 264, 356, 515, 517, 518, 521, 525], "get_cache_memori": 100, "get_command_encod": 2, "get_kernel": 2, "get_librari": 2, "gguf": [9, 222, 285, 524], "gh": 1, "gii": 1, "git": 9, "github": [5, 7, 9, 515], "give": [2, 6, 7, 29, 515], "given": [0, 2, 9, 16, 18, 29, 39, 87, 88, 98, 101, 103, 118, 119, 120, 121, 126, 128, 141, 146, 148, 159, 160, 161, 162, 
164, 165, 166, 171, 172, 173, 178, 179, 212, 214, 228, 232, 234, 236, 242, 246, 248, 256, 266, 268, 269, 281, 282, 289, 291, 300, 302, 307, 311, 313, 319, 320, 321, 323, 324, 325, 328, 344, 369, 396, 412, 456, 458, 464, 496], "gix": 1, "gix_mult": 1, "giy_mult": 1, "glibc": 9, "global": [129, 131, 132, 133, 135, 136, 137, 142, 270, 335, 338, 512, 515], "glorot": [436, 437], "glorot_norm": 356, "glorot_uniform": 356, "glu": [6, 356], "gm": 1, "gn": 1, "go": [2, 6, 516, 518], "golub": 214, "good": [2, 9, 488, 515, 516, 520, 525], "goroshin": 370, "gower": 6, "gpu": [1, 3, 8, 9, 238, 347, 519, 525], "gputrac": [3, 240], "grad": [2, 5, 7, 122, 327, 338, 488, 498, 515, 516, 517, 518, 519, 521, 523], "grad_fn": [5, 515, 518], "gradient": [0, 5, 7, 122, 184, 308, 327, 335, 337, 338, 356, 397, 410, 415, 433, 463, 486, 488, 489, 490, 492, 493, 494, 495, 498, 501, 503, 515, 516, 518, 519, 521, 522, 523], "grain": 512, "graph": [2, 6, 7, 8, 151, 292, 517, 518], "great": 3, "greater": [0, 6, 29, 149, 186, 254, 338, 431, 480], "greater_equ": 0, "grep": 9, "grid": [2, 154, 156, 237], "grid_dim": 2, "grid_grad": 1, "grid_idx": 1, "grid_sampl": 1, "grid_sample_grad": 1, "grid_sample_ref": 1, "grid_sample_vjp": 1, "grid_shap": 1, "grid_siz": 1, "ground": [5, 6, 457, 467], "group": [0, 1, 107, 108, 109, 110, 111, 112, 113, 126, 131, 132, 133, 135, 136, 137, 159, 180, 258, 259, 329, 335, 336, 363, 364, 377, 414, 415, 516], "group_dim": 2, "group_siz": [0, 126, 180, 258, 259, 336, 414, 415], "groupnorm": 356, "grow": 521, "gru": 356, "guid": [2, 4, 8, 516, 517], "guidelin": 292, "gw": 1, "h": [1, 2, 4, 108, 109, 112, 113, 214, 361, 364, 365, 367, 368, 370, 371, 376, 382, 417, 518, 521], "h_": [376, 382, 417], "h_in": 1, "h_stride": 1, "h_t": [376, 382, 417], "ha": [2, 3, 6, 7, 8, 9, 82, 102, 128, 137, 168, 169, 171, 172, 173, 182, 184, 204, 205, 207, 208, 209, 210, 211, 215, 219, 220, 237, 262, 361, 376, 382, 385, 417, 486, 488, 515, 516, 517, 519, 520, 521, 523, 525], "had": 6, "hadamard": [0, 187], "hadamard_transform": 0, "half": [19, 269, 273, 420, 521], "halv": [375, 448], "hand": [6, 518, 521], "handi": 518, "handl": [2, 356, 515], "happen": [2, 6, 155, 157, 433, 488, 515, 521], "happi": 6, "hard": 6, "hard_shrink": [356, 378], "hard_tanh": [356, 379], "hardcod": 515, "hardshrink": [356, 449], "hardswish": 356, "hardtanh": [356, 450], "hat": [126, 258], "have": [0, 1, 2, 6, 9, 17, 86, 89, 90, 91, 96, 122, 131, 159, 169, 170, 172, 173, 180, 192, 233, 240, 258, 262, 329, 335, 339, 382, 412, 422, 495, 499, 514, 515, 516, 517, 519, 520, 521, 525], "haven": 6, "hazan": 491, "he": [6, 438, 439], "he_norm": 356, "he_uniform": 356, "head": [159, 412, 433], "header": [2, 9, 154, 156], "heart": 6, "heavi": 6, "height": [359, 360, 361, 364, 365, 367, 368, 370, 371, 389, 390], "hello": [339, 343], "help": [2, 6, 515, 525], "helper": [6, 154, 156, 335, 515, 516, 520], "henc": [0, 2, 258, 515], "hendryck": 447, "here": [2, 6, 9, 488, 515, 517, 518, 521, 524, 525], "hermitian": [208, 210], "hf": 382, "hg": 382, "hh": 417, "hi": [6, 382], "hidden": [376, 382, 417, 433, 497], "hidden_dim": [7, 486, 488], "hidden_s": [376, 382, 417], "hierarchi": 347, "high": [269, 273, 356, 373, 442, 483], "high_pad_s": 0, "higher": [2, 191, 293, 464, 516, 518], "highli": 9, "him": 6, "hing": 459, "hinge_loss": 356, "hinton": 502, "hit": 2, "hn": 376, "ho": 382, "hold": [2, 6, 11, 12, 214, 515], "homebrew": 516, "hopkin": 214, "host": 2, "host1": 516, "host2": 516, "host3": 516, "host4": 516, "host_nam": 1, "hostfil": [516, 520], 
"hostnam": [516, 520], "hostname1": [516, 520], "hostname2": [516, 520], "hostname3": 516, "hostname4": 516, "hot": 457, "hour": 6, "how": [2, 4, 6, 7, 356, 358, 359, 360, 363, 364, 365, 366, 367, 368, 373, 388, 389, 390, 414, 434, 498, 515, 519, 525], "howev": [2, 122, 356, 374, 377, 499, 512, 515, 516, 521, 522], "hr": 376, "http": [9, 377, 381, 383, 391, 416, 447, 469], "huber": 460, "huber_loss": 356, "human": [438, 439], "hundr": 9, "hurri": 6, "hutter": 493, "hyperbol": [0, 21, 23, 26, 116, 297, 317, 432, 481], "hz": 376, "i": [0, 1, 2, 3, 4, 6, 7, 8, 9, 17, 19, 29, 38, 82, 87, 88, 101, 108, 109, 110, 112, 113, 114, 117, 118, 119, 120, 121, 122, 127, 128, 131, 132, 134, 135, 136, 137, 139, 146, 150, 152, 154, 155, 156, 157, 158, 159, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 175, 177, 178, 179, 180, 187, 190, 192, 193, 198, 199, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 222, 227, 228, 232, 233, 237, 239, 253, 254, 257, 258, 259, 266, 267, 268, 279, 281, 284, 285, 286, 292, 293, 294, 300, 302, 307, 308, 313, 314, 315, 318, 321, 322, 326, 327, 328, 329, 330, 331, 332, 335, 336, 338, 339, 340, 341, 342, 347, 349, 356, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 369, 370, 371, 374, 376, 377, 381, 382, 383, 385, 388, 389, 390, 396, 397, 403, 405, 406, 408, 409, 410, 411, 412, 413, 415, 416, 417, 420, 425, 431, 433, 434, 438, 439, 445, 447, 455, 456, 458, 463, 464, 467, 468, 470, 475, 480, 486, 488, 490, 492, 493, 495, 496, 498, 499, 504, 506, 507, 512, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526], "i386": 9, "i_n": 1, "i_nw": 1, "i_s": 1, "i_sw": 1, "i_t": 382, "iclr": [492, 493, 494], "id": [7, 9], "idea": [518, 521], "idempot": [397, 409], "ident": [0, 122, 137, 153, 167, 308, 356, 406, 516], "identifi": [2, 339, 514], "idim": 7, "idiom": [7, 515], "idx": [39, 519], "ie": 409, "ieee": 347, "ifac": 516, "ignor": [6, 39, 88, 101, 102, 146, 490, 520], "ih": 417, "ii": 1, "ij": 237, "im": 267, "imag": [0, 364, 365, 367, 368, 370, 371, 434], "imagenet": [438, 439], "imaginari": [51, 189], "immedi": [6, 392], "implement": [0, 1, 5, 7, 158, 159, 179, 180, 373, 396, 412, 420, 422, 425, 431, 433, 434, 480, 489, 490, 491, 494, 495, 497, 498, 510, 515, 518], "impli": 329, "implicit": [512, 515, 518], "implicitli": 521, "import": [2, 3, 5, 6, 7, 9, 122, 133, 187, 190, 214, 287, 327, 339, 340, 341, 342, 343, 356, 358, 359, 360, 361, 381, 388, 389, 390, 399, 434, 455, 457, 464, 483, 486, 488, 515, 516, 518, 519, 521, 522, 523], "import_funct": 517, "imported_ab": 517, "imported_fun": 517, "imported_funct": 517, "improv": [1, 2, 3, 6, 455, 489, 490, 491, 492, 493, 494, 502, 515, 516], "in_ax": [331, 518], "in_channel": [363, 364, 365, 366, 367, 368], "in_dim": [356, 486], "in_proj": 486, "includ": [1, 2, 4, 118, 119, 120, 121, 151, 154, 156, 181, 182, 228, 292, 383, 393, 405, 415, 458, 488, 515, 517, 518, 519, 523, 524, 526], "include_dir": 2, "inclus": [0, 42, 43, 44, 45, 58, 118, 119, 120, 121, 175, 228], "incom": 2, "inconveni": 515, "incorpor": 522, "incorrect": 522, "increas": [293, 520], "increment": 19, "incur": [6, 9], "independ": [130, 370, 371], "index": [0, 1, 2, 8, 10, 29, 39, 148, 153, 179, 184, 237, 254, 298, 299, 314, 315, 327], "indic": [0, 2, 17, 27, 28, 29, 30, 39, 159, 179, 180, 184, 192, 193, 194, 195, 196, 197, 198, 212, 219, 257, 298, 299, 302, 314, 315, 327, 406, 408, 457, 464, 506, 519], "indices_or_sect": [75, 302], "indirectli": 522, "individu": [356, 370, 371], "ineffici": [519, 521], 
"inexact": [12, 198], "inf": [214, 246, 412], "infer": [8, 178, 222, 321, 326, 516, 517], "infin": [0, 194, 196, 197, 246, 388, 389, 390, 494], "infinit": [17, 192, 193], "info": [6, 9], "inform": [3, 4, 6, 7, 9, 141, 174, 238, 285, 286, 347, 356, 361, 374, 412, 516, 517, 518, 525], "inherit": [7, 514], "inifn": 194, "init": [356, 413, 483, 488, 504, 505, 507, 508, 516], "init_fn": [435, 436, 437, 438, 439, 440, 441, 442, 483], "init_valu": 1, "initi": [1, 3, 5, 6, 133, 342, 356, 361, 377, 381, 383, 385, 413, 416, 435, 436, 437, 438, 439, 440, 441, 442, 486, 499, 504, 505, 507, 508, 515, 516, 517, 521], "initializer_list": 0, "inject": 0, "inlin": 0, "inner": [0, 515], "inorm": 381, "inp": [1, 154, 156], "inp_ndim": 1, "inp_shap": 1, "inp_strid": 1, "inplac": [2, 9], "input": [0, 1, 2, 5, 6, 13, 14, 15, 16, 17, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 125, 127, 128, 131, 132, 137, 138, 139, 140, 141, 143, 144, 145, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 179, 180, 184, 185, 186, 187, 189, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 242, 243, 244, 245, 246, 247, 249, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 268, 271, 274, 275, 276, 277, 279, 280, 281, 282, 283, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 310, 311, 312, 314, 315, 316, 317, 318, 319, 320, 321, 322, 324, 325, 326, 327, 328, 329, 331, 332, 334, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 370, 371, 373, 375, 376, 377, 381, 382, 383, 385, 388, 389, 390, 412, 415, 416, 417, 420, 431, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 448, 455, 456, 458, 459, 460, 461, 463, 464, 466, 468, 480, 483, 515, 517, 518, 519, 520, 523, 524], "input_dil": [0, 110], "input_dim": [7, 356, 385, 415], "input_nam": [1, 154, 156], "input_s": [376, 382, 417], "inputs1": 464, "inputs2": 464, "insert": [128, 148, 525], "insid": [515, 517], "inspect": [3, 515, 523], "inspir": 8, "instabl": 468, "instal": [2, 4, 520], "instanc": [6, 39, 122, 258, 343, 356, 381, 392, 393, 394, 397, 399, 400, 401, 406, 409, 410, 411, 422, 486, 516, 520, 522], "instancenorm": 356, "instanti": [1, 2, 7, 521], "instantiate_kernel": 2, "instead": [2, 9, 122, 356, 411, 425, 518, 521], "instruct": [4, 9, 517], "int": [0, 1, 2, 4, 6, 7, 10, 16, 18, 19, 27, 28, 29, 30, 34, 35, 36, 37, 42, 43, 44, 45, 46, 47, 50, 58, 59, 60, 61, 62, 63, 66, 70, 72, 75, 78, 79, 80, 81, 82, 84, 87, 96, 98, 103, 107, 108, 109, 110, 111, 112, 113, 118, 119, 120, 121, 126, 127, 128, 135, 136, 137, 141, 148, 153, 154, 158, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 178, 180, 181, 182, 183, 184, 188, 198, 206, 214, 221, 228, 232, 234, 236, 238, 242, 244, 250, 253, 254, 256, 257, 258, 259, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 277, 279, 281, 282, 289, 292, 293, 298, 299, 300, 301, 302, 305, 306, 307, 311, 312, 314, 315, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 331, 333, 335, 336, 356, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 373, 375, 376, 377, 381, 382, 383, 385, 388, 389, 390, 412, 414, 415, 416, 417, 420, 425, 433, 448, 456, 457, 461, 
466, 468, 486, 497, 504, 506, 507, 508], "int16": 347, "int32": [0, 1, 12, 19, 39, 175, 198, 200, 214, 269, 298, 326, 347, 434, 519, 523], "int64": [12, 347], "int64_t": 2, "int8": [12, 347], "int_0": 144, "integ": [0, 12, 177, 179, 180, 198, 214, 238, 253, 258, 259, 261, 268, 269, 302, 314, 318, 331, 347, 373, 404, 506, 519], "integr": [19, 314, 521], "intend": [0, 515], "interact": 433, "interest": 525, "interfac": [2, 516, 520], "intermedi": 522, "intern": 361, "interpol": 434, "interpret": 4, "interv": [19, 221, 269, 273], "introduc": [0, 281], "intuit": 356, "invalid": [0, 87], "invers": [0, 20, 21, 22, 23, 24, 25, 26, 93, 145, 164, 165, 166, 167, 168, 169, 170, 205, 211, 215, 220], "invert": 0, "involv": [488, 515], "iogpu": 293, "iostream": 4, "ip": [516, 520], "ip1": [516, 520], "ip2": [516, 520], "ip3": 516, "ip4": 516, "is_avail": 133, "is_equival": 2, "is_leaf": [339, 340, 341, 342], "is_leaf_fn": 396, "isclos": 0, "isfinit": 0, "ish": 6, "ishmael": 6, "isinf": 0, "isnan": 0, "isneginf": 0, "isposinf": 0, "issu": [516, 518, 522], "issubdtyp": [2, 12, 347], "item": [0, 2, 5, 6, 7, 340, 517, 521, 522, 523], "iter": [5, 7, 219, 340, 341, 497, 512, 515, 521], "iterm": 9, "itertool": [6, 340], "its": [0, 1, 2, 9, 159, 205, 215, 233, 254, 271, 323, 337, 343, 356, 415, 488, 492, 493, 494, 516, 521, 522, 525], "itself": [2, 336, 496, 499], "ix": 1, "ix_n": 1, "ix_nw": 1, "ix_s": 1, "ix_sw": 1, "iy_n": 1, "iy_nw": 1, "iy_s": 1, "iy_sw": 1, "j": [6, 9, 214, 370, 491, 492, 494], "j8": 2, "jacobian": [2, 199, 330, 523], "jain": 370, "jax": [8, 512], "jit": [1, 154, 156], "jmlr": 491, "jnp": 522, "john": 214, "join": 506, "join_schedul": 488, "jointli": 266, "json": [516, 520], "just": [2, 4, 7, 383, 515, 517, 519], "jvp": [2, 122, 523], "k": [0, 6, 46, 96, 107, 111, 127, 153, 159, 179, 187, 320, 323, 324, 325, 385, 397], "kaim": 439, "kd": [109, 113], "keep": [2, 16, 18, 27, 28, 232, 234, 236, 242, 256, 307, 311, 328, 356, 396, 518, 521], "keepdim": [0, 16, 18, 27, 28, 34, 35, 36, 37, 59, 60, 61, 62, 66, 79, 80, 84, 214, 232, 234, 236, 242, 256, 300, 307, 311, 328], "kei": [1, 3, 6, 150, 159, 190, 238, 261, 262, 263, 265, 266, 267, 268, 269, 271, 272, 273, 339, 340, 396, 397, 409, 412, 499, 512, 514, 517, 518], "kept": 293, "kernel": [2, 8, 9, 107, 108, 109, 110, 111, 112, 113, 154, 156, 358, 388, 515, 519], "kernel_dil": [0, 110], "kernel_s": [358, 359, 360, 363, 364, 365, 366, 367, 368, 388, 389, 390], "key_cach": 6, "key_input_dim": 412, "key_proj": 6, "keyring_1": 9, "keyword": [150, 184, 287, 288, 327, 340, 356, 512, 517, 524, 526], "kh": [108, 109, 112, 113], "kind": 6, "kingma": [492, 494], "kl_div_loss": 356, "kname": 2, "know": [2, 6], "known": [423, 475], "kron": 0, "kroneck": [0, 200], "kth": [0, 29, 254], "kullback": 461, "kw": [108, 109, 112, 113], "kw_onli": 2, "kwarg": [11, 130, 150, 151, 190, 287, 288, 344, 517, 526], "l": [6, 7, 107, 111, 204, 205, 208, 210, 212, 356, 361, 363, 366, 376, 382, 417, 467], "l1": [327, 460, 462, 463, 467], "l1_loss": 356, "l2": [460, 463, 497, 503], "l2_loss": 356, "l_": 460, "la": 214, "label": [3, 5, 457, 464], "label_smooth": 457, "lack": 519, "lambd": [378, 429, 449, 479], "lambda": [340, 341, 342, 356, 378, 392, 397, 404, 429, 449, 473, 479, 489, 490, 491, 492, 493, 494, 495, 502, 503, 515, 516, 517, 518], "languag": [1, 2, 4], "lapack": 9, "larg": [6, 356, 412, 463, 515, 517, 521], "larger": [1, 158, 293, 420, 495], "largest": [214, 246, 320], "lasso": 327, "last": [0, 1, 6, 30, 82, 155, 157, 162, 166, 169, 170, 172, 173, 175, 179, 180, 
183, 191, 204, 205, 207, 208, 209, 210, 211, 215, 216, 219, 220, 233, 262, 301, 318, 329, 363, 364, 365, 366, 367, 368, 370, 371, 377, 434, 496, 497, 522], "later": [3, 9, 488], "launch": [1, 2, 133, 516, 519], "layer": [8, 155, 336, 356, 358, 359, 360, 370, 371, 376, 377, 382, 383, 385, 388, 389, 390, 406, 411, 414, 415, 417, 422, 433, 482, 486, 497, 517, 520], "layer_s": 7, "layernorm": 356, "layout": 1, "lazi": [8, 486, 523], "lazili": [6, 356], "lceil": 96, "ld": [376, 382, 417], "lead": [0, 19, 87, 515], "leaf": [102, 336, 339, 340, 341, 342, 396], "leaf_modul": 356, "leaki": [384, 452], "leaky_relu": 356, "leakyrelu": 356, "learn": [5, 7, 8, 361, 377, 381, 383, 413, 416, 488, 489, 490, 491, 492, 493, 494, 495, 497, 502, 503], "learnabl": [363, 364, 365, 366, 367, 368, 422], "learning_r": [7, 488, 489, 490, 491, 492, 493, 494, 495, 497, 499, 502, 503, 504, 505, 506, 507, 508, 515], "least": [6, 89, 90, 91, 101, 204, 205, 207, 208, 209, 210, 211, 215, 216, 219, 220, 258], "leav": [2, 88, 146, 340, 341, 342], "lectur": 502, "lecun": 370, "left": [0, 6, 158, 201, 214, 258, 281, 374, 420, 434, 446, 447, 458, 460, 468], "left_shift": 0, "leibler": 461, "len": [6, 162, 166, 170, 173, 187, 506], "length": [6, 167, 305, 361, 363, 366, 376, 382, 417, 506], "leq": [460, 473], "less": [0, 1, 6, 29, 203, 254, 293, 335, 420, 467, 496, 516], "less_equ": 0, "let": [1, 2, 5, 6, 205, 515, 517, 518, 521, 522], "level": [0, 179, 180, 438, 439], "lh": [376, 382, 417], "lhs_indic": [0, 179, 180], "lhs_mask": 96, "lib": [2, 516], "libbla": 9, "libcudnn9": 9, "liblapack": 9, "libmlx": 9, "libmlx_ext": 2, "libmpi": 516, "librari": [1, 2, 4, 9, 349, 356, 516, 517], "like": [2, 6, 8, 136, 150, 152, 190, 198, 251, 334, 371, 463, 499, 501, 515, 516, 517, 518, 520, 521, 522, 523, 525], "likelihood": [458, 466], "limit": [0, 2, 101, 289, 292, 293, 519], "linalg": 187, "line": [6, 516, 517, 520, 521, 522], "linear": [0, 2, 6, 7, 8, 217, 218, 336, 340, 356, 362, 372, 374, 375, 384, 399, 415, 417, 418, 419, 421, 423, 434, 443, 444, 445, 446, 447, 448, 452, 471, 472, 473, 475, 483, 486, 499, 506, 507, 515, 517], "linear1": 6, "linear2": 6, "linear3": 6, "linear_schedul": [488, 506], "linearli": 412, "link": [2, 4, 9], "linspac": 0, "lion": 488, "list": [1, 6, 11, 16, 18, 31, 75, 82, 87, 88, 89, 90, 91, 98, 102, 103, 110, 141, 146, 154, 156, 161, 162, 163, 165, 166, 167, 169, 170, 172, 173, 178, 184, 199, 214, 232, 234, 236, 237, 242, 250, 253, 256, 261, 262, 263, 265, 266, 267, 269, 272, 273, 285, 300, 302, 306, 307, 311, 318, 319, 322, 327, 328, 330, 333, 339, 342, 343, 356, 397, 399, 400, 401, 402, 407, 409, 410, 411, 486, 492, 493, 494, 495, 496, 506, 514, 515, 516, 517, 518, 520, 521], "listen": 516, "liter": [2, 253, 434, 438, 439, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468], "littl": 6, "liu": 6, "live": [8, 154, 156, 525], "ll": [1, 5, 7, 460, 515, 518], "llama": 6, "llamaattent": 6, "llamaencoderlay": 6, "llm": 8, "load": [2, 7, 8, 349, 399, 516], "load_weight": [356, 521], "loader": 7, "loader_path": 2, "loan": 214, "loc": [1, 265, 267], "local": [356, 370, 516], "localhost": [516, 520], "locat": [0, 2, 4, 87, 298, 299, 410, 411, 516, 519, 525], "log": [0, 225, 227, 232, 386, 387, 453, 454, 455, 458, 461, 463, 466, 478], "log10": 0, "log1p": 0, "log2": 0, "log_cosh_loss": 356, "log_sigmoid": [356, 386], "log_softmax": [356, 387], "logaddexp": 0, "logarithm": [0, 223, 224, 225, 226], "logcosh": 463, "logcumsumexp": 0, "logic": [0, 2, 229, 230, 231, 516], "logical_and": 0, 
"logical_not": 0, "logical_or": 0, "logist": [0, 5, 294, 447, 475], "logit": [6, 262, 455, 457, 515], "logsigmoid": 356, "logsoftmax": 356, "logsumexp": [0, 228], "long": 6, "longer": [6, 114, 518], "look": [2, 6, 516], "lookup": 373, "loop": [6, 7, 515, 516, 518, 521], "loshchilov": 493, "loss": [5, 7, 327, 356, 488, 515, 516, 518, 521], "loss_and_grad": 356, "loss_and_grad_fn": [7, 488, 515, 518], "loss_fn": [5, 7, 488, 515, 518], "loss_grad_fn": 516, "lot": 518, "low": [269, 273, 442, 483], "low_pad_s": 0, "lower": [204, 205, 208, 210, 218, 220, 258, 269, 272, 273, 323, 442], "lr": [5, 495], "lr_schedul": [504, 505, 506, 507, 508], "lstm": 356, "lto": 2, "lu": [6, 213], "luckili": 521, "lvalu": 327, "m": [0, 2, 4, 6, 9, 96, 153, 179, 187, 214, 323, 489, 515], "m1": [1, 6, 515, 518, 525], "m10": 347, "m7": 347, "m_": [492, 493, 494, 495], "m_t": [492, 493, 494, 495], "mac": 516, "machin": [6, 8, 9, 502, 516], "maco": [9, 293], "macosx": 9, "made": [6, 349], "mai": [2, 4, 88, 150, 152, 179, 180, 190, 214, 336, 370, 497, 516, 518, 519], "main": [4, 8, 128, 153, 154, 156, 321, 340, 341, 356, 516], "maintain": [370, 371, 495], "major": [0, 2, 106], "make": [1, 2, 3, 4, 6, 7, 9, 106, 151, 152, 233, 248, 291, 356, 496, 504, 505, 507, 508, 515, 521, 523, 525], "make_shar": 2, "malloc": 2, "man": 6, "manag": [8, 309, 512, 516, 517, 525], "mani": [1, 2, 87, 302, 363, 364, 365, 366, 367, 368, 373, 414, 515, 516, 517, 521], "manual": [356, 516], "map": [2, 7, 39, 222, 340, 373, 392, 517], "map_fn": [392, 396], "map_torch_to_mlx": 6, "margin": [464, 468], "margin_ranking_loss": 356, "mask": [0, 6, 96, 159, 406, 412, 519], "mask_lh": [0, 96], "mask_n": 1, "mask_nw": 1, "mask_out": [0, 96], "mask_rh": [0, 96], "mask_s": 1, "mask_sw": 1, "matadata": 222, "match": [9, 159, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 181, 399, 434, 457, 519, 522], "materi": [6, 8], "math": [6, 468, 515], "mathbf": 205, "mathcal": [267, 385], "mathemat": 214, "mathrm": [144, 294, 381], "matmul": [0, 179, 525], "matric": [214, 216, 219], "matrix": [0, 5, 15, 46, 96, 126, 127, 153, 179, 180, 187, 188, 204, 205, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 219, 220, 233, 237, 258, 259, 266, 414, 415, 440, 483], "matter": [6, 356, 517], "matur": 516, "max": [0, 1, 2, 214, 235, 362, 388, 389, 390, 413, 443, 450, 451, 456, 458, 459, 464, 468, 470, 472, 490, 494, 515, 518, 525], "max_buffer_s": 238, "max_freq": 425, "max_i": 258, "max_norm": 338, "max_recommended_working_set_s": [238, 293], "max_val": 450, "maximum": [0, 7, 27, 39, 101, 118, 183, 292, 338, 356, 384, 388, 389, 390, 418, 425, 446, 447, 452, 471, 486, 521], "maxpool1d": 356, "maxpool2d": 356, "maxpool3d": 356, "maxtotalthreadsperthreadgroup": 2, "mca": [516, 520], "md": 214, "me": 6, "mean": [0, 1, 5, 6, 7, 154, 157, 265, 266, 267, 327, 356, 361, 377, 397, 416, 441, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 515, 516, 518, 522], "meant": 122, "measur": 525, "mechan": 433, "medic": 371, "meet": 9, "member": [356, 402, 407], "memori": [0, 1, 2, 8, 87, 100, 154, 181, 182, 183, 278, 289, 292, 293, 433, 486, 490, 515, 521, 522], "memory_order_relax": 1, "memory_s": [238, 293], "memoryview": [521, 522], "merg": 515, "meshgrid": 0, "metadata": [5, 222, 285, 286], "metal": [2, 4, 8, 154, 156, 292], "metal_captur": 3, "metal_kernel": 1, "metal_path": 9, "metallib": [2, 9], "method": [2, 6, 10, 11, 31, 122, 130, 174, 336, 344, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 
376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 403, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 486, 489, 490, 491, 492, 493, 494, 495, 496, 497, 499, 502, 503, 510], "millisecond": [9, 515, 525], "min": [0, 2, 214, 243, 362, 413, 443, 450, 451, 470, 472], "min_freq": 425, "min_i": 258, "min_val": 450, "mind": 6, "mine": 6, "minibatch": 7, "minim": [516, 520], "minimum": [0, 28, 39, 101, 119, 425, 455, 456], "minsizerel": 9, "minu": 149, "minut": 6, "mish": 356, "mismatch": 517, "miss": [399, 517, 524], "mix": 519, "mkdir": [3, 9], "ml": 9, "mlp": [7, 356, 433, 488], "mlp_dim": [6, 433], "mlx": [1, 3, 5, 6, 7, 9, 349, 356, 483, 486, 488, 512, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525], "mlx_build_acceler": 4, "mlx_build_benchmark": 9, "mlx_build_cpu": 9, "mlx_build_cuda": 9, "mlx_build_exampl": 9, "mlx_build_gguf": 9, "mlx_build_met": [2, 4, 9], "mlx_build_metallib": 2, "mlx_build_python_bind": 9, "mlx_build_safetensor": 9, "mlx_build_test": 9, "mlx_cxx_flag": 4, "mlx_disable_compil": [129, 142, 515], "mlx_ext": 2, "mlx_ext_metallib": 2, "mlx_found": 4, "mlx_include_dir": [2, 4], "mlx_librari": 4, "mlx_metal_debug": [3, 9], "mlx_metal_jit": 9, "mlx_root": 4, "mlx_sample_extens": 2, "mlx_trace": 3, "mlxfn": [150, 152, 190, 517], "mnist": 7, "mode": [0, 1, 2, 114, 126, 180, 253, 258, 259, 336, 395, 406, 408, 414, 415, 434, 438, 439], "model": [5, 7, 8, 287, 336, 337, 340, 341, 356, 392, 395, 397, 399, 403, 406, 408, 409, 410, 412, 433, 483, 486, 488, 498, 499, 501, 515, 516, 517, 521], "modest": 2, "modif": 522, "modifi": 522, "modul": [2, 4, 6, 7, 336, 337, 422, 433, 483, 501, 514, 515, 521], "moment": [6, 490, 494, 516], "momentum": [361, 495, 497, 499, 503, 515], "monei": 6, "monitor": 520, "monoton": 469, "moor": 215, "more": [1, 2, 3, 4, 7, 11, 82, 128, 150, 179, 204, 205, 207, 208, 209, 210, 211, 212, 215, 219, 220, 233, 258, 285, 286, 289, 292, 347, 356, 361, 370, 420, 425, 433, 434, 436, 437, 438, 439, 455, 512, 515, 516, 518, 519, 523, 525], "moreov": 520, "most": [2, 159, 262, 326, 356, 501, 515, 516, 518, 519, 521], "move": [0, 2, 244, 525], "moveaxi": 0, "mpi": [133, 349], "mpirun": [516, 520], "mse": 327, "mse_loss": 356, "mtl": 2, "mtl_capture_en": 3, "mtlcommandbuff": 2, "mu": 503, "much": [1, 2, 6, 358, 359, 360, 388, 389, 390, 515, 521], "multi": [8, 159, 363, 364, 365, 366, 367, 368, 517, 519, 522], "multidimension": 237, "multiheadattent": [6, 356], "multioptim": 488, "multipl": [0, 1, 9, 15, 96, 152, 155, 157, 179, 180, 233, 245, 259, 412, 425, 505, 506, 508, 515, 521, 524], "multipli": [0, 2, 39, 180, 259, 369, 425, 434], "muon": 488, "murtadha": 6, "must": [0, 1, 2, 3, 9, 96, 101, 150, 158, 159, 178, 180, 208, 210, 214, 258, 261, 262, 266, 269, 272, 273, 332, 434, 522], "mx": [1, 2, 3, 4, 5, 6, 7, 39, 88, 104, 105, 122, 133, 136, 150, 151, 152, 154, 156, 159, 175, 190, 198, 200, 207, 208, 209, 210, 212, 214, 216, 222, 268, 287, 298, 299, 326, 327, 338, 356, 358, 359, 360, 361, 372, 381, 384, 388, 389, 390, 392, 399, 403, 418, 434, 435, 436, 437, 438, 439, 440, 441, 442, 444, 452, 455, 456, 457, 461, 464, 471, 481, 483, 486, 488, 512, 515, 516, 517, 518, 519, 521, 522, 523, 524, 525, 526], "mxfp4": [126, 258], "my": [6, 9], "my_devic": 526, "my_path": 287, "my_script": [516, 520], "myexp": [1, 154, 156], "myexp_strid": 1, "mymlp": 486, "n": [0, 1, 2, 6, 31, 96, 107, 108, 109, 110, 111, 112, 113, 153, 159, 160, 162, 164, 166, 168, 171, 173, 187, 188, 
266, 267, 307, 323, 328, 361, 363, 364, 365, 366, 367, 368, 370, 371, 376, 382, 417, 434, 463, 468, 516, 520], "n_kv": 159, "n_q": 159, "n_t": 376, "naiv": [2, 518], "naive_add": 518, "name": [1, 2, 122, 151, 154, 156, 180, 222, 259, 285, 286, 287, 288, 356, 377, 396, 399, 401, 516, 519, 524], "named_modul": 356, "namespac": 4, "nan": [0, 17, 86, 192, 193, 195, 246], "nan_to_num": 0, "nanobind": 2, "nanobind_add_modul": 2, "nativ": [9, 516], "natur": [0, 223, 225, 521], "nb": 2, "nb_domain": 2, "nb_modul": 2, "nb_static": 2, "nbyte": 2, "nc": 361, "nccl": 133, "ndarrai": [31, 519, 521, 523], "ndhwc": [365, 368, 371], "ndim": [0, 1, 2, 175, 214, 219, 434], "ne": 1, "nearest": [1, 434], "necessari": [106, 356], "necessarili": [209, 320], "need": [1, 2, 4, 6, 7, 8, 9, 86, 356, 410, 411, 425, 433, 512, 516, 518, 520, 521, 522, 523, 525], "neg": [0, 128, 175, 196, 246, 281, 321, 384, 388, 389, 390, 412, 458, 466, 468, 519], "negat": [0, 247], "negative_slop": [384, 452], "neginf": [0, 246], "neighbor": [434, 520], "neither": [184, 327], "nelem": 2, "nervou": 6, "nest": [82, 102, 342, 356, 486, 514, 518], "nesterov": [497, 503], "network": [6, 8, 335, 361, 370, 373, 436, 437, 483, 486, 497, 502, 516], "neural": [6, 8, 373, 436, 437, 469, 483, 486, 497, 502], "never": [6, 521], "new": [0, 1, 2, 7, 98, 128, 244, 248, 279, 306, 322, 329, 340, 341, 404, 412, 486, 488, 501, 506, 515, 517, 519, 521, 522], "new_tre": 341, "newton": 497, "next": [2, 4, 6, 7, 289, 517], "nh": [376, 382, 417], "nhwc": [361, 364, 367], "nice": [518, 521], "nlc": [361, 363, 366], "nld": [376, 382, 417], "nlh": [376, 382, 417], "nll": [458, 466], "nll_loss": 356, "nn": [2, 6, 7, 287, 340, 356, 483, 486, 488, 499, 501, 515, 517, 521], "nobodi": 6, "node": [88, 102, 146, 331, 341, 342, 516, 520], "nois": 5, "noisi": 5, "nomins": 2, "non": [0, 1, 2, 4, 9, 237, 407, 417, 469, 486], "nondeterminist": 519, "none": [1, 2, 6, 10, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 125, 126, 127, 128, 129, 131, 132, 135, 136, 137, 138, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 155, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 184, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 240, 241, 242, 243, 244, 245, 246, 247, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 286, 290, 291, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 331, 332, 333, 334, 335, 336, 339, 340, 341, 342, 358, 359, 360, 374, 388, 389, 390, 392, 396, 397, 404, 409, 412, 417, 425, 433, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 490, 510, 517, 519], "nonlinear": [417, 515], "nonzero": 519, "noop": [409, 516], "nor": [2, 184, 327], "norm": [6, 157, 
338, 377, 468, 494, 495], "norm1": 6, "norm2": 6, "norm_first": 433, "normal": [1, 2, 5, 6, 154, 155, 156, 157, 159, 207, 208, 266, 272, 356, 358, 359, 360, 361, 377, 381, 383, 388, 389, 390, 416, 433, 436, 438, 522, 525], "not_equ": 0, "notabl": [6, 8], "notat": [126, 339, 401], "note": [0, 1, 2, 4, 6, 9, 17, 19, 87, 96, 102, 109, 112, 113, 122, 126, 169, 170, 180, 181, 192, 212, 214, 258, 262, 329, 336, 356, 416, 434, 488, 519, 522, 524], "noth": [6, 122, 356, 521], "notic": [6, 517, 518, 524], "now": [1, 2, 6, 9, 415, 515, 522], "np": [1, 6, 7, 516, 522, 523], "npy": [222, 284, 524], "npz": [6, 222, 287, 288, 399, 403, 524], "ns_step": 497, "nuc": 214, "nuclear": 214, "nuisanc": 516, "nullopt": 0, "num": [0, 6, 221, 271], "num_class": [7, 488], "num_decoder_lay": 433, "num_embed": [373, 414], "num_encoder_lay": 433, "num_epoch": [7, 488], "num_exampl": 5, "num_featur": [5, 361], "num_group": 377, "num_head": [6, 412, 433], "num_it": 5, "num_lay": [6, 7, 488], "num_param": 356, "num_paramet": 413, "num_sampl": 262, "num_split": 0, "number": [0, 2, 12, 19, 64, 74, 102, 108, 109, 110, 112, 113, 126, 151, 153, 159, 180, 184, 188, 199, 221, 246, 253, 258, 259, 262, 265, 267, 271, 273, 277, 281, 282, 318, 319, 323, 327, 330, 331, 335, 336, 356, 361, 363, 364, 365, 366, 367, 368, 370, 371, 377, 381, 412, 413, 433, 434, 436, 437, 438, 439, 497, 504, 506, 507, 512, 515, 518, 520, 526], "number_of_el": 0, "numer": [6, 155, 157, 214, 227, 232, 300, 361, 377, 381, 383, 416, 455, 456, 458, 468, 489, 490, 491, 492, 493, 494, 502, 515, 521], "numpi": [2, 6, 7, 8, 14, 17, 19, 92, 94, 95, 97, 98, 138, 139, 143, 185, 186, 192, 201, 202, 203, 207, 209, 227, 233, 235, 243, 245, 249, 255, 276, 280, 310, 521, 523, 524], "nvidia": 9, "nw": 1, "nwhc": 370, "o": [9, 159, 382], "o_t": 382, "obj": 285, "object": [3, 11, 31, 52, 82, 102, 151, 152, 154, 156, 198, 331, 339, 340, 341, 342, 347, 370, 433, 514, 520], "observ": 6, "occupi": [126, 180, 258, 259], "occur": 522, "odd": 167, "odim": 7, "odot": [376, 382], "off": [6, 9, 521], "offer": 463, "offset": [0, 1, 2, 6, 47, 87, 128, 155, 158, 321], "often": 371, "ok": [399, 515, 517, 518], "okai": [515, 521], "old": 6, "older": [150, 152, 190], "omit": [494, 516], "onc": [1, 2, 9, 515, 517], "one": [0, 2, 4, 6, 9, 39, 82, 89, 97, 101, 108, 109, 110, 112, 113, 133, 148, 150, 153, 155, 157, 158, 179, 214, 225, 233, 259, 262, 305, 310, 326, 335, 347, 366, 367, 368, 409, 434, 457, 496, 516, 517, 520, 525], "ones": [0, 2, 6, 251, 287, 299, 323, 410, 411, 488, 516, 519], "ones_lik": 0, "onli": [1, 2, 6, 8, 86, 96, 108, 109, 110, 112, 113, 122, 159, 179, 208, 210, 214, 219, 258, 266, 293, 329, 347, 356, 396, 397, 399, 404, 406, 409, 410, 411, 486, 515, 516, 517, 518, 520, 524, 525], "onlin": 491, "op": [1, 2, 252, 329, 397, 521], "open": [3, 9, 19, 269, 273, 516], "openmpi": 516, "oper": [3, 6, 8, 10, 38, 89, 90, 91, 110, 159, 179, 180, 255, 257, 300, 308, 315, 344, 347, 356, 433, 495, 515, 516, 518, 519, 521, 522, 523, 525, 526], "operand": [140, 141, 179], "opportun": 515, "opt": [498, 516], "optim": [1, 3, 5, 7, 8, 410, 515, 516, 518, 521], "option": [0, 3, 6, 15, 16, 18, 19, 27, 28, 29, 30, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 87, 89, 90, 91, 96, 102, 103, 107, 108, 109, 110, 111, 112, 113, 114, 118, 119, 120, 121, 122, 126, 127, 128, 131, 132, 133, 135, 136, 137, 150, 152, 153, 155, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 
168, 169, 170, 171, 172, 173, 175, 178, 179, 180, 184, 188, 196, 197, 200, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 228, 232, 234, 236, 237, 242, 246, 250, 253, 254, 256, 258, 259, 261, 262, 263, 265, 266, 267, 268, 269, 271, 272, 273, 277, 279, 281, 286, 300, 301, 302, 305, 306, 307, 311, 313, 314, 318, 320, 321, 322, 323, 324, 325, 326, 327, 328, 331, 333, 335, 336, 339, 340, 341, 342, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 376, 382, 385, 388, 389, 390, 392, 396, 397, 399, 404, 409, 412, 414, 415, 417, 420, 425, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 489, 490, 491, 492, 493, 494, 495, 497, 499, 502, 503, 504, 512, 515, 517, 524, 526], "ord": 214, "order": [0, 1, 29, 87, 110, 141, 208, 209, 210, 214, 254, 320, 356, 377, 410, 422, 499, 515, 518, 520], "ordinari": 191, "org": [377, 381, 383, 391, 416, 447, 469], "origin": [6, 128, 338, 361, 405, 436, 437, 438, 439, 489, 490, 491, 494, 495, 497, 517, 522], "orthogon": 497, "orthonorm": 187, "other": [0, 2, 6, 8, 198, 214, 356, 398, 486, 495, 515, 516, 517, 519, 520, 521, 523], "other_input": 356, "otherwis": [19, 110, 133, 268, 336, 339, 340, 341, 342, 397, 399, 409, 431, 433, 434, 449, 455, 460, 467, 479, 480, 521, 522], "our": [1, 2, 6, 7, 422, 489, 490, 491, 494, 495, 497, 516], "out": [0, 1, 2, 9, 96, 154, 156, 159, 190, 370, 371, 406, 515, 516, 517, 518, 519], "out_ax": [331, 518], "out_channel": [363, 364, 365, 366, 367, 368], "out_dim": [356, 486], "out_dtyp": 2, "out_idx": 2, "out_mask": 96, "out_proj": [6, 486], "out_ptr": 2, "out_shap": [1, 2], "outer": [0, 515, 521], "outlier": 463, "output": [0, 1, 2, 6, 9, 16, 17, 18, 19, 29, 87, 96, 97, 98, 102, 104, 105, 106, 111, 112, 113, 118, 119, 120, 121, 122, 140, 151, 153, 154, 155, 156, 157, 158, 159, 168, 171, 172, 173, 178, 179, 184, 187, 188, 192, 214, 221, 228, 232, 234, 236, 237, 242, 246, 250, 251, 254, 256, 257, 261, 262, 263, 265, 266, 267, 269, 272, 273, 287, 288, 298, 299, 300, 305, 307, 311, 315, 321, 323, 327, 328, 329, 330, 331, 332, 333, 334, 361, 363, 364, 365, 366, 367, 368, 381, 385, 412, 415, 431, 433, 434, 436, 437, 438, 439, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 480, 483, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525], "output_dim": [7, 356, 385, 415], "output_directori": 2, "output_dtyp": [1, 154, 156], "output_fil": 6, "output_nam": [1, 154, 156], "output_pad": [0, 111, 112, 113, 366, 367, 368], "output_shap": [1, 154, 156], "output_strip_trailing_whitespac": 4, "output_vari": 4, "outsid": [154, 156, 175], "over": [0, 2, 6, 7, 16, 18, 27, 28, 29, 30, 107, 108, 109, 110, 111, 112, 113, 118, 119, 120, 121, 162, 163, 166, 167, 170, 173, 191, 214, 219, 221, 228, 232, 234, 236, 242, 254, 256, 283, 300, 301, 307, 311, 318, 320, 328, 361, 363, 364, 365, 366, 367, 368, 377, 383, 416, 457, 504, 507, 516, 518, 520], "overal": 2, "overhead": [1, 515, 521, 525], "overlap": 1, "overload": 19, "overrid": [2, 142], "overview": 3, "overwrit": 6, "own": [9, 516, 522], "owndata": 522, "p": [9, 212, 261, 356, 369, 370, 371, 468, 492, 494], "pack": [180, 258, 259], "packag": [2, 5, 7, 9, 349, 483, 516, 520], "package_data": 2, "pad": [0, 1, 107, 108, 109, 110, 111, 112, 113, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 358, 359, 360, 363, 364, 365, 366, 367, 368, 388, 389, 390], "pad_valu": 0, "pad_width": [0, 253], "padding_hi": 0, "padding_lo": 0, "page": [516, 523], "pain": 6, 
"pair": [0, 2, 253, 399, 420], "pairwis": 468, "pan": 6, "paper": [361, 425, 489, 490, 491, 494, 495], "parallel": [516, 525], "param": [327, 336, 356, 483, 517, 518], "paramet": [0, 1, 2, 5, 6, 7, 13, 14, 15, 16, 17, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 125, 126, 127, 128, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 240, 242, 243, 244, 245, 246, 247, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 381, 382, 383, 384, 385, 388, 389, 390, 392, 393, 396, 397, 399, 404, 405, 406, 409, 410, 411, 412, 413, 414, 415, 416, 417, 420, 422, 425, 429, 431, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 448, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 480, 482, 483, 486, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 501, 502, 503, 504, 505, 506, 507, 508, 510, 515, 516, 517, 518, 521], "parameter_scal": 490, "parametr": [413, 470], "pars": [6, 151], "parse_arg": 6, "parser": 6, "part": [1, 2, 51, 67, 150, 152, 189, 190, 274, 518, 519], "parti": 516, "partial": [410, 411, 515, 521], "particip": [131, 132, 135, 136, 137], "particular": [258, 377], "particularli": 515, "partit": [0, 29], "pass": [1, 2, 6, 7, 9, 69, 83, 179, 180, 252, 253, 327, 335, 337, 339, 340, 341, 356, 397, 409, 410, 411, 422, 515, 516, 517, 520, 521], "password": [516, 520], "path": [3, 4, 9, 141, 150, 151, 152, 190, 222, 240, 284, 285, 286, 287, 288, 336, 341, 399, 496, 516, 520], "pattern": [356, 521], "peak": [183, 278], "penalti": [497, 503], "penros": 215, "pep": 522, "per": [6, 7, 126, 159, 180, 258, 259, 335, 336, 361, 377, 381, 383, 416, 510, 515, 516, 520, 521], "perceptron": [8, 517], "perf_count": 515, "perfectli": 521, "perform": [0, 1, 2, 3, 6, 8, 15, 96, 110, 118, 119, 120, 121, 137, 140, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 179, 180, 187, 208, 210, 228, 233, 259, 282, 300, 314, 335, 356, 377, 433, 438, 439, 488, 497, 515, 516, 519, 521, 525], "perhap": [2, 6], "perm": 7, "permtuat": 268, "permuat": 212, "permut": [0, 7], "persist": 9, "pg": 214, "phi": [374, 445], "physic": 516, "pi": [144, 374, 425, 446, 518], "pick": 2, "pip": [2, 4, 9], "pipelin": 2, "pivot": [212, 213], "pixel": 370, "place": [6, 39, 281, 282, 336, 516, 521, 522], "placehold": 515, "plai": [2, 6], "plain": 422, "plan": [2, 515], "platform": 9, "plot": 516, "plu": [0, 225], "png": 516, "point": [0, 2, 5, 6, 9, 87, 174, 177, 258, 259, 
347], "pool": [358, 359, 360, 388, 389, 390, 525], "popul": 2, "port": 520, "portion": 369, "posinf": [0, 246], "posit": [0, 6, 29, 128, 158, 175, 184, 197, 204, 205, 244, 246, 254, 266, 281, 321, 327, 340, 356, 363, 364, 365, 366, 367, 368, 412, 420, 425, 458, 468, 517], "possibl": [133, 179, 302, 373, 414, 515, 516, 519, 525], "possibli": [1, 6, 15, 96, 179, 233, 338], "postur": 6, "power": [0, 518, 522], "practic": [2, 515], "pre": [9, 159, 455], "preced": 377, "precis": [0, 6, 149, 159, 258, 356, 374, 416, 455, 498, 515], "preclud": 356, "pred": [459, 463], "predic": [336, 404, 496], "predict": [455, 458, 459, 460, 461, 462, 463, 465, 466, 467], "prefix": [331, 339, 341], "prelu": 356, "prepar": [2, 6, 516], "prepend": [3, 233], "preprint": [6, 489, 495], "preprocessor": 9, "present": 1, "preserv": [279, 518], "press": [6, 214], "pretti": [515, 521], "prevent": [308, 468, 522], "previou": [289, 292, 293], "primal": [1, 2, 122, 199, 330], "primit": 518, "print": [1, 2, 5, 6, 7, 9, 88, 122, 200, 338, 339, 340, 341, 343, 356, 512, 515, 516, 517, 518, 519, 520, 521, 522, 523], "prior": [257, 314, 315], "priorit": 518, "privat": [2, 4], "prng": [261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 512], "prob": 455, "probabl": [9, 269, 369, 370, 371, 415, 455, 457, 461, 525], "problem": [5, 7, 356], "process": [6, 110, 114, 130, 131, 132, 133, 135, 136, 137, 335, 340, 341, 371, 373, 433, 514, 516, 520], "processor": 9, "prod": [0, 1], "produc": [0, 2, 9, 102, 412, 483, 517], "product": [0, 2, 15, 87, 120, 191, 199, 200, 206, 233, 252, 256, 318, 330, 412, 523], "profil": 3, "program": [4, 183], "programmat": 411, "project": [3, 4, 6, 412, 517], "project_source_dir": 2, "promot": [2, 159], "promote_typ": 2, "promoted_dtyp": 2, "prompt": 6, "propag": [518, 519], "properti": [32, 39, 48, 51, 53, 64, 65, 67, 72, 74, 405, 408, 500, 516, 518], "proportion": 338, "protocol": 522, "provid": [0, 2, 6, 87, 150, 151, 184, 267, 268, 281, 318, 327, 335, 340, 342, 349, 356, 392, 397, 399, 409, 410, 411, 414, 415, 433, 434, 482, 486, 496, 516, 517, 524, 526], "pseudo": [215, 512], "pth": 6, "public": [2, 356], "pun": 0, "pure": [1, 122, 356, 488], "purpos": [1, 214, 516], "purs": 6, "push": 2, "push_back": 2, "put": [0, 1, 7, 257, 515, 516], "put_along_axi": [0, 212], "py": [2, 6, 9, 516, 520], "pypi": 9, "python": [1, 3, 4, 6, 52, 72, 82, 88, 146, 335, 339, 340, 341, 342, 343, 486, 498, 499, 501, 514, 516, 517, 518, 520, 522], "python_execut": 4, "python_requir": 2, "pytorch": [6, 8, 374, 377, 518], "pytorch_compat": 377, "q": [159, 216], "quantiz": [0, 126, 180, 222, 259, 414, 415], "quantized_matmul": 0, "quantizedembed": 356, "quantizedlinear": 356, "quarter": 6, "queri": [6, 159, 293, 412], "query_input_dim": 412, "query_proj": 6, "question": [6, 521], "queue": 3, "quick": [2, 8], "quit": [518, 522], "quotient": [0, 138, 139, 177], "r": [2, 6, 216, 327, 370, 376], "r_t": 376, "race": 525, "radian": [0, 125], "rag": 6, "rain": 6, "rais": [0, 6, 122, 214, 255, 302, 399, 517], "ram": [6, 292], "random": [1, 2, 3, 5, 6, 7, 8, 154, 156, 159, 358, 359, 360, 361, 381, 388, 389, 390, 399, 406, 515, 517, 518, 525, 526], "randomli": [5, 6, 268, 369, 370, 371], "rang": [0, 2, 3, 5, 6, 7, 9, 19, 175, 179, 221, 437, 439, 446, 447, 488, 504, 505, 506, 507, 508, 512, 515, 518, 521, 525], "rank": [0, 135, 136, 137, 464, 516, 520], "rate": [5, 488, 489, 490, 491, 492, 493, 494, 495, 497, 502, 503], "rather": [2, 518, 525], "ratio": [0, 25], "rceil": 96, "re": [7, 9, 267, 483], "reachabl": 516, "readabl": 
3, "real": [0, 168, 169, 170, 171, 172, 173, 204, 205, 207, 208, 209, 210, 267], "realli": 383, "reason": [1, 6, 519], "reboot": 9, "receiv": [135, 136, 336, 506, 516, 522], "reciproc": [0, 283], "reclaim": 289, "recommend": [9, 292, 495, 497], "recompil": [102, 515], "reconstruct": 212, "record": [3, 183, 521], "recreat": [343, 488], "rectifi": [384, 418, 419, 438, 439, 452, 471, 472], "recurr": [376, 382, 417], "recurs": [151, 356, 396, 397, 402, 407, 409, 486], "recv": [136, 516], "reduc": [0, 1, 9, 16, 18, 27, 28, 132, 232, 234, 236, 242, 256, 307, 311, 328, 335, 342, 361, 433, 463], "reduct": [16, 18, 132, 232, 234, 242, 256, 342, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468], "redund": 518, "refer": [214, 381, 391, 405, 436, 437, 438, 439, 447, 469, 519], "reflect": [405, 515, 519, 522], "regard": 374, "regardless": [87, 159, 516], "regist": [2, 7], "regress": [8, 463], "regular": [39, 370, 469, 493, 515, 517, 519], "regularli": 2, "reimplement": 2, "rel": [17, 192, 490, 515, 516], "relative_step": 490, "releas": 4, "relev": 2, "reli": [1, 2], "relu": [356, 413, 433, 470, 483], "relu6": 356, "remain": [0, 6, 293, 327, 341, 369, 370, 371, 516], "remaind": [0, 139], "remov": [0, 128, 233, 262, 305, 457], "rep": [0, 319], "repeat": [0, 319], "repeatedli": 5, "repetit": 277, "replac": [0, 6, 246, 410, 411, 433, 467], "replai": 3, "repli": 6, "repo": [5, 7, 9, 515], "report": [181, 292], "repres": [2, 6, 130, 133, 180, 464, 468, 522], "represent": [6, 213, 258, 329, 339, 343], "request": 154, "requir": [1, 2, 4, 6, 356, 516, 520, 521, 522], "requires_grad": 518, "rerun": [515, 521], "rescal": 338, "research": 8, "reset": 278, "reset_peak_memori": 183, "reshap": [0, 6, 214, 434, 515, 519], "resid": 293, "resolv": 2, "resourc": 2, "resource_limit": 238, "respect": [2, 5, 7, 122, 155, 157, 179, 180, 184, 258, 327, 340, 356, 361, 374, 377, 381, 383, 486, 516, 518, 520, 523], "respons": 2, "rest": [6, 158, 340, 341, 420, 520], "restart": 9, "restor": 281, "result": [0, 6, 15, 19, 39, 82, 87, 102, 151, 155, 157, 180, 200, 214, 233, 259, 266, 277, 292, 306, 340, 341, 342, 347, 425, 455, 515, 516, 518, 522], "resum": 6, "return": [0, 1, 2, 4, 5, 6, 7, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 52, 72, 82, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 122, 125, 126, 127, 128, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 143, 144, 145, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 182, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 242, 243, 244, 245, 246, 247, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 271, 272, 273, 274, 275, 276, 277, 279, 280, 282, 283, 289, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 336, 337, 338, 339, 340, 341, 342, 343, 356, 376, 382, 392, 393, 394, 396, 397, 398, 399, 400, 401, 402, 406, 407, 409, 410, 411, 417, 435, 436, 437, 438, 439, 440, 
441, 442, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 483, 486, 496, 498, 514, 515, 516, 517, 518, 519, 521, 522, 524, 525], "return_metadata": 222, "revers": [0, 2, 42, 43, 44, 45, 58, 87, 118, 119, 120, 121, 228, 322, 425], "rf": 9, "rfft": 168, "rfft2": 169, "rfftn": 170, "rho": 489, "rhs_indic": [0, 179, 180], "rhs_mask": 96, "right": [0, 1, 2, 9, 207, 258, 280, 281, 374, 434, 446, 447, 458, 460, 468], "right_shift": 0, "ring": 133, "rm": [6, 9, 157, 490], "rmsnorm": [6, 356], "rmsprop": 488, "rnn": [356, 376], "robust": 463, "roform": [6, 420], "roll": 0, "root": [0, 6, 157, 283, 303, 416], "rope": [6, 356], "rosetta": 9, "rotari": [6, 158, 420], "rotat": [158, 420], "round": [0, 258], "row": [0, 1, 2, 87, 106, 153, 154, 156, 188, 258, 323], "rpath": 2, "rsqrt": 0, "rtol": [0, 17, 192], "rule": [2, 488], "run": [1, 2, 3, 4, 6, 7, 8, 9, 10, 154, 156, 252, 344, 361, 392, 489, 490, 492, 493, 494, 515, 517, 520, 521, 525, 526], "runtim": [6, 133, 349, 515, 516], "runtime_error": 2, "safetensor": [9, 222, 286, 399, 403, 488, 521, 524], "sai": [2, 6, 483, 521], "said": 6, "sake": 518, "same": [0, 2, 6, 9, 17, 39, 86, 97, 98, 102, 108, 109, 110, 112, 113, 114, 131, 155, 157, 163, 167, 168, 171, 172, 173, 180, 184, 192, 199, 253, 262, 281, 282, 299, 329, 330, 332, 335, 341, 356, 359, 360, 361, 369, 377, 381, 389, 390, 414, 435, 436, 437, 438, 439, 440, 441, 442, 457, 468, 486, 498, 512, 515, 516, 517, 519, 520, 525], "sampl": [2, 5, 6, 221, 261, 262, 263, 265, 266, 269, 272, 273, 436, 437, 438, 439, 441, 442, 458, 464, 468, 512, 515, 517], "sat": 6, "save": [3, 6, 8, 222, 240, 258, 285, 286, 287, 288, 403, 517, 521], "save_gguf": 524, "save_safetensor": [403, 488, 524], "save_weight": 356, "savez": [6, 403, 524], "savez_compress": 524, "saw": [6, 518], "scalar": [0, 2, 14, 15, 17, 31, 52, 82, 86, 92, 93, 94, 95, 96, 98, 101, 138, 139, 143, 177, 178, 184, 185, 186, 187, 192, 201, 202, 203, 221, 227, 229, 230, 231, 233, 235, 243, 245, 246, 249, 253, 255, 261, 267, 269, 272, 273, 276, 280, 285, 310, 327, 329, 332, 337, 468, 517, 518, 521, 523], "scale": [0, 2, 6, 15, 126, 155, 157, 158, 159, 180, 187, 258, 259, 265, 267, 338, 370, 371, 383, 412, 420, 421, 425, 434, 473, 490], "scale_arr": 2, "scale_factor": 434, "scale_paramet": 490, "scatter": 0, "scatter_add": 0, "scatter_add_axi": 0, "scatter_max": 0, "scatter_min": 0, "scatter_prod": 0, "schedul": [2, 488, 504, 505, 506, 507, 508, 510, 525], "schema": [3, 520], "schulz": 497, "scipi": [187, 212], "scope": 356, "score": [6, 159, 464], "script": [516, 520], "sdk": 9, "se": 1, "second": [6, 9, 128, 198, 200, 201, 229, 231, 233, 258, 280, 312, 321, 327, 359, 360, 389, 390, 456, 464, 490, 494, 515, 517, 518, 525], "second_layer_a": 521, "second_layer_b": 521, "secret": 6, "section": [1, 6, 9, 302, 468, 515, 516, 518], "see": [1, 2, 4, 6, 7, 9, 11, 12, 33, 34, 35, 36, 37, 40, 41, 42, 43, 44, 45, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 214, 285, 286, 289, 336, 347, 356, 361, 362, 370, 372, 374, 378, 379, 380, 386, 387, 395, 413, 414, 415, 418, 419, 420, 421, 423, 425, 426, 427, 428, 429, 430, 432, 434, 436, 437, 438, 439, 445, 446, 447, 473, 515, 516, 517, 518, 519, 520, 523, 525], "seed": 264, "seen": [516, 522], "segment": 0, "segmented_mm": 0, "select": [0, 3, 9, 208, 210, 320, 332, 392, 396, 404, 520], "self": [6, 7, 10, 31, 32, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 52, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 
69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 122, 174, 356, 469, 486], "selu": 356, "semant": [14, 92, 94, 95, 97, 98, 138, 139, 143, 185, 186, 201, 202, 203, 227, 233, 235, 243, 245, 249, 255, 276, 280, 310, 525], "semi": [204, 205, 266], "send": 516, "sender": 516, "sennrich": 6, "sensit": 463, "sentencepiec": 6, "separ": [0, 6, 69, 83, 377, 464], "sequenc": [6, 16, 18, 34, 35, 59, 60, 61, 62, 66, 78, 79, 80, 84, 87, 98, 110, 135, 148, 154, 156, 161, 162, 163, 165, 166, 167, 169, 170, 172, 173, 178, 184, 232, 234, 236, 242, 250, 256, 261, 262, 263, 265, 266, 267, 269, 272, 273, 279, 298, 299, 300, 302, 305, 307, 311, 318, 319, 322, 326, 327, 328, 333, 361, 363, 366, 376, 382, 417, 433, 512, 525], "sequenti": [356, 483], "seri": 9, "serial": 488, "set": [2, 4, 6, 7, 9, 102, 122, 129, 131, 132, 133, 135, 136, 137, 142, 155, 157, 158, 238, 289, 290, 291, 292, 293, 309, 335, 374, 383, 385, 395, 397, 404, 405, 406, 409, 410, 415, 420, 431, 456, 468, 480, 486, 488, 490, 492, 493, 499, 512, 517, 518, 521], "set_byt": 2, "set_compute_pipeline_st": 2, "set_data": 2, "set_dtyp": 356, "set_input_arrai": 2, "set_memory_limit": 289, "set_output_arrai": 2, "set_vector_byt": 2, "setup": [2, 4, 5, 7, 9, 515, 516, 517], "sever": [6, 9, 107, 108, 109, 110, 111, 112, 113, 287, 288, 335, 515, 516, 520, 524], "sgd": [5, 7, 488, 495, 499, 504, 505, 508, 515], "shade": [1, 2], "shall": 6, "shape": [0, 2, 3, 6, 7, 69, 86, 87, 96, 97, 98, 102, 107, 108, 109, 110, 111, 112, 113, 128, 131, 135, 136, 150, 152, 154, 156, 159, 160, 163, 164, 167, 168, 171, 172, 173, 178, 179, 187, 199, 211, 220, 233, 250, 251, 261, 262, 263, 265, 266, 267, 269, 272, 273, 279, 281, 299, 326, 329, 330, 332, 333, 334, 356, 358, 359, 360, 361, 363, 364, 365, 366, 367, 368, 370, 371, 376, 381, 382, 385, 388, 389, 390, 399, 417, 435, 436, 437, 438, 439, 440, 441, 442, 457, 468, 488, 515, 517, 518, 519, 523, 525], "shapeless": [0, 102, 150, 152], "share": [8, 126, 154, 180, 258, 259, 329, 516], "shared_memori": 154, "shazeer": 6, "shift": [0, 163, 167, 201, 280, 281, 361], "shop": 6, "should": [1, 2, 4, 5, 6, 7, 9, 87, 100, 128, 131, 154, 155, 156, 157, 159, 199, 240, 257, 258, 293, 315, 321, 327, 330, 335, 336, 339, 356, 363, 364, 365, 366, 367, 368, 370, 371, 406, 412, 422, 457, 459, 464, 486, 496, 497, 514, 515, 516, 517, 518, 521, 522, 526], "show": [9, 347, 515], "shown": 2, "shuffl": 7, "side": [0, 253, 358, 359, 360, 366, 367, 368, 388, 389, 390, 515], "sigma": [374, 375, 376, 382, 424, 436, 437, 438, 439, 447, 448, 453, 474, 475], "sigmoid": [0, 6, 356, 386, 423, 447, 453, 455, 475], "sign": [0, 17, 192, 347, 495], "signal": [114, 434], "signatur": [1, 154, 156], "signedinteg": [12, 198], "signific": 258, "significantli": 516, "silent": [171, 172, 173], "silicon": [2, 6, 8, 9, 525], "silu": 356, "sim": 267, "simd": 1, "simd_sum": 1, "simdgroup": 1, "simdgroup_s": 1, "similar": [6, 180, 198, 340, 410, 411, 412, 456, 516, 522, 524], "similarli": [2, 9, 233, 258, 518, 521], "simpl": [2, 6, 7, 356, 373, 482, 488, 515, 516, 517, 518, 520, 521], "simple_axpbi": 2, "simple_tim": 2, "simplest": [2, 356, 516], "simpli": [2, 6, 9, 372, 384, 418, 444, 452, 471, 481, 486, 515, 516, 518, 520], "simplic": 0, "simplifi": 516, "simultan": 1, "sin": [0, 122, 425, 517, 518, 523], "sinc": [1, 2, 6, 7, 180, 183, 486, 495, 506, 515, 517, 522, 525], "sine": [0, 22, 23, 296, 297, 517, 518], "sing": 214, "singer": 491, "singl": [7, 88, 146, 199, 222, 237, 253, 330, 359, 360, 389, 390, 515, 517, 519, 524], "singleton": [0, 16, 18, 27, 28, 
133, 232, 233, 234, 236, 242, 256, 307, 311, 328, 516], "singular": [214, 215, 219], "sinh": 0, "sinusoid": 425, "sinusoidalpositionalencod": 356, "size": [0, 1, 2, 6, 7, 53, 72, 96, 108, 109, 112, 113, 126, 148, 154, 155, 156, 157, 159, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 178, 180, 182, 187, 188, 198, 206, 214, 258, 259, 262, 279, 292, 293, 298, 302, 305, 326, 329, 335, 336, 356, 358, 359, 360, 363, 364, 365, 366, 367, 368, 373, 381, 388, 389, 390, 414, 415, 434, 490, 516, 521, 522], "size_in_megabyt": 293, "size_t": [0, 2], "skip": [3, 87], "slice": [0, 299, 519], "slice_s": [0, 298], "slice_upd": 0, "slide": [358, 359, 360, 388, 389, 390], "slight": [6, 521], "slightli": [420, 525], "slope": 384, "slow": 515, "slowli": 6, "sm": 9, "small": [6, 149, 155, 157, 335, 361, 377, 383, 416, 458, 463, 468, 515, 525], "smaller": [0, 9, 254, 335, 495, 515], "smallest": 214, "smile": 6, "smooth": [457, 467, 502], "smooth_l1_loss": 356, "sned": 137, "snippet": 516, "so": [1, 2, 6, 9, 184, 187, 327, 369, 434, 488, 515, 516, 521, 525], "socket": 516, "softmax": [0, 6, 159, 356, 387, 454, 457], "softmin": 356, "softplu": [356, 391, 469], "softshrink": 356, "softsign": 356, "solut": [217, 218], "solv": 356, "some": [0, 2, 5, 6, 7, 151, 397, 409, 488, 499, 515, 516, 517, 518, 520, 521], "someon": 6, "someth": [5, 6, 519], "sometim": 515, "sonoma": 9, "soon": 6, "sort": [0, 29, 30, 179, 180, 254, 320], "sorted_indic": [0, 179, 180], "sourc": [0, 1, 2, 3, 4, 63, 135, 136, 154, 156, 244, 322, 516], "space": [0, 2, 221, 455, 466], "spars": [0, 237], "spatial": [108, 109, 110, 112, 113, 358, 359, 360, 377, 388, 389, 390, 434], "speak": [6, 214], "specif": [1, 2, 9, 258, 516, 518], "specifi": [0, 2, 19, 38, 108, 109, 110, 112, 113, 128, 169, 170, 178, 184, 206, 214, 221, 244, 250, 257, 262, 277, 312, 314, 315, 318, 321, 322, 327, 331, 333, 361, 431, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 480, 515, 516, 517, 518, 525], "spectrum": 163, "speed": 1, "spent": 6, "split": [0, 375, 377, 448], "splittabl": 512, "sqrt": [0, 6, 144, 159, 187, 361, 374, 377, 381, 383, 385, 416, 425, 436, 437, 438, 439, 446, 489, 491, 492, 493, 502, 515], "squar": [0, 5, 6, 157, 188, 207, 209, 211, 220, 283, 303, 327, 340, 356, 416, 465, 467, 489, 490, 492, 493, 494, 518, 522], "squeez": [0, 434, 515], "src": [0, 135, 136], "ssh": [516, 520], "stabil": [155, 157, 361, 377, 381, 383, 416, 455, 456, 458, 489, 490, 491, 492, 493, 494, 502], "stabl": [227, 232, 300, 463], "stable_abi": 2, "stack": [0, 515], "standard": [0, 1, 4, 7, 52, 82, 233, 263, 267, 307, 433, 436, 438, 441, 516, 523], "starmap": [6, 340], "start": [0, 1, 2, 5, 6, 8, 9, 19, 158, 221, 240, 298, 299, 302, 342, 515, 517, 519, 520, 525], "start_axi": [0, 50, 175], "start_captur": 3, "start_indic": [298, 299], "state": [6, 7, 356, 376, 382, 417, 488, 499, 512, 515], "static": [9, 515], "static_cast": 2, "std": [0, 2, 4, 441, 517], "stderr": 520, "stdout": 520, "step": [0, 3, 4, 6, 7, 19, 335, 356, 376, 382, 417, 490, 497, 499, 504, 506, 507, 508, 515, 516], "step_decai": 488, "step_siz": 508, "still": [6, 9, 214, 515, 521], "stochast": [491, 492, 494, 503, 521], "stood": 6, "stop": [0, 2, 6, 19, 221, 241, 308, 518, 519], "stop_captur": 3, "stop_gradi": [0, 518], "storag": 87, "store": [6, 339], "str": [114, 126, 133, 140, 141, 150, 151, 152, 154, 156, 159, 180, 184, 190, 208, 210, 214, 222, 237, 238, 240, 258, 259, 284, 285, 286, 287, 288, 327, 336, 339, 343, 392, 393, 396, 397, 399, 401, 403, 409, 414, 415, 434, 438, 
439, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 496], "straight": 6, "strang": 6, "stream": [2, 8, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 124, 125, 126, 127, 128, 131, 132, 135, 136, 137, 138, 139, 140, 143, 144, 145, 147, 148, 149, 153, 155, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 176, 177, 178, 179, 180, 185, 186, 187, 188, 189, 191, 192, 193, 194, 195, 196, 197, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 265, 266, 267, 268, 269, 271, 272, 273, 274, 275, 276, 277, 279, 280, 281, 282, 283, 291, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 328, 329, 332, 333, 334, 335, 516, 525], "streamcontext": 309, "streamordevic": [0, 2], "street": 6, "strength": [495, 497, 503], "strict": [133, 185, 202, 397, 399, 409, 410, 411], "strictli": [214, 293], "stride": [0, 2, 87, 107, 108, 109, 110, 111, 112, 113, 358, 359, 360, 363, 364, 365, 366, 367, 368, 388, 389, 390, 420, 519], "string": [0, 2, 141, 150, 154, 156, 159, 190, 238, 253, 517, 522, 524], "stronger": 520, "structur": [2, 335, 498, 518], "stub": 9, "style": [2, 14, 17, 92, 94, 95, 138, 139, 143, 185, 186, 192, 201, 202, 203, 227, 233, 235, 243, 245, 249, 255, 276, 280, 310], "su": 6, "sub": [0, 7, 128, 271, 298, 299, 321, 336, 497], "subarrai": [128, 302], "subclass": 486, "subdivid": 1, "subdtyp": 198, "subgradi": 491, "sublinear": 490, "submodul": [6, 7, 356, 393, 397, 398, 409, 411], "subnetwork": 516, "suboptim": 517, "subscript": [140, 141], "subsect": 6, "subsequ": [133, 488, 516, 520], "subset": [356, 396, 410, 411], "substanti": 9, "subtl": 515, "subtract": [0, 39], "subtyp": [198, 347], "succe": 133, "successfulli": 516, "sudo": [9, 293, 516], "suggest": 516, "sum": [0, 2, 5, 14, 121, 132, 152, 191, 214, 232, 300, 318, 321, 356, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 516, 519, 522], "sum_": [214, 463], "sum_i": 454, "sum_j": [476, 477], "summat": [140, 141], "super": [6, 7, 356, 486], "superset": [340, 498], "support": [1, 2, 6, 8, 9, 17, 96, 109, 112, 113, 126, 159, 175, 187, 192, 204, 205, 207, 208, 209, 210, 211, 215, 216, 219, 220, 222, 233, 258, 266, 516, 518, 519, 522, 524], "suppos": [518, 525], "sure": [2, 3, 6, 9, 356, 515], "surpass": [438, 439], "surpris": 6, "sw": 1, "swap": [0, 114, 292, 312, 411], "swapax": [0, 122], "swiglu": 6, "swish": [423, 475], "switch": 9, "symbol": 495, "symmetr": [108, 109, 112, 113, 204, 205, 208, 210], "symmetri": [208, 210], "synchron": [2, 515], "syntax": [39, 519], "synthet": 5, "sysctl": 293, "system": [4, 6, 9, 181, 182, 217, 218, 238, 293], "t": [0, 1, 2, 4, 6, 9, 106, 144, 154, 156, 159, 180, 204, 205, 259, 327, 356, 376, 382, 417, 489, 490, 491, 492, 493, 494, 495, 502, 503, 515, 517, 518, 525], "t_kv": 159, "t_q": 159, "tabl": [1, 214, 347, 373], "take": [0, 2, 6, 7, 92, 
93, 94, 95, 102, 150, 179, 184, 199, 235, 243, 251, 259, 315, 327, 330, 331, 334, 341, 342, 358, 359, 360, 388, 389, 390, 412, 455, 496, 512, 516, 517, 518, 519, 520, 524, 525, 526], "take_along_axi": [0, 212, 519], "taken": [128, 314, 321], "talk": 516, "tan": 0, "tangent": [0, 2, 24, 25, 26, 122, 199, 316, 317, 432, 481], "tangent_i": 2, "tangent_x": 2, "tanh": [0, 356, 374, 376, 382, 391, 417, 446, 469], "target": [2, 327, 455, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 515], "target_include_directori": 2, "target_link_librari": [2, 4], "target_link_opt": 2, "target_sourc": 2, "task": 463, "tau": 503, "tcp": 516, "tediou": 516, "tell": [4, 6, 515, 522], "temp": 6, "templat": [0, 1, 2, 154, 156], "ten": 521, "tend": 495, "tensor": [222, 318, 468, 522], "tensordot": 0, "term": [2, 458, 489, 490, 491, 492, 493, 494, 502], "termin": [9, 520], "test": [7, 9, 516, 520], "test_imag": 7, "test_label": 7, "text": [6, 267, 374, 376, 382, 391, 417, 424, 431, 436, 437, 438, 439, 446, 449, 450, 451, 458, 459, 460, 463, 464, 467, 469, 470, 473, 474, 479, 480, 490, 495], "textrm": [258, 374, 375, 445, 448], "tf": 522, "tgp_size": 2, "th": [118, 119, 120, 121, 127, 153, 207, 208, 228, 506], "than": [1, 2, 6, 82, 114, 128, 139, 158, 179, 185, 186, 202, 203, 204, 205, 207, 208, 209, 210, 211, 212, 215, 219, 220, 233, 289, 293, 338, 340, 420, 431, 434, 464, 467, 480, 490, 495, 496, 515, 517, 518, 525], "thank": 521, "thei": [1, 2, 5, 6, 9, 17, 114, 180, 192, 258, 422, 459, 486, 495, 514, 515, 516, 517, 521, 523, 524, 525], "them": [0, 2, 6, 131, 356, 397, 409, 516, 517, 520, 525], "themselv": [2, 515], "thi": [0, 1, 2, 4, 6, 7, 9, 16, 17, 18, 19, 27, 28, 29, 30, 87, 88, 100, 122, 142, 150, 152, 154, 156, 179, 180, 181, 187, 190, 192, 199, 204, 205, 207, 208, 209, 210, 211, 214, 215, 216, 219, 220, 227, 232, 233, 234, 236, 242, 254, 256, 262, 291, 293, 300, 301, 302, 307, 311, 314, 320, 328, 335, 338, 341, 342, 356, 369, 370, 371, 375, 376, 382, 393, 394, 396, 397, 400, 401, 402, 407, 409, 410, 411, 412, 415, 417, 431, 436, 437, 438, 439, 446, 447, 448, 455, 463, 480, 486, 496, 499, 514, 515, 516, 517, 518, 520, 521, 522, 524], "thin": 520, "thing": [2, 6], "third": [206, 360, 390, 516, 517], "this_grid": 154, "thompson": 370, "those": [2, 6, 356, 497], "though": [2, 6, 515, 517, 521, 522], "thousand": 521, "thread": [1, 2], "thread_index_in_simdgroup": 1, "thread_position_in_grid": [1, 2, 156], "thread_rank": 154, "threadgroup": [1, 2, 154, 156], "threads_per_simdgroup": 1, "three": [6, 91, 258, 360, 390, 434], "threefri": 512, "threshold": [431, 460, 467, 480], "through": [1, 2, 308, 433, 495, 515, 516, 517, 518, 522], "throw": [2, 102, 133], "thu": [6, 356], "thumb": 488, "tic": 515, "tieleman": 502, "tile": [0, 159], "time": [1, 2, 6, 9, 292, 319, 356, 376, 382, 417, 515, 516, 518, 521, 525], "timeit": [515, 518], "titl": 2, "tmp": [1, 154, 156], "to_quant": 336, "to_stream": 2, "toc": 515, "togeth": [0, 1, 2, 7, 258, 340, 341, 516], "tok_embed": 6, "token": [6, 373, 414], "told": 6, "toler": [0, 17, 192], "too": [198, 515, 521], "took": 6, "tool": 9, "toolkit": 9, "top": [2, 320, 385, 434], "topk": 0, "torch": [6, 522], "torch_weight": 6, "total": [293, 518], "total_norm": 338, "tpi": 515, "tpng": 516, "trace": [0, 3, 152, 515], "trace_fil": 3, "tracer": 410, "track": [2, 356, 361], "track_running_stat": 361, "trade": 521, "tradit": [6, 158, 370, 371, 420], "train": [6, 7, 356, 361, 369, 370, 371, 395, 397, 409, 436, 437, 517], "train_imag": [7, 488], "train_label": [7, 488], "trainabl": 
[7, 337, 356, 486], "trainable_paramet": [356, 396, 499], "transfer": 520, "transform": [1, 6, 8, 122, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 187, 337, 356, 361, 377, 383, 385, 396, 397, 409, 415, 420, 519], "transformerencod": 287, "transit": 506, "translat": [155, 383], "transpos": [0, 6, 32, 111, 112, 113, 180, 259, 366, 367, 368], "treat": [0, 2, 122, 169, 170, 172, 173, 314, 434, 515], "tree": [8, 88, 102, 146, 184, 327, 331, 335, 339, 340, 341, 342, 343, 498, 499, 501, 510, 518], "tree_flatten": [287, 340, 343, 356, 488, 517], "tree_map": [341, 356, 516], "tree_unflatten": [6, 488, 517], "trembl": 6, "tri": [0, 133], "triangl": [208, 210, 323], "triangular": [204, 205, 218, 220], "trigger": 515, "tril": 0, "trilinear": 434, "triplet": 468, "triplet_loss": 356, "triu": 0, "true": [0, 1, 2, 4, 5, 6, 17, 42, 43, 44, 45, 58, 86, 102, 118, 119, 120, 121, 154, 156, 158, 180, 192, 198, 204, 205, 214, 219, 222, 228, 237, 259, 300, 332, 336, 339, 340, 341, 342, 347, 356, 361, 363, 364, 365, 366, 367, 368, 376, 377, 381, 382, 383, 385, 396, 397, 399, 406, 409, 410, 411, 415, 417, 420, 425, 433, 434, 455, 463, 490, 492, 493, 496, 497, 515, 517], "truncat": [160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 272], "truth": [5, 457, 467], "try": [2, 9, 516], "tupl": [0, 31, 69, 72, 75, 83, 88, 97, 103, 108, 109, 110, 112, 113, 135, 139, 141, 146, 148, 150, 161, 162, 165, 166, 169, 170, 172, 173, 190, 199, 207, 208, 212, 213, 214, 216, 219, 253, 258, 279, 281, 298, 299, 305, 326, 327, 330, 339, 340, 341, 342, 343, 358, 359, 360, 364, 365, 367, 368, 388, 389, 390, 399, 401, 422, 434, 490, 492, 493, 494, 495, 514, 517, 518], "tutori": 2, "twice": 525, "two": [0, 2, 14, 15, 17, 25, 86, 90, 92, 94, 95, 96, 128, 138, 143, 161, 165, 172, 179, 180, 185, 186, 192, 200, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 215, 216, 219, 220, 227, 233, 235, 243, 245, 249, 252, 258, 312, 342, 359, 375, 382, 389, 448, 456, 515, 516, 517, 518, 519, 525], "txt": [2, 4], "type": [0, 1, 2, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 38, 72, 82, 86, 87, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 118, 119, 120, 121, 125, 126, 127, 128, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 143, 144, 145, 147, 148, 149, 153, 155, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 242, 243, 244, 245, 246, 247, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 271, 272, 273, 274, 275, 276, 277, 279, 280, 282, 283, 289, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 310, 311, 312, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 338, 339, 342, 356, 404, 433, 435, 436, 437, 438, 439, 440, 441, 442, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 515, 517, 519, 522], "type_to_nam": 2, "typenam": [0, 1, 2], "typic": [0, 159, 335, 373, 488, 515, 521], "u": [1, 2, 4, 204, 208, 210, 212, 219, 385, 411, 510, 516, 520, 521], "u_": 489, "u_t": 489, "ubuntu": 9, 
"ubuntu2204": 9, "uint": [1, 2, 156], "uint16": [12, 347], "uint3": 1, "uint32": [12, 27, 28, 29, 30, 262, 347], "uint64": [12, 347], "uint8": [12, 347], "ultra": 6, "unabl": 9, "unam": 9, "unari": 515, "unchang": [158, 308, 420], "uncheck": 9, "uncompress": 287, "undefin": [0, 29, 122, 204, 205, 254, 266, 519], "under": 214, "underli": [2, 329], "understand": [6, 436, 437], "unevalu": 151, "unexpect": [2, 19], "unexpectedli": 520, "unflatten": 0, "unfreez": [356, 397], "unfrozen": 409, "unifi": 8, "uniform": [3, 356, 385, 399, 437, 439, 483, 512, 515, 518, 525], "uniformli": 273, "unintend": 0, "union": [19, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 70, 71, 73, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 89, 90, 91, 126, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 196, 197, 198, 200, 207, 208, 209, 210, 219, 238, 285, 309, 339], "uniqu": [2, 217, 218, 512, 516], "unique_ptr": 2, "unit": [362, 372, 374, 375, 376, 384, 418, 419, 421, 423, 436, 437, 438, 439, 443, 444, 445, 446, 447, 448, 452, 471, 472, 473, 475], "unittest": 9, "univers": 214, "unless": [6, 17, 192, 214, 486], "unlik": [6, 17, 192, 212, 258, 370, 371, 405, 519], "unnecessari": [2, 6], "unnorm": [262, 455, 457], "unscal": 490, "unsign": [180, 258, 259, 347], "unsignedinteg": 12, "unspecifi": [16, 18, 19, 27, 28, 29, 30, 103, 118, 119, 120, 121, 178, 228, 232, 234, 236, 242, 250, 254, 256, 277, 300, 301, 307, 311, 314, 320, 321, 328, 333, 335, 526], "unsqueez": 6, "unsupport": 222, "until": [2, 335, 521, 523], "unus": 2, "up": [1, 2, 6, 122, 515], "upcast": 2, "updat": [0, 1, 2, 5, 6, 7, 9, 39, 102, 299, 336, 340, 342, 361, 392, 393, 399, 404, 405, 406, 411, 488, 490, 493, 495, 498, 499, 503, 504, 505, 506, 507, 508, 515, 516, 517, 521], "update_modul": 356, "uplo": [208, 210], "upon": [6, 340, 341], "upper": [204, 205, 208, 210, 218, 220, 258, 269, 272, 273, 442], "upsampl": 356, "us": [0, 3, 5, 6, 7, 8, 9, 19, 39, 87, 122, 126, 129, 131, 132, 135, 136, 137, 139, 154, 156, 158, 175, 180, 181, 182, 183, 201, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 233, 258, 259, 267, 277, 279, 280, 281, 289, 292, 293, 313, 326, 335, 336, 339, 342, 347, 349, 356, 359, 360, 370, 373, 374, 376, 382, 385, 389, 390, 392, 396, 403, 410, 412, 414, 415, 417, 420, 425, 433, 434, 438, 439, 446, 447, 456, 483, 486, 488, 489, 490, 492, 493, 494, 495, 496, 498, 499, 512, 514, 515, 516, 517, 518, 519, 520, 523, 525], "usag": [433, 515, 516], "user": [2, 6, 356], "uss": 335, "usual": [373, 414, 514, 521], "util": [1, 2, 6, 8, 9, 287, 356, 488, 520], "v": [6, 114, 159, 207, 208, 356, 397, 522], "v_": [489, 491, 492, 493, 494, 502, 503], "v_t": [489, 491, 492, 493, 494, 502, 503], "val": [0, 31, 178], "valid": [7, 114, 175, 331, 339, 397, 409, 514, 516], "valid_parameter_filt": 392, "valu": [0, 1, 5, 6, 12, 13, 17, 19, 27, 28, 52, 82, 86, 101, 133, 150, 153, 154, 159, 160, 161, 162, 164, 165, 166, 168, 169, 170, 171, 172, 173, 175, 178, 190, 192, 206, 214, 215, 219, 221, 238, 246, 253, 257, 258, 261, 262, 263, 265, 266, 267, 269, 272, 273, 281, 285, 293, 314, 315, 327, 331, 337, 339, 340, 341, 342, 347, 359, 360, 362, 369, 370, 371, 372, 378, 381, 385, 389, 390, 396, 412, 413, 429, 431, 433, 435, 455, 456, 457, 458, 459, 460, 462, 463, 464, 465, 466, 467, 480, 486, 490, 493, 504, 505, 507, 508, 518], "value_and_grad": [7, 122, 356, 410, 486, 488, 501, 515, 518, 522, 523], "value_and_grad_fn": 521, "value_cach": 6, 
"value_dim": 412, "value_input_dim": 412, "value_output_dim": 412, "value_proj": 6, "valueerror": [122, 214, 399, 518], "values_hat": 6, "van": 214, "var": [0, 361, 377, 381, 383, 458], "vari": 335, "variabl": [9, 102, 122, 129, 142, 150, 151, 152, 184, 199, 327, 330, 331, 515, 516, 517], "varianc": [0, 307, 328, 361, 377, 458], "variant": [6, 467, 494], "variou": 214, "vector": [0, 2, 5, 8, 191, 199, 214, 314, 330, 331, 373, 457, 517, 523], "verbos": [1, 154, 156, 516], "veri": [6, 412, 520, 521, 525], "verifi": [5, 9], "versa": 281, "version": [2, 4, 9, 88, 126, 150, 152, 190, 227, 232, 258, 300, 331, 512, 518, 519], "versu": 515, "via": [9, 122, 498, 501, 516, 520, 521, 522], "vice": 281, "video": 371, "view": [0, 3, 87, 522], "visual": 151, "vjp": [2, 122, 523], "vmap": [2, 122, 517, 518, 521, 523], "vmap_add": 518, "vocab_s": 6, "vocabulari": [373, 414], "void": [1, 2], "volta": 9, "vt": 219, "w": [0, 1, 5, 108, 109, 112, 113, 126, 180, 207, 208, 258, 259, 327, 341, 361, 364, 365, 367, 368, 370, 371, 385, 488, 503, 518], "w1": [6, 338], "w2": [6, 338], "w3": 6, "w_": [376, 382, 417, 489, 490, 491, 492, 493, 494, 495, 502, 503], "w_i": [126, 258], "w_in": 1, "w_q": 258, "w_star": 5, "w_stride": 1, "w_t": [489, 491, 492, 493, 494, 495, 502, 503], "wa": [4, 6, 87, 135, 136, 516, 517, 521], "wai": [2, 6, 9, 356, 434, 515, 516, 517, 518, 519, 520], "wait": 6, "walk": [6, 517], "walkthrough": 2, "walsh": 187, "want": [1, 2, 6, 516, 517, 518, 520, 525], "warm": [2, 515], "warmup_init": 490, "watch": [6, 515], "wd": 495, "we": [0, 1, 2, 5, 6, 7, 122, 126, 135, 136, 180, 258, 259, 356, 373, 414, 422, 493, 495, 512, 514, 515, 516, 517, 518, 520, 521, 525], "weight": [0, 5, 107, 108, 109, 110, 111, 112, 113, 155, 157, 340, 356, 399, 403, 414, 415, 455, 457, 486, 490, 493, 495, 496, 497, 499, 503, 518, 521], "weight_decai": [490, 493, 495, 497, 503], "weight_fil": 6, "weights_fp16": 521, "well": [6, 356, 397, 409, 412, 516, 521], "wen": 6, "went": 6, "were": [6, 525], "wet": 6, "wget": 9, "what": [2, 6, 340, 520], "whatsoev": 6, "whc": 370, "when": [0, 1, 2, 6, 8, 9, 102, 110, 122, 137, 204, 205, 207, 208, 209, 210, 211, 214, 215, 219, 220, 222, 292, 363, 364, 365, 366, 367, 368, 434, 438, 439, 455, 461, 467, 486, 488, 506, 512, 515, 516, 517, 525], "where": [0, 4, 7, 153, 192, 205, 258, 327, 331, 361, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 374, 376, 377, 381, 382, 383, 385, 396, 413, 416, 417, 431, 438, 439, 444, 445, 447, 458, 464, 470, 473, 475, 480, 499, 516, 518, 519], "wherea": 518, "whether": [150, 152, 154, 156, 180, 208, 210, 218, 220, 259, 376, 382, 396, 412, 417, 455, 458, 464], "which": [0, 1, 2, 6, 7, 8, 9, 19, 38, 87, 88, 102, 110, 128, 131, 132, 133, 135, 136, 137, 146, 150, 152, 158, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 175, 184, 190, 193, 194, 195, 196, 197, 199, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 222, 237, 240, 258, 262, 263, 277, 279, 281, 284, 285, 286, 287, 288, 305, 306, 314, 321, 326, 327, 330, 331, 335, 336, 359, 360, 370, 371, 374, 389, 390, 392, 396, 420, 455, 457, 460, 464, 467, 483, 498, 499, 512, 515, 516, 517, 518, 519, 520, 521, 525, 526], "while": [2, 3, 6, 9, 167, 279, 420, 521, 522], "whistl": 2, "who": 6, "whose": [153, 336, 337], "why": 6, "wi": 516, "wide": 521, "width": [359, 360, 361, 364, 365, 367, 368, 370, 371, 389, 390, 414, 415], "window": [9, 358, 359, 360, 388, 389, 390], "wipe": 9, "wire": 293, "wired_limit_mb": 293, "wise": [0, 2, 13, 14, 20, 21, 22, 23, 
24, 25, 26, 92, 93, 94, 95, 99, 115, 116, 138, 139, 143, 144, 145, 147, 149, 176, 177, 185, 186, 192, 201, 202, 203, 223, 224, 225, 226, 227, 229, 230, 231, 235, 243, 245, 247, 249, 255, 275, 276, 280, 283, 294, 295, 296, 297, 303, 304, 310, 316, 317, 362, 370, 371, 380, 391, 413, 424, 443, 450, 451, 453, 454, 469, 470, 472, 475, 476, 477, 478, 515], "wish": 9, "with_logit": 455, "within": [0, 3, 29, 192], "without": [1, 6, 8, 308, 412, 482, 514, 515, 516, 517, 520, 521, 522, 525], "wk": 6, "wl": 2, "wo": 6, "word": 0, "work": [2, 3, 6, 292, 347, 497, 515, 516, 517, 518, 519, 520, 521], "workhors": 356, "world": [343, 516], "world2": 516, "world_ani": 516, "world_mpi": 516, "world_r": 516, "worri": [1, 521], "would": [2, 6, 434, 516, 517, 519, 521, 522, 525], "wq": 6, "wrap": [122, 356, 496], "wrapper": [517, 520], "write": [0, 1, 6, 356, 522], "written": [2, 517], "wrong": 517, "wrt": 337, "wv": 6, "x": [0, 1, 2, 4, 5, 6, 7, 39, 88, 96, 122, 131, 132, 136, 137, 144, 149, 150, 151, 155, 156, 157, 180, 187, 188, 190, 214, 259, 263, 267, 268, 282, 287, 294, 324, 325, 332, 340, 342, 356, 358, 359, 360, 361, 362, 372, 374, 375, 377, 381, 383, 384, 385, 388, 389, 390, 391, 392, 413, 416, 418, 424, 425, 431, 434, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 467, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 486, 488, 495, 515, 516, 517, 518, 519, 521, 522, 523, 525], "x1": 456, "x2": 456, "x86_64": 9, "x_1": [456, 464], "x_2": [456, 464], "x_cast": 2, "x_grad": 1, "x_i": [454, 476, 477], "x_j": [476, 477], "x_offset": 2, "x_ptr": 2, "x_shape": 1, "x_stride": 2, "x_t": [376, 382, 417], "x_view": 522, "xcode": 9, "xcodeproj": 3, "xcrun": 9, "xf": 382, "xg": 382, "xi": 382, "xn": 376, "xo": 382, "xor": 95, "xr": 376, "xy": [0, 237], "xz": 376, "x\u00b2": 522, "y": [0, 2, 4, 5, 6, 7, 9, 39, 88, 122, 150, 151, 187, 190, 332, 356, 361, 370, 377, 381, 383, 385, 416, 459, 464, 467, 488, 491, 515, 516, 517, 518, 521, 522], "y_": [459, 463], "y_cast": 2, "y_hat": 356, "y_offset": 2, "y_ptr": 2, "y_stride": 2, "ye": 6, "year": 6, "yet": [6, 356, 486, 499, 518, 519, 521, 523], "yield": [6, 7, 512], "you": [1, 2, 3, 4, 6, 7, 8, 9, 293, 356, 425, 433, 483, 512, 515, 516, 517, 518, 519, 520, 522, 524, 525], "your": [2, 6, 9, 486, 516, 518, 521], "z": [2, 88, 376, 515, 517, 521], "z_t": 376, "zeiler": 489, "zero": [0, 150, 153, 160, 161, 162, 163, 164, 165, 166, 168, 169, 170, 171, 172, 173, 206, 237, 278, 299, 323, 324, 325, 334, 356, 358, 359, 360, 369, 370, 371, 399, 435, 436, 437, 438, 439, 440, 441, 442, 483, 488, 490, 517, 519], "zero_grad": 518, "zeros_lik": [0, 212], "zhang": 6, "zip": [6, 7], "zip_saf": 2}, "titles": ["Operations", "Custom Metal Kernels", "Custom Extensions in MLX", "Metal Debugger", "Using MLX in C++", "Linear Regression", "LLM inference", "Multi-Layer Perceptron", "MLX", "Build and Install", "mlx.core.Device", "mlx.core.Dtype", "mlx.core.DtypeCategory", "mlx.core.abs", "mlx.core.add", "mlx.core.addmm", "mlx.core.all", "mlx.core.allclose", "mlx.core.any", "mlx.core.arange", "mlx.core.arccos", "mlx.core.arccosh", "mlx.core.arcsin", "mlx.core.arcsinh", "mlx.core.arctan", "mlx.core.arctan2", "mlx.core.arctanh", "mlx.core.argmax", "mlx.core.argmin", "mlx.core.argpartition", "mlx.core.argsort", "mlx.core.array", "mlx.core.array.T", "mlx.core.array.abs", "mlx.core.array.all", "mlx.core.array.any", "mlx.core.array.argmax", "mlx.core.array.argmin", "mlx.core.array.astype", "mlx.core.array.at", "mlx.core.array.conj", "mlx.core.array.cos", 
"mlx.core.array.cummax", "mlx.core.array.cummin", "mlx.core.array.cumprod", "mlx.core.array.cumsum", "mlx.core.array.diag", "mlx.core.array.diagonal", "mlx.core.array.dtype", "mlx.core.array.exp", "mlx.core.array.flatten", "mlx.core.array.imag", "mlx.core.array.item", "mlx.core.array.itemsize", "mlx.core.array.log", "mlx.core.array.log10", "mlx.core.array.log1p", "mlx.core.array.log2", "mlx.core.array.logcumsumexp", "mlx.core.array.logsumexp", "mlx.core.array.max", "mlx.core.array.mean", "mlx.core.array.min", "mlx.core.array.moveaxis", "mlx.core.array.nbytes", "mlx.core.array.ndim", "mlx.core.array.prod", "mlx.core.array.real", "mlx.core.array.reciprocal", "mlx.core.array.reshape", "mlx.core.array.round", "mlx.core.array.rsqrt", "mlx.core.array.shape", "mlx.core.array.sin", "mlx.core.array.size", "mlx.core.array.split", "mlx.core.array.sqrt", "mlx.core.array.square", "mlx.core.array.squeeze", "mlx.core.array.std", "mlx.core.array.sum", "mlx.core.array.swapaxes", "mlx.core.array.tolist", "mlx.core.array.transpose", "mlx.core.array.var", "mlx.core.array.view", "mlx.core.array_equal", "mlx.core.as_strided", "mlx.core.async_eval", "mlx.core.atleast_1d", "mlx.core.atleast_2d", "mlx.core.atleast_3d", "mlx.core.bitwise_and", "mlx.core.bitwise_invert", "mlx.core.bitwise_or", "mlx.core.bitwise_xor", "mlx.core.block_masked_mm", "mlx.core.broadcast_arrays", "mlx.core.broadcast_to", "mlx.core.ceil", "mlx.core.clear_cache", "mlx.core.clip", "mlx.core.compile", "mlx.core.concatenate", "mlx.core.conj", "mlx.core.conjugate", "mlx.core.contiguous", "mlx.core.conv1d", "mlx.core.conv2d", "mlx.core.conv3d", "mlx.core.conv_general", "mlx.core.conv_transpose1d", "mlx.core.conv_transpose2d", "mlx.core.conv_transpose3d", "mlx.core.convolve", "mlx.core.cos", "mlx.core.cosh", "mlx.core.cuda.is_available", "mlx.core.cummax", "mlx.core.cummin", "mlx.core.cumprod", "mlx.core.cumsum", "mlx.core.custom_function", "mlx.core.default_device", "mlx.core.default_stream", "mlx.core.degrees", "mlx.core.dequantize", "mlx.core.diag", "mlx.core.diagonal", "mlx.core.disable_compile", "mlx.core.distributed.Group", "mlx.core.distributed.all_gather", "mlx.core.distributed.all_sum", "mlx.core.distributed.init", "mlx.core.distributed.is_available", "mlx.core.distributed.recv", "mlx.core.distributed.recv_like", "mlx.core.distributed.send", "mlx.core.divide", "mlx.core.divmod", "mlx.core.einsum", "mlx.core.einsum_path", "mlx.core.enable_compile", "mlx.core.equal", "mlx.core.erf", "mlx.core.erfinv", "mlx.core.eval", "mlx.core.exp", "mlx.core.expand_dims", "mlx.core.expm1", "mlx.core.export_function", "mlx.core.export_to_dot", "mlx.core.exporter", "mlx.core.eye", "mlx.core.fast.cuda_kernel", "mlx.core.fast.layer_norm", "mlx.core.fast.metal_kernel", "mlx.core.fast.rms_norm", "mlx.core.fast.rope", "mlx.core.fast.scaled_dot_product_attention", "mlx.core.fft.fft", "mlx.core.fft.fft2", "mlx.core.fft.fftn", "mlx.core.fft.fftshift", "mlx.core.fft.ifft", "mlx.core.fft.ifft2", "mlx.core.fft.ifftn", "mlx.core.fft.ifftshift", "mlx.core.fft.irfft", "mlx.core.fft.irfft2", "mlx.core.fft.irfftn", "mlx.core.fft.rfft", "mlx.core.fft.rfft2", "mlx.core.fft.rfftn", "mlx.core.finfo", "mlx.core.flatten", "mlx.core.floor", "mlx.core.floor_divide", "mlx.core.full", "mlx.core.gather_mm", "mlx.core.gather_qmm", "mlx.core.get_active_memory", "mlx.core.get_cache_memory", "mlx.core.get_peak_memory", "mlx.core.grad", "mlx.core.greater", "mlx.core.greater_equal", "mlx.core.hadamard_transform", "mlx.core.identity", "mlx.core.imag", "mlx.core.import_function", 
"mlx.core.inner", "mlx.core.isclose", "mlx.core.isfinite", "mlx.core.isinf", "mlx.core.isnan", "mlx.core.isneginf", "mlx.core.isposinf", "mlx.core.issubdtype", "mlx.core.jvp", "mlx.core.kron", "mlx.core.left_shift", "mlx.core.less", "mlx.core.less_equal", "mlx.core.linalg.cholesky", "mlx.core.linalg.cholesky_inv", "mlx.core.linalg.cross", "mlx.core.linalg.eig", "mlx.core.linalg.eigh", "mlx.core.linalg.eigvals", "mlx.core.linalg.eigvalsh", "mlx.core.linalg.inv", "mlx.core.linalg.lu", "mlx.core.linalg.lu_factor", "mlx.core.linalg.norm", "mlx.core.linalg.pinv", "mlx.core.linalg.qr", "mlx.core.linalg.solve", "mlx.core.linalg.solve_triangular", "mlx.core.linalg.svd", "mlx.core.linalg.tri_inv", "mlx.core.linspace", "mlx.core.load", "mlx.core.log", "mlx.core.log10", "mlx.core.log1p", "mlx.core.log2", "mlx.core.logaddexp", "mlx.core.logcumsumexp", "mlx.core.logical_and", "mlx.core.logical_not", "mlx.core.logical_or", "mlx.core.logsumexp", "mlx.core.matmul", "mlx.core.max", "mlx.core.maximum", "mlx.core.mean", "mlx.core.meshgrid", "mlx.core.metal.device_info", "mlx.core.metal.is_available", "mlx.core.metal.start_capture", "mlx.core.metal.stop_capture", "mlx.core.min", "mlx.core.minimum", "mlx.core.moveaxis", "mlx.core.multiply", "mlx.core.nan_to_num", "mlx.core.negative", "mlx.core.new_stream", "mlx.core.not_equal", "mlx.core.ones", "mlx.core.ones_like", "mlx.core.outer", "mlx.core.pad", "mlx.core.partition", "mlx.core.power", "mlx.core.prod", "mlx.core.put_along_axis", "mlx.core.quantize", "mlx.core.quantized_matmul", "mlx.core.radians", "mlx.core.random.bernoulli", "mlx.core.random.categorical", "mlx.core.random.gumbel", "mlx.core.random.key", "mlx.core.random.laplace", "mlx.core.random.multivariate_normal", "mlx.core.random.normal", "mlx.core.random.permutation", "mlx.core.random.randint", "mlx.core.random.seed", "mlx.core.random.split", "mlx.core.random.truncated_normal", "mlx.core.random.uniform", "mlx.core.real", "mlx.core.reciprocal", "mlx.core.remainder", "mlx.core.repeat", "mlx.core.reset_peak_memory", "mlx.core.reshape", "mlx.core.right_shift", "mlx.core.roll", "mlx.core.round", "mlx.core.rsqrt", "mlx.core.save", "mlx.core.save_gguf", "mlx.core.save_safetensors", "mlx.core.savez", "mlx.core.savez_compressed", "mlx.core.set_cache_limit", "mlx.core.set_default_device", "mlx.core.set_default_stream", "mlx.core.set_memory_limit", "mlx.core.set_wired_limit", "mlx.core.sigmoid", "mlx.core.sign", "mlx.core.sin", "mlx.core.sinh", "mlx.core.slice", "mlx.core.slice_update", "mlx.core.softmax", "mlx.core.sort", "mlx.core.split", "mlx.core.sqrt", "mlx.core.square", "mlx.core.squeeze", "mlx.core.stack", "mlx.core.std", "mlx.core.stop_gradient", "mlx.core.stream", "mlx.core.subtract", "mlx.core.sum", "mlx.core.swapaxes", "mlx.core.synchronize", "mlx.core.take", "mlx.core.take_along_axis", "mlx.core.tan", "mlx.core.tanh", "mlx.core.tensordot", "mlx.core.tile", "mlx.core.topk", "mlx.core.trace", "mlx.core.transpose", "mlx.core.tri", "mlx.core.tril", "mlx.core.triu", "mlx.core.unflatten", "mlx.core.value_and_grad", "mlx.core.var", "mlx.core.view", "mlx.core.vjp", "mlx.core.vmap", "mlx.core.where", "mlx.core.zeros", "mlx.core.zeros_like", "mlx.nn.average_gradients", "mlx.nn.quantize", "mlx.nn.value_and_grad", "mlx.optimizers.clip_grad_norm", "mlx.utils.tree_flatten", "mlx.utils.tree_map", "mlx.utils.tree_map_with_path", "mlx.utils.tree_reduce", "mlx.utils.tree_unflatten", "mlx.core.Stream", "Array", "CUDA", "Data Types", "Devices and Streams", "Distributed Communication", "Export Functions", "Fast", "FFT", 
"Linear Algebra", "Memory Management", "Metal", "Neural Networks", "mlx.nn.ALiBi", "mlx.nn.AvgPool1d", "mlx.nn.AvgPool2d", "mlx.nn.AvgPool3d", "mlx.nn.BatchNorm", "mlx.nn.CELU", "mlx.nn.Conv1d", "mlx.nn.Conv2d", "mlx.nn.Conv3d", "mlx.nn.ConvTranspose1d", "mlx.nn.ConvTranspose2d", "mlx.nn.ConvTranspose3d", "mlx.nn.Dropout", "mlx.nn.Dropout2d", "mlx.nn.Dropout3d", "mlx.nn.ELU", "mlx.nn.Embedding", "mlx.nn.GELU", "mlx.nn.GLU", "mlx.nn.GRU", "mlx.nn.GroupNorm", "mlx.nn.HardShrink", "mlx.nn.HardTanh", "mlx.nn.Hardswish", "mlx.nn.InstanceNorm", "mlx.nn.LSTM", "mlx.nn.LayerNorm", "mlx.nn.LeakyReLU", "mlx.nn.Linear", "mlx.nn.LogSigmoid", "mlx.nn.LogSoftmax", "mlx.nn.MaxPool1d", "mlx.nn.MaxPool2d", "mlx.nn.MaxPool3d", "mlx.nn.Mish", "mlx.nn.Module.apply", "mlx.nn.Module.apply_to_modules", "mlx.nn.Module.children", "mlx.nn.Module.eval", "mlx.nn.Module.filter_and_map", "mlx.nn.Module.freeze", "mlx.nn.Module.leaf_modules", "mlx.nn.Module.load_weights", "mlx.nn.Module.modules", "mlx.nn.Module.named_modules", "mlx.nn.Module.parameters", "mlx.nn.Module.save_weights", "mlx.nn.Module.set_dtype", "mlx.nn.Module.state", "mlx.nn.Module.train", "mlx.nn.Module.trainable_parameters", "mlx.nn.Module.training", "mlx.nn.Module.unfreeze", "mlx.nn.Module.update", "mlx.nn.Module.update_modules", "mlx.nn.MultiHeadAttention", "mlx.nn.PReLU", "mlx.nn.QuantizedEmbedding", "mlx.nn.QuantizedLinear", "mlx.nn.RMSNorm", "mlx.nn.RNN", "mlx.nn.ReLU", "mlx.nn.ReLU6", "mlx.nn.RoPE", "mlx.nn.SELU", "mlx.nn.Sequential", "mlx.nn.SiLU", "mlx.nn.Sigmoid", "mlx.nn.SinusoidalPositionalEncoding", "mlx.nn.Softmax", "mlx.nn.Softmin", "mlx.nn.Softplus", "mlx.nn.Softshrink", "mlx.nn.Softsign", "mlx.nn.Step", "mlx.nn.Tanh", "mlx.nn.Transformer", "mlx.nn.Upsample", "mlx.nn.init.constant", "mlx.nn.init.glorot_normal", "mlx.nn.init.glorot_uniform", "mlx.nn.init.he_normal", "mlx.nn.init.he_uniform", "mlx.nn.init.identity", "mlx.nn.init.normal", "mlx.nn.init.uniform", "mlx.nn.celu", "mlx.nn.elu", "mlx.nn.gelu", "mlx.nn.gelu_approx", "mlx.nn.gelu_fast_approx", "mlx.nn.glu", "mlx.nn.hard_shrink", "mlx.nn.hard_tanh", "mlx.nn.hardswish", "mlx.nn.leaky_relu", "mlx.nn.log_sigmoid", "mlx.nn.log_softmax", "mlx.nn.losses.binary_cross_entropy", "mlx.nn.losses.cosine_similarity_loss", "mlx.nn.losses.cross_entropy", "mlx.nn.losses.gaussian_nll_loss", "mlx.nn.losses.hinge_loss", "mlx.nn.losses.huber_loss", "mlx.nn.losses.kl_div_loss", "mlx.nn.losses.l1_loss", "mlx.nn.losses.log_cosh_loss", "mlx.nn.losses.margin_ranking_loss", "mlx.nn.losses.mse_loss", "mlx.nn.losses.nll_loss", "mlx.nn.losses.smooth_l1_loss", "mlx.nn.losses.triplet_loss", "mlx.nn.mish", "mlx.nn.prelu", "mlx.nn.relu", "mlx.nn.relu6", "mlx.nn.selu", "mlx.nn.sigmoid", "mlx.nn.silu", "mlx.nn.softmax", "mlx.nn.softmin", "mlx.nn.softplus", "mlx.nn.softshrink", "mlx.nn.step", "mlx.nn.tanh", "Functions", "Initializers", "Layers", "Loss Functions", "Module", "Operations", "Optimizers", "mlx.optimizers.AdaDelta", "mlx.optimizers.Adafactor", "mlx.optimizers.Adagrad", "mlx.optimizers.Adam", "mlx.optimizers.AdamW", "mlx.optimizers.Adamax", "mlx.optimizers.Lion", "mlx.optimizers.MultiOptimizer", "mlx.optimizers.Muon", "mlx.optimizers.Optimizer.apply_gradients", "mlx.optimizers.Optimizer.init", "mlx.optimizers.Optimizer.state", "mlx.optimizers.Optimizer.update", "mlx.optimizers.RMSprop", "mlx.optimizers.SGD", "mlx.optimizers.cosine_decay", "mlx.optimizers.exponential_decay", "mlx.optimizers.join_schedules", "mlx.optimizers.linear_schedule", "mlx.optimizers.step_decay", "Common Optimizers", "Optimizer", 
"Schedulers", "Random", "Transforms", "Tree Utils", "Compilation", "Distributed Communication", "Exporting Functions", "Function Transforms", "Indexing Arrays", "Launching Distributed Programs", "Lazy Evaluation", "Conversion to NumPy and Other Frameworks", "Quick Start Guide", "Saving and Loading Arrays", "Unified Memory", "Using Streams"], "titleterms": {"A": 525, "In": 519, "The": 356, "ab": [13, 33], "adadelta": 489, "adafactor": 490, "adagrad": 491, "adam": 492, "adamax": 494, "adamw": 493, "add": 14, "addmm": 15, "algebra": 353, "alibi": 357, "all": [6, 16, 34, 516], "all_gath": 131, "all_sum": 132, "allclos": 17, "ani": [18, 35], "api": [8, 9], "appli": 392, "apply_gradi": 498, "apply_to_modul": 393, "arang": 19, "arcco": 20, "arccosh": 21, "arcsin": 22, "arcsinh": 23, "arctan": 24, "arctan2": 25, "arctanh": 26, "argmax": [27, 36], "argmin": [28, 37], "argpartit": 29, "argsort": 30, "arrai": [31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 345, 519, 524], "array_equ": 86, "as_strid": 87, "astyp": 38, "async_ev": 88, "atleast_1d": 89, "atleast_2d": 90, "atleast_3d": 91, "attent": 6, "automat": 518, "average_gradi": [335, 516], "avgpool1d": 358, "avgpool2d": 359, "avgpool3d": 360, "back": 2, "backend": 516, "basic": [515, 517, 523], "batchnorm": 361, "benchmark": 6, "bernoulli": 261, "binari": 9, "binary_cross_entropi": 455, "bind": 2, "bitwise_and": 92, "bitwise_invert": 93, "bitwise_or": 94, "bitwise_xor": 95, "block_masked_mm": 96, "broadcast_arrai": 97, "broadcast_to": 98, "build": [2, 9], "c": [4, 8, 9, 517], "categor": 262, "ceil": 99, "celu": [362, 443], "children": 394, "choleski": 204, "cholesky_inv": 205, "class": 356, "clear_cach": 100, "clip": 101, "clip_grad_norm": 338, "cmake": 2, "co": [41, 115], "code": [2, 6], "common": 509, "commun": [349, 516], "compil": [102, 515], "complex": 1, "comput": 521, "concaten": 103, "conj": [40, 104], "conjug": 105, "constant": 435, "contigu": 106, "conv1d": [107, 363], "conv2d": [108, 364], "conv3d": [109, 365], "conv_gener": 110, "conv_transpose1d": 111, "conv_transpose2d": 112, "conv_transpose3d": 113, "convers": 522, "convert": 6, "convolv": 114, "convtranspose1d": 366, "convtranspose2d": 367, "convtranspose3d": 368, "core": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 
260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 344], "cosh": 116, "cosine_decai": 504, "cosine_similarity_loss": 456, "cpu": [2, 9], "cross": 206, "cross_entropi": 457, "cuda": [9, 117, 346], "cuda_kernel": 154, "cummax": [42, 118], "cummin": [43, 119], "cumprod": [44, 120], "cumsum": [45, 121], "custom": [1, 2], "custom_funct": 122, "data": 347, "debug": 515, "debugg": 3, "default_devic": 123, "default_stream": 124, "defin": 516, "degre": 125, "dequant": 126, "devic": [10, 348], "device_info": 238, "diag": [46, 127], "diagon": [47, 128], "differ": 519, "differenti": 518, "disable_compil": 129, "distribut": [130, 131, 132, 133, 134, 135, 136, 137, 349, 516, 520], "divid": 138, "divmod": 139, "download": [2, 6], "dropout": 369, "dropout2d": 370, "dropout3d": 371, "dtype": [11, 48], "dtypecategori": 12, "eig": 207, "eigh": 208, "eigval": 209, "eigvalsh": 210, "einsum": 140, "einsum_path": 141, "elu": [372, 444], "embed": 373, "enable_compil": 142, "encod": 6, "end": 2, "equal": 143, "erf": 144, "erfinv": 145, "eval": [146, 395], "evalu": 521, "exampl": [1, 2, 8, 515, 516, 517, 525], "exp": [49, 147], "expand_dim": 148, "expm1": 149, "exponential_decai": 505, "export": [152, 350, 517], "export_funct": 150, "export_to_dot": 151, "extens": 2, "ey": 153, "fast": [154, 155, 156, 157, 158, 159, 351], "fft": [160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 352], "fft2": 161, "fftn": 162, "fftshift": 163, "filter_and_map": 396, "finfo": 174, "flatten": [50, 175], "floor": 176, "floor_divid": 177, "format": 524, "found": 9, "framework": 522, "freez": 397, "from": [9, 519], "full": [6, 178], "function": [350, 482, 485, 515, 517, 518, 523], "further": 8, "gather_mm": 179, "gather_qmm": 180, "gaussian_nll_loss": 458, "gelu": [374, 445], "gelu_approx": 446, "gelu_fast_approx": 447, "gener": 6, "get": 516, "get_active_memori": 181, "get_cache_memori": 182, "get_peak_memori": 183, "glorot_norm": 436, "glorot_uniform": 437, "glu": [375, 448], "gpu": 2, "grad": [184, 356], "graph": [515, 521, 523], "greater": 185, "greater_equ": 186, "grid": 1, "group": 130, "groupnorm": 377, "gru": 376, "guid": 523, "gumbel": 263, "hadamard_transform": 187, "hard_shrink": 449, "hard_tanh": 450, "hardshrink": 378, "hardswish": [380, 451], "hardtanh": 379, "he_norm": 438, "he_uniform": 439, "hinge_loss": 459, "host": [516, 520], "huber_loss": 460, "ident": [188, 440], "ifft": 164, "ifft2": 165, "ifftn": 166, "ifftshift": 167, "imag": [51, 189], "implement": [2, 6], "import": 517, "import_funct": 190, "index": 519, "infer": 6, "init": [133, 435, 436, 437, 438, 439, 440, 441, 442, 499], "initi": 483, "inner": 191, "inspect": 356, "instal": [8, 9, 516], "instancenorm": 381, "introduc": 2, "inv": 211, "irfft": 168, "irfft2": 169, "irfftn": 170, "is_avail": [117, 134, 239], "isclos": 192, "isfinit": 193, "isinf": 194, "isnan": 195, "isneginf": 196, "isposinf": 197, "issubdtyp": 198, "item": 52, "items": 53, "jax": 522, "join_schedul": 506, "jvp": 199, "kei": 264, "kernel": 1, "kl_div_loss": 461, "kron": 200, "l1_loss": 462, "laplac": 265, "launch": 520, "layer": [6, 7, 484], "layer_norm": 155, "layernorm": 383, "lazi": 521, "leaf_modul": 398, "leaky_relu": 452, "leakyrelu": 384, 
"left_shift": 201, "less": 202, "less_equ": 203, "linalg": [204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220], "linear": [5, 353, 385], "linear_schedul": 507, "linspac": 221, "linux": 9, "lion": 495, "llm": 6, "load": [6, 222, 488, 524], "load_weight": 399, "log": [54, 223], "log10": [55, 224], "log1p": [56, 225], "log2": [57, 226], "log_cosh_loss": 463, "log_sigmoid": 453, "log_softmax": 454, "logaddexp": 227, "logcumsumexp": [58, 228], "logical_and": 229, "logical_not": 230, "logical_or": 231, "logsigmoid": 386, "logsoftmax": 387, "logsumexp": [59, 232], "loss": [455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 485], "lstm": 382, "lu": 212, "lu_factor": 213, "manag": 354, "margin_ranking_loss": 464, "matmul": 233, "max": [60, 234], "maximum": 235, "maxpool1d": 388, "maxpool2d": 389, "maxpool3d": 390, "mean": [61, 236], "memori": [354, 525], "meshgrid": 237, "metal": [1, 3, 9, 238, 239, 240, 241, 355], "metal_kernel": 156, "min": [62, 242], "minim": 9, "minimum": 243, "mish": [391, 469], "mlx": [2, 4, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508], "model": 6, "modul": [356, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 486, 517], "more": 517, "moveaxi": [63, 
244], "mpi": [516, 520], "mse_loss": 465, "multi": 7, "multiheadattent": 412, "multioptim": 496, "multipl": 517, "multipli": 245, "multivariate_norm": 266, "muon": 497, "named_modul": 401, "nan_to_num": 246, "nbyte": 64, "ndim": 65, "neg": 247, "network": 356, "neural": 356, "new_stream": 248, "nll_loss": 466, "nn": [335, 336, 337, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 516], "norm": 214, "normal": [267, 441], "not_equ": 249, "numpi": [519, 522], "ones": 250, "ones_lik": 251, "onli": [9, 521], "oper": [0, 2, 487], "optim": [338, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510], "option": 9, "other": 522, "outer": 252, "packag": 4, "pad": 253, "paramet": [356, 402], "partit": 254, "perceptron": 7, "permut": 268, "pinv": 215, "place": 519, "power": 255, "prelu": [413, 470], "primit": 2, "prod": [66, 256], "program": [516, 520], "provid": 520, "pure": 515, "put": 6, "put_along_axi": 257, "python": [2, 8, 9], "pytorch": 522, "qr": 216, "quantiz": [258, 336], "quantized_matmul": 259, "quantizedembed": 414, "quantizedlinear": 415, "quick": [356, 523], "radian": 260, "randint": 269, "random": [261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 512], "read": 8, "real": [67, 274], "reciproc": [68, 275], "recv": 135, "recv_lik": 136, "reduc": 516, "refer": 8, "regress": 5, "relu": [418, 471], "relu6": [419, 472], "remaind": 276, "remot": [516, 520], "repeat": 277, "requir": 9, "reset_peak_memori": 278, "reshap": [69, 279], "result": 2, "rfft": 171, "rfft2": 172, "rfftn": 173, "right_shift": 280, "ring": [516, 520], "rms_norm": 157, "rmsnorm": 416, "rmsprop": 502, "rnn": 417, "roll": 281, "rope": [158, 420], "round": [70, 282], "rsqrt": [71, 283], "run": 516, "sampl": 1, "save": [284, 488, 524], "save_gguf": 285, "save_safetensor": 286, "save_weight": 403, "savez": 287, "savez_compress": 288, "scaled_dot_product_attent": 159, "schedul": 511, "script": [2, 6], "seed": 270, "select": 516, "selu": [421, 473], "send": 137, "sequenti": 422, "serial": 524, "set": [516, 520], "set_cache_limit": 289, "set_default_devic": 290, "set_default_stream": 291, "set_dtyp": 404, "set_memory_limit": 292, "set_wired_limit": 293, "setuptool": 2, "sgd": 503, "shape": [1, 72], "shapeless": [515, 517], "shell": 9, "sigmoid": [294, 424, 474], "sign": 295, "silu": [423, 475], "simpl": [1, 525], "sin": [73, 296], "sinh": 297, "sinusoidalpositionalencod": 425, "size": [9, 74], "slice": 298, "slice_upd": 299, "smooth_l1_loss": 467, "softmax": [300, 426, 476], "softmin": [427, 477], "softplu": [428, 478], "softshrink": [429, 479], "softsign": 430, "solv": 217, "solve_triangular": 218, "sort": 301, "sourc": 9, "specif": 520, "specifi": 526, "speedup": 515, "split": [75, 271, 302], "sqrt": [76, 303], "squar": [77, 304], "squeez": [78, 305], "stack": 306, "start": [356, 516, 523], "start_captur": 240, "state": [405, 500], "std": [79, 307], "step": [431, 480], "step_decai": 508, 
"stop_captur": 241, "stop_gradi": 308, "stream": [309, 344, 348, 526], "stride": 1, "subtract": 310, "sum": [80, 311], "support": 347, "svd": 219, "swapax": [81, 312], "synchron": 313, "t": 32, "take": 314, "take_along_axi": 315, "tan": 316, "tanh": [317, 432, 481], "tensordot": 318, "tensorflow": 522, "thunderbolt": 516, "tile": 319, "togeth": 6, "tolist": 82, "topk": 320, "trace": [321, 517], "train": [406, 408, 515, 516], "trainable_paramet": 407, "transform": [2, 433, 513, 515, 517, 518, 521, 523], "transpos": [83, 322], "tree": 514, "tree_flatten": 339, "tree_map": 340, "tree_map_with_path": 341, "tree_reduc": 342, "tree_unflatten": 343, "tri": 323, "tri_inv": 220, "tril": 324, "triplet_loss": 468, "triu": 325, "troubleshoot": 9, "truncated_norm": 272, "tune": 516, "type": 347, "unflatten": 326, "unfreez": 409, "unifi": 525, "uniform": [273, 442], "up": [516, 520], "updat": [356, 410, 501, 519], "update_modul": 411, "upsampl": 434, "us": [1, 2, 4, 521, 526], "usag": [2, 8, 520], "util": [339, 340, 341, 342, 343, 514, 516], "valu": 356, "value_and_grad": [327, 337], "var": [84, 328], "variabl": 4, "vector": 518, "view": [85, 329], "vjp": [1, 330], "vmap": 331, "weight": 6, "what": 521, "when": 521, "where": 332, "why": 521, "workflow": 3, "x86": 9, "xcode": 3, "you": 521, "zero": 333, "zeros_lik": 334}}) \ No newline at end of file diff --git a/docs/build/html/usage/compile.html b/docs/build/html/usage/compile.html index 32181cff1..c2e5c85a5 100644 --- a/docs/build/html/usage/compile.html +++ b/docs/build/html/usage/compile.html @@ -8,7 +8,7 @@ - Compilation — MLX 0.28.0 documentation + Compilation — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • @@ -1113,7 +1121,7 @@ computation graph. Printing such an array results in a crash.

        def fun(x, y):
            z = x + y
            state.append(z)
-           return mx.exp(z), state
+           return mx.exp(z)

        fun(mx.array(1.0), mx.array(2.0))
        # Prints [array(3, dtype=float32)]
diff --git a/docs/build/html/usage/distributed.html b/docs/build/html/usage/distributed.html index bb72ab050..408c7f461 100644 --- a/docs/build/html/usage/distributed.html +++ b/docs/build/html/usage/distributed.html @@ -8,7 +8,7 @@ - Distributed Communication — MLX 0.28.0 documentation + Distributed Communication — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
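The compile.html hunk above drops state from the return value: inside a compiled function the appended z is a placeholder (tracer) array, which is why printing it crashes. A minimal sketch of the companion pattern, assuming the outputs= keyword documented for mx.compile (variable names here are illustrative):

        import mlx.core as mx
        from functools import partial

        state = []

        # Registering `state` as an implicit output lets compile write real
        # arrays into it instead of placeholder tracers.
        @partial(mx.compile, outputs=state)
        def fun(x, y):
            z = x + y
            state.append(z)
            return mx.exp(z)

        fun(mx.array(1.0), mx.array(2.0))
        print(state)  # [array(3, dtype=float32)]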
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/export.html b/docs/build/html/usage/export.html index 10db0dffb..e52902082 100644 --- a/docs/build/html/usage/export.html +++ b/docs/build/html/usage/export.html @@ -8,7 +8,7 @@ - Exporting Functions — MLX 0.28.0 documentation + Exporting Functions — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/function_transforms.html b/docs/build/html/usage/function_transforms.html index d5f75835d..16eda30de 100644 --- a/docs/build/html/usage/function_transforms.html +++ b/docs/build/html/usage/function_transforms.html @@ -8,7 +8,7 @@ - Function Transforms — MLX 0.28.0 documentation + Function Transforms — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/indexing.html b/docs/build/html/usage/indexing.html index 7ce301a8d..d945dc844 100644 --- a/docs/build/html/usage/indexing.html +++ b/docs/build/html/usage/indexing.html @@ -8,7 +8,7 @@ - Indexing Arrays — MLX 0.28.0 documentation + Indexing Arrays — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/launching_distributed.html b/docs/build/html/usage/launching_distributed.html index b76d04737..563584846 100644 --- a/docs/build/html/usage/launching_distributed.html +++ b/docs/build/html/usage/launching_distributed.html @@ -8,7 +8,7 @@ - Launching Distributed Programs — MLX 0.28.0 documentation + Launching Distributed Programs — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/lazy_evaluation.html b/docs/build/html/usage/lazy_evaluation.html index ce6d0e3da..b1e161ef0 100644 --- a/docs/build/html/usage/lazy_evaluation.html +++ b/docs/build/html/usage/lazy_evaluation.html @@ -8,7 +8,7 @@ - Lazy Evaluation — MLX 0.28.0 documentation + Lazy Evaluation — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/numpy.html b/docs/build/html/usage/numpy.html index 895dbc013..571a08b8b 100644 --- a/docs/build/html/usage/numpy.html +++ b/docs/build/html/usage/numpy.html @@ -8,7 +8,7 @@ - Conversion to NumPy and Other Frameworks — MLX 0.28.0 documentation + Conversion to NumPy and Other Frameworks — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/quick_start.html b/docs/build/html/usage/quick_start.html index a3d57288c..c88a04a3c 100644 --- a/docs/build/html/usage/quick_start.html +++ b/docs/build/html/usage/quick_start.html @@ -8,7 +8,7 @@ - Quick Start Guide — MLX 0.28.0 documentation + Quick Start Guide — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/saving_and_loading.html b/docs/build/html/usage/saving_and_loading.html index 10036cb86..feb720bc8 100644 --- a/docs/build/html/usage/saving_and_loading.html +++ b/docs/build/html/usage/saving_and_loading.html @@ -8,7 +8,7 @@ - Saving and Loading Arrays — MLX 0.28.0 documentation + Saving and Loading Arrays — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/unified_memory.html b/docs/build/html/usage/unified_memory.html index b5fb7e2ff..d8c4a64c9 100644 --- a/docs/build/html/usage/unified_memory.html +++ b/docs/build/html/usage/unified_memory.html @@ -8,7 +8,7 @@ - Unified Memory — MLX 0.28.0 documentation + Unified Memory — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
      • mlx.core.get_active_memory
      • mlx.core.get_peak_memory
      • diff --git a/docs/build/html/usage/using_streams.html b/docs/build/html/usage/using_streams.html index bc8752519..888e61c9d 100644 --- a/docs/build/html/usage/using_streams.html +++ b/docs/build/html/usage/using_streams.html @@ -8,7 +8,7 @@ - Using Streams — MLX 0.28.0 documentation + Using Streams — MLX 0.29.0 documentation @@ -30,15 +30,18 @@ + - + + + @@ -137,8 +140,8 @@ - MLX 0.28.0 documentation - Home - + MLX 0.29.0 documentation - Home + @@ -477,6 +480,7 @@
      • mlx.core.fast.rope
      • mlx.core.fast.scaled_dot_product_attention
      • mlx.core.fast.metal_kernel
      • +
      • mlx.core.fast.cuda_kernel
    • FFT
    • +
    • CUDA
      +
    • Memory Management
    • from_linear(linear_layer[, group_size, bits])

      from_linear(linear_layer[, group_size, ...])

      Create a QuantizedLinear layer from a Linear layer.

      degrees(a, /, *[, stream])

      Convert angles from radians to degrees.

      dequantize(w, /, scales, biases[, ...])

      Dequantize the matrix w using the provided scales and biases and the group_size and bits configuration.

      dequantize(w, /, scales, biases, group_size, ...)

      Dequantize the matrix w using quantization parameters.

      diag(a, /[, k, stream])

      Extract a diagonal or construct a diagonal matrix.

      gather_mm(a, b, /, lhs_indices, rhs_indices, *)

      Matrix multiplication with matrix-level gather.

      gather_qmm(x, w, /, scales, biases[, ...])

      gather_qmm(x, w, /, scales[, biases, ...])

      Perform quantized matrix multiplication with matrix-level gather.

      greater(a, b[, stream])

      put_along_axis(a, /, indices, values[, ...])

      Put values along an axis at the specified indices.

      quantize(w, /[, group_size, bits, stream])

      quantize(w, /[, group_size, bits, mode, stream])

      Quantize the matrix w using bits bits per element.

      quantized_matmul(x, w, /, scales, biases[, ...])

      quantized_matmul(x, w, /, scales[, biases, ...])

      Perform the matrix multiplication with the quantized matrix w.

      radians(a, /, *[, stream])