From a7d2c589b25b1deb486a6cd4c35ecdfe26a8e5fd Mon Sep 17 00:00:00 2001
From: Awni Hannun
Date: Mon, 20 May 2024 09:40:17 -0700
Subject: [PATCH] docs update
---
docs/build/html/.buildinfo | 2 +-
docs/build/html/_sources/dev/extensions.rst | 16 +-
docs/build/html/_sources/install.rst | 22 +-
.../python/_autosummary/mlx.core.addmm.rst | 6 +
.../_autosummary/mlx.core.as_strided.rst | 6 +
.../_autosummary/mlx.core.linalg.cholesky.rst | 6 +
.../_autosummary/mlx.core.linalg.inv.rst | 6 +
.../_autosummary/mlx.core.linalg.svd.rst | 6 +
.../python/_autosummary/mlx.core.power.rst | 6 +
.../_autosummary/mlx.core.remainder.rst | 6 +
docs/build/html/_sources/python/linalg.rst | 3 +
.../python/nn/_autosummary/mlx.nn.Conv3d.rst | 16 +
.../nn/_autosummary/mlx.nn.Embedding.rst | 1 +
.../python/nn/_autosummary/mlx.nn.Linear.rst | 1 +
docs/build/html/_sources/python/nn/layers.rst | 1 +
docs/build/html/_sources/python/ops.rst | 5 +
.../html/_static/documentation_options.js | 2 +-
docs/build/html/annotated.html | 172 +-
...ckend_2metal_2kernels_2bf16_8h_source.html | 4 +-
...ls_2steel_2gemm_2transforms_8h_source.html | 114 +-
docs/build/html/bf16__math_8h_source.html | 2 +-
docs/build/html/classes.html | 4 +-
..._1core_1_1_block_sparse_q_m_m-members.html | 115 ++
...assmlx_1_1core_1_1_block_sparse_q_m_m.html | 447 ++++++
...lassmlx_1_1core_1_1_block_sparse_q_m_m.png | Bin 0 -> 981 bytes
...classmlx_1_1core_1_1_cholesky-members.html | 115 ++
.../html/classmlx_1_1core_1_1_cholesky.html | 327 ++++
.../html/classmlx_1_1core_1_1_cholesky.png | Bin 0 -> 909 bytes
.../html/classmlx_1_1core_1_1_primitive.html | 10 +-
.../classmlx_1_1core_1_1_unary_primitive.html | 136 +-
.../classmlx_1_1core_1_1_unary_primitive.png | Bin 31591 -> 32408 bytes
docs/build/html/compiled_8h_source.html | 2 +-
docs/build/html/cpp/ops.html | 39 +-
docs/build/html/dev/extensions.html | 37 +-
docs/build/html/dev/metal_debugger.html | 23 +-
docs/build/html/doxygen_crawl.html | 4 +
.../html/examples/linear_regression.html | 19 +-
docs/build/html/examples/llama-inference.html | 19 +-
docs/build/html/examples/mlp.html | 19 +-
docs/build/html/functions_a.html | 4 +-
docs/build/html/functions_b.html | 1 +
docs/build/html/functions_c.html | 1 +
docs/build/html/functions_e.html | 6 +-
docs/build/html/functions_func_a.html | 4 +-
docs/build/html/functions_func_b.html | 1 +
docs/build/html/functions_func_c.html | 1 +
docs/build/html/functions_func_e.html | 4 +-
docs/build/html/functions_func_i.html | 2 +-
docs/build/html/functions_func_j.html | 2 +-
docs/build/html/functions_func_p.html | 2 +-
docs/build/html/functions_func_v.html | 4 +-
docs/build/html/functions_i.html | 2 +-
docs/build/html/functions_j.html | 2 +-
docs/build/html/functions_p.html | 2 +-
docs/build/html/functions_t.html | 2 +-
docs/build/html/functions_v.html | 4 +-
docs/build/html/functions_vars_t.html | 2 +-
docs/build/html/genindex.html | 47 +-
docs/build/html/group__ops.html | 117 ++
docs/build/html/hierarchy.html | 132 +-
docs/build/html/index.html | 21 +-
docs/build/html/install.html | 64 +-
.../kernels_2steel_2gemm_2gemm_8h_source.html | 2 +-
docs/build/html/linalg_8h.html | 2 +
docs/build/html/linalg_8h_source.html | 5 +-
docs/build/html/mma_8h_source.html | 229 ++-
docs/build/html/namespacemembers_b.html | 1 +
docs/build/html/namespacemembers_c.html | 2 +
docs/build/html/namespacemembers_func_b.html | 1 +
docs/build/html/namespacemembers_func_c.html | 2 +
docs/build/html/namespacemlx_1_1core.html | 10 +
.../html/namespacemlx_1_1core_1_1linalg.html | 28 +
docs/build/html/namespaces.html | 172 +-
docs/build/html/objects.inv | Bin 23657 -> 24021 bytes
docs/build/html/ops_8h.html | 6 +
docs/build/html/ops_8h_source.html | 251 +--
docs/build/html/primitives_8h.html | 4 +
docs/build/html/primitives_8h_source.html | 1416 +++++++++--------
.../python/_autosummary/mlx.core.Device.html | 19 +-
.../python/_autosummary/mlx.core.Dtype.html | 19 +-
.../_autosummary/mlx.core.DtypeCategory.html | 19 +-
.../python/_autosummary/mlx.core.Stream.html | 19 +-
.../python/_autosummary/mlx.core.abs.html | 19 +-
.../python/_autosummary/mlx.core.add.html | 25 +-
.../python/_autosummary/mlx.core.addmm.html | 940 +++++++++++
.../python/_autosummary/mlx.core.all.html | 25 +-
.../_autosummary/mlx.core.allclose.html | 19 +-
.../python/_autosummary/mlx.core.any.html | 19 +-
.../python/_autosummary/mlx.core.arange.html | 19 +-
.../python/_autosummary/mlx.core.arccos.html | 19 +-
.../python/_autosummary/mlx.core.arccosh.html | 19 +-
.../python/_autosummary/mlx.core.arcsin.html | 19 +-
.../python/_autosummary/mlx.core.arcsinh.html | 19 +-
.../python/_autosummary/mlx.core.arctan.html | 19 +-
.../python/_autosummary/mlx.core.arctan2.html | 19 +-
.../python/_autosummary/mlx.core.arctanh.html | 19 +-
.../python/_autosummary/mlx.core.argmax.html | 19 +-
.../python/_autosummary/mlx.core.argmin.html | 19 +-
.../_autosummary/mlx.core.argpartition.html | 19 +-
.../python/_autosummary/mlx.core.argsort.html | 19 +-
.../python/_autosummary/mlx.core.array.T.html | 19 +-
.../_autosummary/mlx.core.array.abs.html | 19 +-
.../_autosummary/mlx.core.array.all.html | 19 +-
.../_autosummary/mlx.core.array.any.html | 19 +-
.../_autosummary/mlx.core.array.argmax.html | 19 +-
.../_autosummary/mlx.core.array.argmin.html | 19 +-
.../_autosummary/mlx.core.array.astype.html | 19 +-
.../_autosummary/mlx.core.array.at.html | 19 +-
.../_autosummary/mlx.core.array.cos.html | 19 +-
.../_autosummary/mlx.core.array.cummax.html | 19 +-
.../_autosummary/mlx.core.array.cummin.html | 19 +-
.../_autosummary/mlx.core.array.cumprod.html | 19 +-
.../_autosummary/mlx.core.array.cumsum.html | 19 +-
.../_autosummary/mlx.core.array.diag.html | 19 +-
.../_autosummary/mlx.core.array.diagonal.html | 19 +-
.../_autosummary/mlx.core.array.dtype.html | 19 +-
.../_autosummary/mlx.core.array.exp.html | 19 +-
.../_autosummary/mlx.core.array.flatten.html | 19 +-
.../python/_autosummary/mlx.core.array.html | 19 +-
.../_autosummary/mlx.core.array.item.html | 19 +-
.../_autosummary/mlx.core.array.itemsize.html | 19 +-
.../_autosummary/mlx.core.array.log.html | 19 +-
.../_autosummary/mlx.core.array.log10.html | 19 +-
.../_autosummary/mlx.core.array.log1p.html | 19 +-
.../_autosummary/mlx.core.array.log2.html | 19 +-
.../mlx.core.array.logsumexp.html | 19 +-
.../_autosummary/mlx.core.array.max.html | 19 +-
.../_autosummary/mlx.core.array.mean.html | 19 +-
.../_autosummary/mlx.core.array.min.html | 19 +-
.../_autosummary/mlx.core.array.moveaxis.html | 19 +-
.../_autosummary/mlx.core.array.nbytes.html | 19 +-
.../_autosummary/mlx.core.array.ndim.html | 19 +-
.../_autosummary/mlx.core.array.prod.html | 19 +-
.../mlx.core.array.reciprocal.html | 19 +-
.../_autosummary/mlx.core.array.reshape.html | 19 +-
.../_autosummary/mlx.core.array.round.html | 19 +-
.../_autosummary/mlx.core.array.rsqrt.html | 19 +-
.../_autosummary/mlx.core.array.shape.html | 19 +-
.../_autosummary/mlx.core.array.sin.html | 19 +-
.../_autosummary/mlx.core.array.size.html | 19 +-
.../_autosummary/mlx.core.array.split.html | 19 +-
.../_autosummary/mlx.core.array.sqrt.html | 19 +-
.../_autosummary/mlx.core.array.square.html | 19 +-
.../_autosummary/mlx.core.array.squeeze.html | 19 +-
.../_autosummary/mlx.core.array.sum.html | 19 +-
.../_autosummary/mlx.core.array.swapaxes.html | 19 +-
.../_autosummary/mlx.core.array.tolist.html | 19 +-
.../mlx.core.array.transpose.html | 19 +-
.../_autosummary/mlx.core.array.var.html | 19 +-
.../_autosummary/mlx.core.array_equal.html | 25 +-
.../_autosummary/mlx.core.as_strided.html | 950 +++++++++++
.../_autosummary/mlx.core.atleast_1d.html | 25 +-
.../_autosummary/mlx.core.atleast_2d.html | 19 +-
.../_autosummary/mlx.core.atleast_3d.html | 19 +-
.../_autosummary/mlx.core.bitwise_and.html | 19 +-
.../_autosummary/mlx.core.bitwise_or.html | 19 +-
.../_autosummary/mlx.core.bitwise_xor.html | 19 +-
.../mlx.core.block_masked_mm.html | 19 +-
.../mlx.core.block_sparse_mm.html | 21 +-
.../_autosummary/mlx.core.broadcast_to.html | 19 +-
.../python/_autosummary/mlx.core.ceil.html | 19 +-
.../python/_autosummary/mlx.core.clip.html | 19 +-
.../python/_autosummary/mlx.core.compile.html | 19 +-
.../_autosummary/mlx.core.concatenate.html | 19 +-
.../python/_autosummary/mlx.core.conj.html | 19 +-
.../_autosummary/mlx.core.conjugate.html | 19 +-
.../python/_autosummary/mlx.core.conv1d.html | 19 +-
.../python/_autosummary/mlx.core.conv2d.html | 19 +-
.../_autosummary/mlx.core.conv_general.html | 19 +-
.../_autosummary/mlx.core.convolve.html | 19 +-
.../python/_autosummary/mlx.core.cos.html | 19 +-
.../python/_autosummary/mlx.core.cosh.html | 19 +-
.../python/_autosummary/mlx.core.cummax.html | 19 +-
.../python/_autosummary/mlx.core.cummin.html | 19 +-
.../python/_autosummary/mlx.core.cumprod.html | 19 +-
.../python/_autosummary/mlx.core.cumsum.html | 19 +-
.../_autosummary/mlx.core.default_device.html | 19 +-
.../_autosummary/mlx.core.default_stream.html | 19 +-
.../python/_autosummary/mlx.core.degrees.html | 19 +-
.../_autosummary/mlx.core.dequantize.html | 21 +-
.../python/_autosummary/mlx.core.diag.html | 19 +-
.../_autosummary/mlx.core.diagonal.html | 19 +-
.../mlx.core.disable_compile.html | 19 +-
.../python/_autosummary/mlx.core.divide.html | 19 +-
.../python/_autosummary/mlx.core.divmod.html | 19 +-
.../_autosummary/mlx.core.enable_compile.html | 19 +-
.../python/_autosummary/mlx.core.equal.html | 19 +-
.../python/_autosummary/mlx.core.erf.html | 19 +-
.../python/_autosummary/mlx.core.erfinv.html | 19 +-
.../python/_autosummary/mlx.core.eval.html | 19 +-
.../python/_autosummary/mlx.core.exp.html | 19 +-
.../_autosummary/mlx.core.expand_dims.html | 19 +-
.../python/_autosummary/mlx.core.expm1.html | 19 +-
.../python/_autosummary/mlx.core.eye.html | 19 +-
.../mlx.core.fast.layer_norm.html | 19 +-
.../_autosummary/mlx.core.fast.rms_norm.html | 19 +-
.../_autosummary/mlx.core.fast.rope.html | 19 +-
...ore.fast.scaled_dot_product_attention.html | 19 +-
.../python/_autosummary/mlx.core.fft.fft.html | 19 +-
.../_autosummary/mlx.core.fft.fft2.html | 19 +-
.../_autosummary/mlx.core.fft.fftn.html | 19 +-
.../_autosummary/mlx.core.fft.ifft.html | 19 +-
.../_autosummary/mlx.core.fft.ifft2.html | 19 +-
.../_autosummary/mlx.core.fft.ifftn.html | 19 +-
.../_autosummary/mlx.core.fft.irfft.html | 19 +-
.../_autosummary/mlx.core.fft.irfft2.html | 19 +-
.../_autosummary/mlx.core.fft.irfftn.html | 19 +-
.../_autosummary/mlx.core.fft.rfft.html | 19 +-
.../_autosummary/mlx.core.fft.rfft2.html | 19 +-
.../_autosummary/mlx.core.fft.rfftn.html | 19 +-
.../python/_autosummary/mlx.core.flatten.html | 19 +-
.../python/_autosummary/mlx.core.floor.html | 19 +-
.../_autosummary/mlx.core.floor_divide.html | 19 +-
.../python/_autosummary/mlx.core.full.html | 19 +-
.../python/_autosummary/mlx.core.grad.html | 19 +-
.../python/_autosummary/mlx.core.greater.html | 19 +-
.../_autosummary/mlx.core.greater_equal.html | 19 +-
.../_autosummary/mlx.core.identity.html | 19 +-
.../python/_autosummary/mlx.core.inner.html | 21 +-
.../python/_autosummary/mlx.core.isclose.html | 19 +-
.../python/_autosummary/mlx.core.isinf.html | 19 +-
.../python/_autosummary/mlx.core.isnan.html | 19 +-
.../_autosummary/mlx.core.isneginf.html | 19 +-
.../_autosummary/mlx.core.isposinf.html | 19 +-
.../_autosummary/mlx.core.issubdtype.html | 21 +-
.../python/_autosummary/mlx.core.jvp.html | 19 +-
.../_autosummary/mlx.core.left_shift.html | 19 +-
.../python/_autosummary/mlx.core.less.html | 19 +-
.../_autosummary/mlx.core.less_equal.html | 19 +-
.../mlx.core.linalg.cholesky.html | 943 +++++++++++
.../_autosummary/mlx.core.linalg.inv.html | 938 +++++++++++
.../_autosummary/mlx.core.linalg.norm.html | 31 +-
.../_autosummary/mlx.core.linalg.qr.html | 31 +-
.../_autosummary/mlx.core.linalg.svd.html | 939 +++++++++++
.../_autosummary/mlx.core.linspace.html | 19 +-
.../python/_autosummary/mlx.core.load.html | 21 +-
.../python/_autosummary/mlx.core.log.html | 19 +-
.../python/_autosummary/mlx.core.log10.html | 19 +-
.../python/_autosummary/mlx.core.log1p.html | 19 +-
.../python/_autosummary/mlx.core.log2.html | 19 +-
.../_autosummary/mlx.core.logaddexp.html | 19 +-
.../_autosummary/mlx.core.logical_and.html | 19 +-
.../_autosummary/mlx.core.logical_not.html | 19 +-
.../_autosummary/mlx.core.logical_or.html | 19 +-
.../_autosummary/mlx.core.logsumexp.html | 19 +-
.../python/_autosummary/mlx.core.matmul.html | 19 +-
.../python/_autosummary/mlx.core.max.html | 19 +-
.../python/_autosummary/mlx.core.maximum.html | 19 +-
.../python/_autosummary/mlx.core.mean.html | 19 +-
.../_autosummary/mlx.core.meshgrid.html | 19 +-
.../mlx.core.metal.clear_cache.html | 19 +-
.../mlx.core.metal.device_info.html | 19 +-
.../mlx.core.metal.get_active_memory.html | 19 +-
.../mlx.core.metal.get_cache_memory.html | 19 +-
.../mlx.core.metal.get_peak_memory.html | 19 +-
.../mlx.core.metal.is_available.html | 19 +-
.../mlx.core.metal.reset_peak_memory.html | 19 +-
.../mlx.core.metal.set_cache_limit.html | 19 +-
.../mlx.core.metal.set_memory_limit.html | 19 +-
.../mlx.core.metal.start_capture.html | 19 +-
.../mlx.core.metal.stop_capture.html | 19 +-
.../python/_autosummary/mlx.core.min.html | 19 +-
.../python/_autosummary/mlx.core.minimum.html | 19 +-
.../_autosummary/mlx.core.moveaxis.html | 19 +-
.../_autosummary/mlx.core.multiply.html | 19 +-
.../_autosummary/mlx.core.negative.html | 19 +-
.../_autosummary/mlx.core.new_stream.html | 19 +-
.../_autosummary/mlx.core.not_equal.html | 19 +-
.../python/_autosummary/mlx.core.ones.html | 19 +-
.../_autosummary/mlx.core.ones_like.html | 19 +-
.../python/_autosummary/mlx.core.outer.html | 21 +-
.../python/_autosummary/mlx.core.pad.html | 25 +-
.../_autosummary/mlx.core.partition.html | 19 +-
.../python/_autosummary/mlx.core.power.html | 936 +++++++++++
.../python/_autosummary/mlx.core.prod.html | 25 +-
.../_autosummary/mlx.core.quantize.html | 25 +-
.../mlx.core.quantized_matmul.html | 21 +-
.../python/_autosummary/mlx.core.radians.html | 19 +-
.../mlx.core.random.bernoulli.html | 19 +-
.../mlx.core.random.categorical.html | 19 +-
.../_autosummary/mlx.core.random.gumbel.html | 19 +-
.../_autosummary/mlx.core.random.key.html | 19 +-
.../mlx.core.random.multivariate_normal.html | 19 +-
.../_autosummary/mlx.core.random.normal.html | 19 +-
.../_autosummary/mlx.core.random.randint.html | 19 +-
.../_autosummary/mlx.core.random.seed.html | 19 +-
.../_autosummary/mlx.core.random.split.html | 19 +-
.../mlx.core.random.truncated_normal.html | 19 +-
.../_autosummary/mlx.core.random.uniform.html | 19 +-
.../_autosummary/mlx.core.reciprocal.html | 25 +-
.../_autosummary/mlx.core.remainder.html | 937 +++++++++++
.../python/_autosummary/mlx.core.repeat.html | 25 +-
.../python/_autosummary/mlx.core.reshape.html | 19 +-
.../_autosummary/mlx.core.right_shift.html | 19 +-
.../python/_autosummary/mlx.core.round.html | 21 +-
.../python/_autosummary/mlx.core.rsqrt.html | 19 +-
.../python/_autosummary/mlx.core.save.html | 19 +-
.../_autosummary/mlx.core.save_gguf.html | 19 +-
.../mlx.core.save_safetensors.html | 19 +-
.../python/_autosummary/mlx.core.savez.html | 19 +-
.../mlx.core.savez_compressed.html | 19 +-
.../mlx.core.set_default_device.html | 19 +-
.../mlx.core.set_default_stream.html | 19 +-
.../python/_autosummary/mlx.core.sigmoid.html | 19 +-
.../python/_autosummary/mlx.core.sign.html | 19 +-
.../python/_autosummary/mlx.core.sin.html | 19 +-
.../python/_autosummary/mlx.core.sinh.html | 19 +-
.../python/_autosummary/mlx.core.softmax.html | 19 +-
.../python/_autosummary/mlx.core.sort.html | 19 +-
.../python/_autosummary/mlx.core.split.html | 19 +-
.../python/_autosummary/mlx.core.sqrt.html | 19 +-
.../python/_autosummary/mlx.core.square.html | 19 +-
.../python/_autosummary/mlx.core.squeeze.html | 19 +-
.../python/_autosummary/mlx.core.stack.html | 19 +-
.../python/_autosummary/mlx.core.std.html | 19 +-
.../_autosummary/mlx.core.stop_gradient.html | 19 +-
.../_autosummary/mlx.core.subtract.html | 19 +-
.../python/_autosummary/mlx.core.sum.html | 19 +-
.../_autosummary/mlx.core.swapaxes.html | 19 +-
.../_autosummary/mlx.core.synchronize.html | 19 +-
.../python/_autosummary/mlx.core.take.html | 19 +-
.../mlx.core.take_along_axis.html | 19 +-
.../python/_autosummary/mlx.core.tan.html | 19 +-
.../python/_autosummary/mlx.core.tanh.html | 19 +-
.../_autosummary/mlx.core.tensordot.html | 21 +-
.../python/_autosummary/mlx.core.tile.html | 21 +-
.../python/_autosummary/mlx.core.topk.html | 19 +-
.../_autosummary/mlx.core.transpose.html | 19 +-
.../python/_autosummary/mlx.core.tri.html | 19 +-
.../python/_autosummary/mlx.core.tril.html | 19 +-
.../python/_autosummary/mlx.core.triu.html | 19 +-
.../_autosummary/mlx.core.value_and_grad.html | 19 +-
.../python/_autosummary/mlx.core.var.html | 19 +-
.../python/_autosummary/mlx.core.vjp.html | 19 +-
.../python/_autosummary/mlx.core.vmap.html | 19 +-
.../python/_autosummary/mlx.core.where.html | 21 +-
.../python/_autosummary/mlx.core.zeros.html | 19 +-
.../_autosummary/mlx.core.zeros_like.html | 19 +-
.../python/_autosummary/mlx.nn.quantize.html | 27 +-
.../_autosummary/mlx.nn.value_and_grad.html | 19 +-
.../mlx.optimizers.clip_grad_norm.html | 19 +-
.../_autosummary/mlx.utils.tree_flatten.html | 19 +-
.../_autosummary/mlx.utils.tree_map.html | 19 +-
.../mlx.utils.tree_map_with_path.html | 19 +-
.../_autosummary/mlx.utils.tree_reduce.html | 19 +-
.../mlx.utils.tree_unflatten.html | 19 +-
.../python/_autosummary/stream_class.html | 19 +-
docs/build/html/python/array.html | 19 +-
docs/build/html/python/data_types.html | 19 +-
.../html/python/devices_and_streams.html | 19 +-
docs/build/html/python/fast.html | 19 +-
docs/build/html/python/fft.html | 19 +-
docs/build/html/python/linalg.html | 36 +-
docs/build/html/python/metal.html | 25 +-
docs/build/html/python/nn.html | 23 +-
.../python/nn/_autosummary/mlx.nn.ALiBi.html | 19 +-
.../nn/_autosummary/mlx.nn.AvgPool1d.html | 19 +-
.../nn/_autosummary/mlx.nn.AvgPool2d.html | 19 +-
.../nn/_autosummary/mlx.nn.BatchNorm.html | 19 +-
.../python/nn/_autosummary/mlx.nn.Conv1d.html | 25 +-
.../python/nn/_autosummary/mlx.nn.Conv2d.html | 31 +-
.../python/nn/_autosummary/mlx.nn.Conv3d.html | 948 +++++++++++
.../nn/_autosummary/mlx.nn.Dropout.html | 25 +-
.../nn/_autosummary/mlx.nn.Dropout2d.html | 19 +-
.../nn/_autosummary/mlx.nn.Dropout3d.html | 19 +-
.../nn/_autosummary/mlx.nn.Embedding.html | 22 +-
.../python/nn/_autosummary/mlx.nn.GELU.html | 19 +-
.../python/nn/_autosummary/mlx.nn.GRU.html | 19 +-
.../nn/_autosummary/mlx.nn.GroupNorm.html | 19 +-
.../nn/_autosummary/mlx.nn.InstanceNorm.html | 19 +-
.../python/nn/_autosummary/mlx.nn.LSTM.html | 19 +-
.../nn/_autosummary/mlx.nn.LayerNorm.html | 19 +-
.../python/nn/_autosummary/mlx.nn.Linear.html | 22 +-
.../nn/_autosummary/mlx.nn.MaxPool1d.html | 19 +-
.../nn/_autosummary/mlx.nn.MaxPool2d.html | 19 +-
.../python/nn/_autosummary/mlx.nn.Mish.html | 19 +-
.../nn/_autosummary/mlx.nn.Module.apply.html | 19 +-
.../mlx.nn.Module.apply_to_modules.html | 19 +-
.../_autosummary/mlx.nn.Module.children.html | 19 +-
.../nn/_autosummary/mlx.nn.Module.eval.html | 19 +-
.../mlx.nn.Module.filter_and_map.html | 19 +-
.../nn/_autosummary/mlx.nn.Module.freeze.html | 19 +-
.../mlx.nn.Module.leaf_modules.html | 19 +-
.../mlx.nn.Module.load_weights.html | 19 +-
.../_autosummary/mlx.nn.Module.modules.html | 19 +-
.../mlx.nn.Module.named_modules.html | 19 +-
.../mlx.nn.Module.parameters.html | 19 +-
.../mlx.nn.Module.save_weights.html | 19 +-
.../_autosummary/mlx.nn.Module.set_dtype.html | 19 +-
.../nn/_autosummary/mlx.nn.Module.state.html | 19 +-
.../nn/_autosummary/mlx.nn.Module.train.html | 19 +-
.../mlx.nn.Module.trainable_parameters.html | 19 +-
.../_autosummary/mlx.nn.Module.training.html | 19 +-
.../_autosummary/mlx.nn.Module.unfreeze.html | 19 +-
.../nn/_autosummary/mlx.nn.Module.update.html | 19 +-
.../mlx.nn.Module.update_modules.html | 19 +-
.../mlx.nn.MultiHeadAttention.html | 19 +-
.../python/nn/_autosummary/mlx.nn.PReLU.html | 19 +-
.../mlx.nn.QuantizedEmbedding.html | 19 +-
.../_autosummary/mlx.nn.QuantizedLinear.html | 19 +-
.../nn/_autosummary/mlx.nn.RMSNorm.html | 19 +-
.../python/nn/_autosummary/mlx.nn.RNN.html | 19 +-
.../python/nn/_autosummary/mlx.nn.ReLU.html | 19 +-
.../python/nn/_autosummary/mlx.nn.RoPE.html | 19 +-
.../python/nn/_autosummary/mlx.nn.SELU.html | 19 +-
.../nn/_autosummary/mlx.nn.Sequential.html | 19 +-
.../python/nn/_autosummary/mlx.nn.SiLU.html | 19 +-
.../mlx.nn.SinusoidalPositionalEncoding.html | 19 +-
.../nn/_autosummary/mlx.nn.Softshrink.html | 19 +-
.../python/nn/_autosummary/mlx.nn.Step.html | 19 +-
.../nn/_autosummary/mlx.nn.Transformer.html | 19 +-
.../nn/_autosummary/mlx.nn.Upsample.html | 19 +-
.../nn/_autosummary/mlx.nn.init.constant.html | 19 +-
.../mlx.nn.init.glorot_normal.html | 19 +-
.../mlx.nn.init.glorot_uniform.html | 19 +-
.../_autosummary/mlx.nn.init.he_normal.html | 19 +-
.../_autosummary/mlx.nn.init.he_uniform.html | 19 +-
.../nn/_autosummary/mlx.nn.init.identity.html | 19 +-
.../nn/_autosummary/mlx.nn.init.normal.html | 19 +-
.../nn/_autosummary/mlx.nn.init.uniform.html | 19 +-
.../nn/_autosummary_functions/mlx.nn.elu.html | 19 +-
.../_autosummary_functions/mlx.nn.gelu.html | 19 +-
.../mlx.nn.gelu_approx.html | 19 +-
.../mlx.nn.gelu_fast_approx.html | 19 +-
.../nn/_autosummary_functions/mlx.nn.glu.html | 19 +-
.../mlx.nn.hardswish.html | 19 +-
.../mlx.nn.leaky_relu.html | 19 +-
.../mlx.nn.log_sigmoid.html | 19 +-
.../mlx.nn.log_softmax.html | 19 +-
.../mlx.nn.losses.binary_cross_entropy.html | 19 +-
.../mlx.nn.losses.cosine_similarity_loss.html | 19 +-
.../mlx.nn.losses.cross_entropy.html | 19 +-
.../mlx.nn.losses.gaussian_nll_loss.html | 19 +-
.../mlx.nn.losses.hinge_loss.html | 19 +-
.../mlx.nn.losses.huber_loss.html | 19 +-
.../mlx.nn.losses.kl_div_loss.html | 19 +-
.../mlx.nn.losses.l1_loss.html | 19 +-
.../mlx.nn.losses.log_cosh_loss.html | 19 +-
.../mlx.nn.losses.margin_ranking_loss.html | 19 +-
.../mlx.nn.losses.mse_loss.html | 19 +-
.../mlx.nn.losses.nll_loss.html | 19 +-
.../mlx.nn.losses.smooth_l1_loss.html | 19 +-
.../mlx.nn.losses.triplet_loss.html | 19 +-
.../_autosummary_functions/mlx.nn.mish.html | 19 +-
.../_autosummary_functions/mlx.nn.prelu.html | 19 +-
.../_autosummary_functions/mlx.nn.relu.html | 19 +-
.../_autosummary_functions/mlx.nn.relu6.html | 19 +-
.../_autosummary_functions/mlx.nn.selu.html | 19 +-
.../mlx.nn.sigmoid.html | 19 +-
.../_autosummary_functions/mlx.nn.silu.html | 19 +-
.../mlx.nn.softmax.html | 19 +-
.../mlx.nn.softplus.html | 19 +-
.../mlx.nn.softshrink.html | 19 +-
.../_autosummary_functions/mlx.nn.step.html | 19 +-
.../_autosummary_functions/mlx.nn.tanh.html | 19 +-
docs/build/html/python/nn/functions.html | 19 +-
docs/build/html/python/nn/init.html | 19 +-
docs/build/html/python/nn/layers.html | 82 +-
docs/build/html/python/nn/losses.html | 19 +-
docs/build/html/python/nn/module.html | 19 +-
docs/build/html/python/ops.html | 210 +--
docs/build/html/python/optimizers.html | 19 +-
.../_autosummary/mlx.optimizers.AdaDelta.html | 19 +-
.../mlx.optimizers.Adafactor.html | 19 +-
.../_autosummary/mlx.optimizers.Adagrad.html | 19 +-
.../_autosummary/mlx.optimizers.Adam.html | 19 +-
.../_autosummary/mlx.optimizers.AdamW.html | 19 +-
.../_autosummary/mlx.optimizers.Adamax.html | 19 +-
.../_autosummary/mlx.optimizers.Lion.html | 19 +-
....optimizers.Optimizer.apply_gradients.html | 19 +-
.../mlx.optimizers.Optimizer.init.html | 19 +-
.../mlx.optimizers.Optimizer.state.html | 19 +-
.../mlx.optimizers.Optimizer.update.html | 19 +-
.../_autosummary/mlx.optimizers.RMSprop.html | 19 +-
.../_autosummary/mlx.optimizers.SGD.html | 19 +-
.../mlx.optimizers.cosine_decay.html | 23 +-
.../mlx.optimizers.exponential_decay.html | 19 +-
.../mlx.optimizers.join_schedules.html | 19 +-
.../mlx.optimizers.linear_schedule.html | 19 +-
.../mlx.optimizers.step_decay.html | 19 +-
.../python/optimizers/common_optimizers.html | 19 +-
.../html/python/optimizers/optimizer.html | 19 +-
.../html/python/optimizers/schedulers.html | 21 +-
docs/build/html/python/random.html | 19 +-
docs/build/html/python/transforms.html | 19 +-
docs/build/html/python/tree_utils.html | 19 +-
docs/build/html/reduce__inst_8h.html | 18 +-
docs/build/html/reduce__inst_8h_source.html | 139 +-
docs/build/html/search.html | 19 +-
docs/build/html/search/all_1.js | 18 +-
docs/build/html/search/all_10.js | 115 +-
docs/build/html/search/all_14.js | 4 +-
docs/build/html/search/all_16.js | 4 +-
docs/build/html/search/all_2.js | 41 +-
docs/build/html/search/all_3.js | 171 +-
docs/build/html/search/all_4.js | 106 +-
docs/build/html/search/all_5.js | 47 +-
docs/build/html/search/all_9.js | 2 +-
docs/build/html/search/all_a.js | 2 +-
docs/build/html/search/all_d.js | 160 +-
docs/build/html/search/classes_2.js | 9 +-
docs/build/html/search/classes_3.js | 59 +-
docs/build/html/search/functions_1.js | 92 +-
docs/build/html/search/functions_10.js | 6 +-
docs/build/html/search/functions_16.js | 4 +-
docs/build/html/search/functions_2.js | 24 +-
docs/build/html/search/functions_3.js | 109 +-
docs/build/html/search/functions_4.js | 8 +-
docs/build/html/search/functions_5.js | 8 +-
docs/build/html/search/functions_9.js | 2 +-
docs/build/html/search/functions_a.js | 2 +-
docs/build/html/search/variables_12.js | 4 +-
docs/build/html/searchindex.js | 2 +-
...tmlx_1_1steel_1_1_block_m_m_a-members.html | 6 +-
.../structmlx_1_1steel_1_1_block_m_m_a.html | 97 ++
...lx_1_1steel_1_1_transform_add-members.html | 5 +-
.../structmlx_1_1steel_1_1_transform_add.html | 31 +-
..._1_1steel_1_1_transform_axpby-members.html | 7 +-
...tructmlx_1_1steel_1_1_transform_axpby.html | 35 +-
docs/build/html/usage/compile.html | 19 +-
.../build/html/usage/function_transforms.html | 19 +-
docs/build/html/usage/indexing.html | 19 +-
docs/build/html/usage/lazy_evaluation.html | 19 +-
docs/build/html/usage/numpy.html | 19 +-
docs/build/html/usage/quick_start.html | 19 +-
docs/build/html/usage/saving_and_loading.html | 19 +-
docs/build/html/usage/unified_memory.html | 19 +-
docs/build/html/usage/using_streams.html | 19 +-
528 files changed, 17198 insertions(+), 4162 deletions(-)
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.addmm.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.as_strided.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.linalg.cholesky.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.linalg.inv.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.linalg.svd.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.power.rst
create mode 100644 docs/build/html/_sources/python/_autosummary/mlx.core.remainder.rst
create mode 100644 docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Conv3d.rst
create mode 100644 docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m-members.html
create mode 100644 docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.html
create mode 100644 docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.png
create mode 100644 docs/build/html/classmlx_1_1core_1_1_cholesky-members.html
create mode 100644 docs/build/html/classmlx_1_1core_1_1_cholesky.html
create mode 100644 docs/build/html/classmlx_1_1core_1_1_cholesky.png
create mode 100644 docs/build/html/python/_autosummary/mlx.core.addmm.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.as_strided.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.linalg.cholesky.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.linalg.inv.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.linalg.svd.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.power.html
create mode 100644 docs/build/html/python/_autosummary/mlx.core.remainder.html
create mode 100644 docs/build/html/python/nn/_autosummary/mlx.nn.Conv3d.html
diff --git a/docs/build/html/.buildinfo b/docs/build/html/.buildinfo
index 4cd5f8315..e0cff859d 100644
--- a/docs/build/html/.buildinfo
+++ b/docs/build/html/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 6d31d3d7850f7f8959377483b35af018
+config: c0e0bb3fe4816a1bf9a98909252329c4
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/build/html/_sources/dev/extensions.rst b/docs/build/html/_sources/dev/extensions.rst
index acf41a773..9a2be90cd 100644
--- a/docs/build/html/_sources/dev/extensions.rst
+++ b/docs/build/html/_sources/dev/extensions.rst
@@ -1,5 +1,5 @@
-Developer Documentation
-=======================
+Custom Extensions in MLX
+========================
You can extend MLX with custom operations on the CPU or GPU. This guide
explains how to do that with a simple example.
@@ -494,7 +494,7 @@ below.
auto kernel = d.get_kernel(kname.str(), "mlx_ext");
// Prepare to encode kernel
- auto compute_encoder = d.get_command_encoder(s.index);
+ auto& compute_encoder = d.get_command_encoder(s.index);
compute_encoder->setComputePipelineState(kernel);
// Kernel parameters are registered with buffer indices corresponding to
@@ -503,11 +503,11 @@ below.
size_t nelem = out.size();
// Encode input arrays to kernel
- set_array_buffer(compute_encoder, x, 0);
- set_array_buffer(compute_encoder, y, 1);
+ compute_encoder.set_input_array(x, 0);
+ compute_encoder.set_input_array(y, 1);
// Encode output arrays to kernel
- set_array_buffer(compute_encoder, out, 2);
+ compute_encoder.set_output_array(out, 2);
// Encode alpha and beta
compute_encoder->setBytes(&alpha_, sizeof(float), 3);
@@ -531,7 +531,7 @@ below.
// Launch the grid with the given number of threads divided among
// the given threadgroups
- compute_encoder->dispatchThreads(grid_dims, group_dims);
+ compute_encoder.dispatchThreads(grid_dims, group_dims);
}
We can now call the :meth:`axpby` operation on both the CPU and the GPU!
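As a quick end-to-end check from Python, a minimal sketch, assuming the ``mlx_sample_extensions`` package built in this guide is installed and exposes ``axpby`` as described earlier:

.. code-block:: python

   import mlx.core as mx
   from mlx_sample_extensions import axpby

   a = mx.ones((3, 4))
   b = mx.ones((3, 4))

   # axpby computes alpha * x + beta * y, so every entry should be 4 + 2 = 6
   c = axpby(a, b, 4.0, 2.0)
   print(mx.all(c == 6.0).item())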
@@ -825,7 +825,7 @@ Let's look at a simple script and its results:
print(f"c shape: {c.shape}")
print(f"c dtype: {c.dtype}")
- print(f"c correctness: {mx.all(c == 6.0).item()}")
+ print(f"c correct: {mx.all(c == 6.0).item()}")
Output:
diff --git a/docs/build/html/_sources/install.rst b/docs/build/html/_sources/install.rst
index 252b234e6..693385e2c 100644
--- a/docs/build/html/_sources/install.rst
+++ b/docs/build/html/_sources/install.rst
@@ -153,6 +153,8 @@ should point to the path to the built metal library.
- OFF
* - MLX_BUILD_METAL
- ON
+ * - MLX_BUILD_CPU
+ - ON
* - MLX_BUILD_PYTHON_BINDINGS
- OFF
* - MLX_METAL_DEBUG
@@ -179,10 +181,28 @@ should point to the path to the built metal library.
xcrun -sdk macosx --show-sdk-version
+Binary Size Minimization
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+To produce a smaller binary, use the CMake flags ``CMAKE_BUILD_TYPE=MinSizeRel``
+and ``BUILD_SHARED_LIBS=ON``.
+
+The MLX CMake build has several additional options to make smaller binaries.
+For example, if you don't need the CPU backend or support for safetensors and
+GGUF, you can do:
+
+.. code-block:: shell
+
+ cmake .. \
+ -DCMAKE_BUILD_TYPE=MinSizeRel \
+ -DBUILD_SHARED_LIBS=ON \
+ -DMLX_BUILD_CPU=OFF \
+ -DMLX_BUILD_SAFETENSORS=OFF \
+ -DMLX_BUILD_GGUF=OFF
+
Troubleshooting
^^^^^^^^^^^^^^^
-
Metal not found
~~~~~~~~~~~~~~~
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.addmm.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.addmm.rst
new file mode 100644
index 000000000..2f513993b
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.addmm.rst
@@ -0,0 +1,6 @@
+mlx.core.addmm
+==============
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: addmm
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.as_strided.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.as_strided.rst
new file mode 100644
index 000000000..5ed10ae88
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.as_strided.rst
@@ -0,0 +1,6 @@
+mlx.core.as\_strided
+====================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: as_strided
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.cholesky.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.cholesky.rst
new file mode 100644
index 000000000..3e2dfd367
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.cholesky.rst
@@ -0,0 +1,6 @@
+mlx.core.linalg.cholesky
+========================
+
+.. currentmodule:: mlx.core.linalg
+
+.. autofunction:: cholesky
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.inv.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.inv.rst
new file mode 100644
index 000000000..0e030237b
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.inv.rst
@@ -0,0 +1,6 @@
+mlx.core.linalg.inv
+===================
+
+.. currentmodule:: mlx.core.linalg
+
+.. autofunction:: inv
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.svd.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.svd.rst
new file mode 100644
index 000000000..4f0ddf4be
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.linalg.svd.rst
@@ -0,0 +1,6 @@
+mlx.core.linalg.svd
+===================
+
+.. currentmodule:: mlx.core.linalg
+
+.. autofunction:: svd
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.power.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.power.rst
new file mode 100644
index 000000000..1ef907bd8
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.power.rst
@@ -0,0 +1,6 @@
+mlx.core.power
+==============
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: power
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/_autosummary/mlx.core.remainder.rst b/docs/build/html/_sources/python/_autosummary/mlx.core.remainder.rst
new file mode 100644
index 000000000..2b333363e
--- /dev/null
+++ b/docs/build/html/_sources/python/_autosummary/mlx.core.remainder.rst
@@ -0,0 +1,6 @@
+mlx.core.remainder
+==================
+
+.. currentmodule:: mlx.core
+
+.. autofunction:: remainder
\ No newline at end of file
diff --git a/docs/build/html/_sources/python/linalg.rst b/docs/build/html/_sources/python/linalg.rst
index 0ac559f5e..3c34cb3f7 100644
--- a/docs/build/html/_sources/python/linalg.rst
+++ b/docs/build/html/_sources/python/linalg.rst
@@ -8,5 +8,8 @@ Linear Algebra
.. autosummary::
:toctree: _autosummary
+ inv
norm
+ cholesky
qr
+ svd
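A minimal sketch exercising the newly listed routines; the explicit ``stream=mx.cpu`` argument is an assumption based on these factorizations running on the CPU:

.. code-block:: python

   import mlx.core as mx

   # A small symmetric positive-definite matrix
   a = mx.array([[4.0, 1.0], [1.0, 3.0]])

   L = mx.linalg.cholesky(a, stream=mx.cpu)    # lower-triangular factor with L @ L.T == a
   a_inv = mx.linalg.inv(a, stream=mx.cpu)     # matrix inverse
   u, s, vt = mx.linalg.svd(a, stream=mx.cpu)  # singular value decomposition

   print(mx.allclose(L @ L.T, a))              # should print a true boolean array
   print(mx.allclose(a @ a_inv, mx.eye(2)))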
diff --git a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Conv3d.rst b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Conv3d.rst
new file mode 100644
index 000000000..5442dcdbf
--- /dev/null
+++ b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Conv3d.rst
@@ -0,0 +1,16 @@
+mlx.nn.Conv3d
+=============
+
+.. currentmodule:: mlx.nn
+
+.. autoclass:: Conv3d
+
+
+
+
+ .. rubric:: Methods
+
+ .. autosummary::
+
+
+
diff --git a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Embedding.rst b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Embedding.rst
index 598179428..9c433bfb5 100644
--- a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Embedding.rst
+++ b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Embedding.rst
@@ -13,5 +13,6 @@
.. autosummary::
~Embedding.as_linear
+ ~Embedding.to_quantized
diff --git a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Linear.rst b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Linear.rst
index f19fc1994..70850e9d8 100644
--- a/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Linear.rst
+++ b/docs/build/html/_sources/python/nn/_autosummary/mlx.nn.Linear.rst
@@ -12,5 +12,6 @@
.. autosummary::
+ ~Linear.to_quantized
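A minimal sketch of the helper this entry documents; the ``group_size`` and ``bits`` keyword arguments are assumed to mirror those of ``nn.quantize``:

.. code-block:: python

   import mlx.nn as nn

   layer = nn.Linear(512, 512)
   # Swap the layer for its quantized counterpart
   qlayer = layer.to_quantized(group_size=64, bits=4)
   print(type(qlayer).__name__)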
diff --git a/docs/build/html/_sources/python/nn/layers.rst b/docs/build/html/_sources/python/nn/layers.rst
index 6fb624d54..cbbbb5c3b 100644
--- a/docs/build/html/_sources/python/nn/layers.rst
+++ b/docs/build/html/_sources/python/nn/layers.rst
@@ -15,6 +15,7 @@ Layers
BatchNorm
Conv1d
Conv2d
+ Conv3d
Dropout
Dropout2d
Dropout3d
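A minimal sketch of the newly listed layer, assuming it follows the channels-last (NDHWC) convention of the other convolution layers:

.. code-block:: python

   import mlx.core as mx
   import mlx.nn as nn

   # NDHWC input: batch of 2 volumes, 8x8x8 voxels, 4 input channels
   x = mx.random.normal(shape=(2, 8, 8, 8, 4))
   conv = nn.Conv3d(in_channels=4, out_channels=16, kernel_size=3)
   y = conv(x)
   print(y.shape)  # (2, 6, 6, 6, 16) with the default stride of 1 and no padding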
diff --git a/docs/build/html/_sources/python/ops.rst b/docs/build/html/_sources/python/ops.rst
index 177332c49..c88885101 100644
--- a/docs/build/html/_sources/python/ops.rst
+++ b/docs/build/html/_sources/python/ops.rst
@@ -10,6 +10,7 @@ Operations
abs
add
+ addmm
all
allclose
any
@@ -26,6 +27,7 @@ Operations
argpartition
argsort
array_equal
+ as_strided
atleast_1d
atleast_2d
atleast_3d
@@ -76,6 +78,7 @@ Operations
isnan
isneginf
isposinf
+ issubdtype
left_shift
less
less_equal
@@ -106,11 +109,13 @@ Operations
outer
partition
pad
+ power
prod
quantize
quantized_matmul
radians
reciprocal
+ remainder
repeat
reshape
right_shift
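A minimal sketch of a few of the ops added to this table; the argument values are illustrative only:

.. code-block:: python

   import mlx.core as mx

   a = mx.ones((2, 3))
   b = mx.ones((3, 2))
   c = mx.zeros((2, 2))

   d = mx.addmm(c, a, b)                              # c + (a @ b), with optional alpha/beta scaling
   p = mx.power(mx.array([2, 3]), mx.array([3, 2]))   # [8, 9]
   r = mx.remainder(mx.array([7, 5]), 3)              # [1, 2]
   print(mx.issubdtype(d.dtype, mx.floating))         # True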
diff --git a/docs/build/html/_static/documentation_options.js b/docs/build/html/_static/documentation_options.js
index 607aaea4c..4b6f897c5 100644
--- a/docs/build/html/_static/documentation_options.js
+++ b/docs/build/html/_static/documentation_options.js
@@ -1,5 +1,5 @@
const DOCUMENTATION_OPTIONS = {
- VERSION: '0.13.0',
+ VERSION: '0.13.1',
LANGUAGE: 'en',
COLLAPSE_INDEX: false,
BUILDER: 'html',
diff --git a/docs/build/html/annotated.html b/docs/build/html/annotated.html
index 220efdb5b..d2c356c14 100644
--- a/docs/build/html/annotated.html
+++ b/docs/build/html/annotated.html
@@ -192,91 +192,93 @@ $(function() {
C BitwiseBinary
C BlockMaskedMM
C BlockSparseMM
 [the alphabetical class list is re-rendered: two new entries, "C BlockSparseQMM" and "C Cholesky", are inserted; the existing entries from Broadcast through Uniform are unchanged apart from shifted row markup]
► N steel
C AccumHelper
► C BlockLoader
diff --git a/docs/build/html/backend_2metal_2kernels_2bf16_8h_source.html b/docs/build/html/backend_2metal_2kernels_2bf16_8h_source.html
index 83ebc44f4..a266656b8 100644
--- a/docs/build/html/backend_2metal_2kernels_2bf16_8h_source.html
+++ b/docs/build/html/backend_2metal_2kernels_2bf16_8h_source.html
@@ -91,7 +91,7 @@ $(function() { codefold.init(0); });
- 9 #if defined(__HAVE_BFLOAT__)
+ 9 #if defined METAL_3_1 || (__METAL_VERSION__ >= 310)
@@ -445,7 +445,7 @@ $(function() { codefold.init(0); });
313 #pragma METAL internals : disable
-
+
diff --git a/docs/build/html/backend_2metal_2kernels_2steel_2gemm_2transforms_8h_source.html b/docs/build/html/backend_2metal_2kernels_2steel_2gemm_2transforms_8h_source.html
index f9ed6a5f8..13d88e7f2 100644
--- a/docs/build/html/backend_2metal_2kernels_2steel_2gemm_2transforms_8h_source.html
+++ b/docs/build/html/backend_2metal_2kernels_2steel_2gemm_2transforms_8h_source.html
@@ -117,69 +117,83 @@ $(function() { codefold.init(0); });
 [rendered source listing for steel/gemm/transforms.h updated: TransformAdd and TransformAxpby each gain a static apply(InT x) overload alongside apply(InT x, OutT c), and the BlockSwizzle::swizzle helper together with its tooltip anchors shifts from lines 53-58 to 61-66]
diff --git a/docs/build/html/bf16__math_8h_source.html b/docs/build/html/bf16__math_8h_source.html
index 217403ab0..f8c7bfe2d 100644
--- a/docs/build/html/bf16__math_8h_source.html
+++ b/docs/build/html/bf16__math_8h_source.html
@@ -458,7 +458,7 @@ $(function() { codefold.init(0); });
- 372 #if defined(__HAVE_BFLOAT__)
+ 372 #if defined METAL_3_1 || (__METAL_VERSION__ >= 310)
374 #define bfloat16_to_uint16(x) as_type<uint16_t>(x)
375 #define uint16_to_bfloat16(x) as_type<bfloat16_t>(x)
diff --git a/docs/build/html/classes.html b/docs/build/html/classes.html
index e2a481f31..be7b8f1de 100644
--- a/docs/build/html/classes.html
+++ b/docs/build/html/classes.html
@@ -81,10 +81,10 @@ $(function() {
Abs Abs (mlx::core )Abs (mlx::core::detail )AccumHelper (mlx::steel )Add Add (mlx::core )Add (mlx::core::detail )add_vec (pocketfft::detail )add_vec< cmplx< T > > (pocketfft::detail )AddMM (mlx::core )aligned_allocator (pocketfft::detail::threading )Allocator (mlx::core::allocator )And Arange (mlx::core )ArcCos ArcCos (mlx::core )ArcCos (mlx::core::detail )ArcCosh ArcCosh (mlx::core )ArcCosh (mlx::core::detail )ArcSin ArcSin (mlx::core )ArcSin (mlx::core::detail )ArcSinh ArcSinh (mlx::core )ArcSinh (mlx::core::detail )ArcTan ArcTan (mlx::core )ArcTan (mlx::core::detail )ArcTan2 ArcTan2 (mlx::core )ArcTan2 (mlx::core::detail )ArcTanh ArcTanh (mlx::core )ArcTanh (mlx::core::detail )ArgPartition (mlx::core )ArgReduce (mlx::core )ArgSort (mlx::core )arr (pocketfft::detail )arr_info (pocketfft::detail )array (mlx::core )array::ArrayIterator (mlx::core )AsStrided (mlx::core )AsType (mlx::core )
B
 [index rows B and C re-rendered: row B gains BlockSparseQMM (mlx::core) and row C gains Cholesky (mlx::core); the remaining entries are unchanged]
D
array::Data (mlx::core )Depends (mlx::core )Device (mlx::core )Device (mlx::core::metal )Divide Divide (mlx::core::detail )Divide (mlx::core )DivMod (mlx::core )Dtype (mlx::core )
diff --git a/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m-members.html b/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m-members.html
new file mode 100644
index 000000000..342d7880e
--- /dev/null
+++ b/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m-members.html
@@ -0,0 +1,115 @@
+MLX: Member List
+[generated member list for mlx::core::BlockSparseQMM: an explicit constructor BlockSparseQMM(Stream stream, int group_size, int bits, bool transpose); overrides of eval_cpu, eval_gpu, is_equivalent, jvp, vjp, vmap and print; device, stream, output_shapes, the deleted copy/move operators and the virtual destructors are inherited from mlx::core::UnaryPrimitive and mlx::core::Primitive. Doxygen page chrome (navigation tree, search widget) omitted]
diff --git a/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.html b/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.html
new file mode 100644
index 000000000..1cbeda629
--- /dev/null
+++ b/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.html
@@ -0,0 +1,447 @@
+MLX: mlx::core::BlockSparseQMM Class Reference
+[generated class reference, declared in primitives.h: mlx::core::BlockSparseQMM derives from mlx::core::UnaryPrimitive. Public members: the explicit constructor BlockSparseQMM(Stream stream, int group_size, int bits, bool transpose); eval_cpu and eval_gpu taking (const std::vector<array>& inputs, array& out); vmap, jvp, vjp, print and is_equivalent reimplemented from mlx::core::Primitive; plus the inherited UnaryPrimitive and Primitive interfaces. Doxygen page chrome and per-member parameter tables omitted]
diff --git a/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.png b/docs/build/html/classmlx_1_1core_1_1_block_sparse_q_m_m.png
new file mode 100644
index 0000000000000000000000000000000000000000..3c033d8f85ff4fdaeda5dbdd9234fd18efac0e02
GIT binary patch
literal 981
zcmeAS@N?(olHy`uVBq!ia0vp^tAV(KgBeJ!=X<>wNJ$6ygt-3y{~ySF@#br3|Doj;
z2ATyD)6cwk@ZbSZ-1KbN5}+JsNswPK1CS2}=1jA%FfcGDc)B=-R4~4so7ms1z|$5V
zH)-kr|KjtDn{*dk-koPTNzZPg=9KoRA3bx5=dq+rn$uLE>REqis>
Sb
zx;OUL`fio=p98N?Hr(#2ziZi~)~ySl-Mkc*m%ZI~a`p4)vrUi4rN0dOeQOprSRVWJ
zpY4h4{q^^DO?pzDw`dpUF3xv%}b)vtE>O*`>#)59;yshjpn
zd^bMdX8dxa%2m@dtL1Jy>GgS&`Kjv4_G{U@kFDQPHv4Fu``ep(+7thVzH&Xj_tmQM
z=Z+rq*J!ZY0l)aXH_x|ka`uid${>9D`PJNJN
zRQy#&!LvMX-c^;GA63Kouj=pIwW4k>qM5{39UHH6>^ki;CbxO^$Fu!TLj?E=T6y
z`?~D+E=Tvn+t_bs3r;NdI)15o=jvts*}~g9lb4ALS}rQPGws`+mCFi!Qx9!j_-Ojm
zX`$;4zunxr`sJ^xZOhX{j(yQ<)Z4M{;v|pymAAE9%mUYK%Z}VUYZrGi`^&=FU0(A}
zu9my=#8<`BKYIPyEv8G)O8owQd0o~1p3k?AUPykeA-d*gSnTHuq4`sr_w>Em>U&Y|
z>hz%9*F#0`7v=W%l?g>NAOD(lYUj%9WfQE}k4>JpyU(orXGNCY*U;O=3-;unYuv>m
z`{U|GF1eju9W?^|kswbQO
+
+
+
+
+
+
+MLX: Member List
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Loading...
+
Searching...
+
No Matches
+
+
+
+
+
+
+
+
+
+
+
+This is the complete list of members for mlx::core::Cholesky, including all inherited members.
+Member | Defined in | Attributes
+Cholesky(Stream stream, bool upper) | mlx::core::Cholesky | inline, explicit
+device() | mlx::core::Primitive | inline
+eval_cpu(const std::vector< array > &inputs, array &out) override | mlx::core::Cholesky | virtual
+mlx::core::UnaryPrimitive::eval_cpu(const std::vector< array > &inputs, std::vector< array > &outputs) override | mlx::core::UnaryPrimitive | inline, virtual
+eval_gpu(const std::vector< array > &inputs, array &out) override | mlx::core::Cholesky | virtual
+mlx::core::UnaryPrimitive::eval_gpu(const std::vector< array > &inputs, std::vector< array > &outputs) override | mlx::core::UnaryPrimitive | inline, virtual
+is_equivalent(const Primitive &other) const | mlx::core::Primitive | inline, virtual
+jvp(const std::vector< array > &primals, const std::vector< array > &tangents, const std::vector< int > &argnums) | mlx::core::Primitive | virtual
+operator=(const UnaryPrimitive &other)=delete | mlx::core::UnaryPrimitive
+operator=(UnaryPrimitive &&other)=delete | mlx::core::UnaryPrimitive
+mlx::core::Primitive::operator=(const Primitive &other)=delete | mlx::core::Primitive
+mlx::core::Primitive::operator=(Primitive &&other)=delete | mlx::core::Primitive
+output_shapes(const std::vector< array > &inputs) | mlx::core::Primitive | virtual
+Primitive(Stream stream) | mlx::core::Primitive | inline, explicit
+Primitive(const Primitive &other)=delete | mlx::core::Primitive
+Primitive(Primitive &&other)=delete | mlx::core::Primitive
+print(std::ostream &os) override | mlx::core::Cholesky | inline, virtual
+stream() | mlx::core::Primitive | inline
+UnaryPrimitive(Stream stream) | mlx::core::UnaryPrimitive | inline, explicit
+UnaryPrimitive(const UnaryPrimitive &other)=delete | mlx::core::UnaryPrimitive
+UnaryPrimitive(UnaryPrimitive &&other)=delete | mlx::core::UnaryPrimitive
+vjp(const std::vector< array > &primals, const std::vector< array > &cotangents, const std::vector< int > &argnums, const std::vector< array > &outputs) | mlx::core::Primitive | virtual
+vmap(const std::vector< array > &inputs, const std::vector< int > &axes) override | mlx::core::Cholesky | virtual
+~Primitive()=default | mlx::core::Primitive | virtual
+~UnaryPrimitive()=default | mlx::core::UnaryPrimitive | virtual
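In the list above, Cholesky overrides the single-output eval_cpu and eval_gpu, while the inline virtual overloads inherited from UnaryPrimitive take a vector of outputs. A minimal sketch of how such an adapter can forward one form to the other, assuming the include path mlx/primitives.h; SingleOutputAdapter and its forwarding bodies are illustrative assumptions, not MLX source.

// Illustrative sketch only; the forwarding bodies are assumptions.
#include <vector>

#include "mlx/primitives.h"

namespace mlx::core {

// Hypothetical stand-in that mirrors the documented UnaryPrimitive shape.
class SingleOutputAdapter : public Primitive {
 public:
  explicit SingleOutputAdapter(Stream stream) : Primitive(stream) {}

  // Subclasses implement the single-output form ...
  virtual void eval_cpu(const std::vector<array>& inputs, array& out) = 0;
  virtual void eval_gpu(const std::vector<array>& inputs, array& out) = 0;

  // ... and the multi-output entry points inherited from Primitive forward to it.
  void eval_cpu(const std::vector<array>& inputs,
                std::vector<array>& outputs) override {
    eval_cpu(inputs, outputs[0]);
  }
  void eval_gpu(const std::vector<array>& inputs,
                std::vector<array>& outputs) override {
    eval_gpu(inputs, outputs[0]);
  }
};

} // namespace mlx::core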
diff --git a/docs/build/html/classmlx_1_1core_1_1_cholesky.html b/docs/build/html/classmlx_1_1core_1_1_cholesky.html
new file mode 100644
index 000000000..ece661cf6
--- /dev/null
+++ b/docs/build/html/classmlx_1_1core_1_1_cholesky.html
@@ -0,0 +1,327 @@
+MLX: mlx::core::Cholesky Class Reference
+#include <primitives.h>
+Public Member Functions
+ Cholesky (Stream stream, bool upper)
+void eval_cpu (const std::vector< array > &inputs, array &out) override
+void eval_gpu (const std::vector< array > &inputs, array &out) override
+virtual std::pair< std::vector< array >, std::vector< int > > vmap (const std::vector< array > &inputs, const std::vector< int > &axes) override
+    The primitive must know how to vectorize itself across the given axes.
+void print (std::ostream &os) override
+    Print the primitive.
+Public Member Functions inherited from mlx::core::UnaryPrimitive
+ UnaryPrimitive (Stream stream)
+    An abstract base class for a primitive with a single output.
+void eval_cpu (const std::vector< array > &inputs, std::vector< array > &outputs) override
+    A primitive must know how to evaluate itself on the CPU/GPU for the given inputs and populate the output arrays.
+void eval_gpu (const std::vector< array > &inputs, std::vector< array > &outputs) override
+virtual ~UnaryPrimitive ()=default
+ UnaryPrimitive (const UnaryPrimitive &other)=delete
+ UnaryPrimitive (UnaryPrimitive &&other)=delete
+UnaryPrimitive & operator= (const UnaryPrimitive &other)=delete
+UnaryPrimitive & operator= (UnaryPrimitive &&other)=delete
+Public Member Functions inherited from mlx::core::Primitive
+ Primitive (Stream stream)
+const Device & device ()
+    The device the primitive will run on.
+const Stream & stream ()
+    The stream the primitive will run on.
+virtual std::vector< array > jvp (const std::vector< array > &primals, const std::vector< array > &tangents, const std::vector< int > &argnums)
+    The Jacobian-vector product.
+virtual std::vector< array > vjp (const std::vector< array > &primals, const std::vector< array > &cotangents, const std::vector< int > &argnums, const std::vector< array > &outputs)
+    The vector-Jacobian product.
+virtual bool is_equivalent (const Primitive &other) const
+    Equivalence check defaults to false unless overridden by the primitive.
+virtual std::vector< std::vector< int > > output_shapes (const std::vector< array > &inputs)
+    Get the output shapes of the primitive.
+virtual ~Primitive ()=default
+ Primitive (const Primitive &other)=delete
+ Primitive (Primitive &&other)=delete
+Primitive & operator= (const Primitive &other)=delete
+Primitive & operator= (Primitive &&other)=delete
+◆ Cholesky()
+mlx::core::Cholesky::Cholesky (Stream stream, bool upper)  [inline, explicit]
+◆ eval_cpu()
+void mlx::core::Cholesky::eval_cpu (const std::vector< array > & inputs, array & out)  [override, virtual]
+◆ eval_gpu()
+void mlx::core::Cholesky::eval_gpu (const std::vector< array > & inputs, array & out)  [override, virtual]
+◆ print()
+void mlx::core::Cholesky::print (std::ostream & os)  [inline, override, virtual]
+◆ vmap()
+virtual std::pair< std::vector< array >, std::vector< int > > mlx::core::Cholesky::vmap (const std::vector< array > & inputs, const std::vector< int > & axes)  [override, virtual]
+The primitive must know how to vectorize itself across the given axes.
+The output is a pair containing the output arrays representing the vectorized computation and the axes which correspond to the vectorized dimensions of each output.
+Reimplemented from mlx::core::Primitive.
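The vmap contract documented above returns the vectorized output arrays paired with the axis along which each output is vectorized. A minimal sketch of that contract under the same assumptions as the earlier sketch: ExampleUnary and the free function example_op are hypothetical, and a real primitive such as Cholesky would generally have to reposition the batch axis before dispatching.

// Illustrative sketch only; example_op and ExampleUnary are not MLX APIs.
#include <ostream>
#include <utility>
#include <vector>

#include "mlx/primitives.h"

namespace mlx::core {

// Hypothetical op that builds the (batched) output array for this primitive.
array example_op(const array& x, Stream s);

class ExampleUnary : public UnaryPrimitive {
 public:
  explicit ExampleUnary(Stream stream) : UnaryPrimitive(stream) {}

  void eval_cpu(const std::vector<array>& inputs, array& out) override;
  void eval_gpu(const std::vector<array>& inputs, array& out) override;
  void print(std::ostream& os) override;

  // Return one output array and, aligned with it, the axis along which that
  // output is vectorized; an elementwise op can simply pass both through.
  std::pair<std::vector<array>, std::vector<int>> vmap(
      const std::vector<array>& inputs,
      const std::vector<int>& axes) override {
    return {{example_op(inputs[0], stream())}, {axes[0]}};
  }
};

} // namespace mlx::core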
The documentation for this class was generated from the following file:
diff --git a/docs/build/html/classmlx_1_1core_1_1_cholesky.png b/docs/build/html/classmlx_1_1core_1_1_cholesky.png
new file mode 100644
index 0000000000000000000000000000000000000000..f0f57d4b8bb439f6c553edaf191c18964559e36d
[base85 image data omitted: classmlx_1_1core_1_1_cholesky.png]
diff --git a/docs/build/html/classmlx_1_1core_1_1_primitive.html b/docs/build/html/classmlx_1_1core_1_1_primitive.html
index 12c2e073c..aa701ef61 100644
--- a/docs/build/html/classmlx_1_1core_1_1_primitive.html
+++ b/docs/build/html/classmlx_1_1core_1_1_primitive.html
@@ -364,7 +364,7 @@ Public Member Functions
Equivalence check defaults to false unless overridden by the primitive.
-Reimplemented in mlx::core::fast::ScaledDotProductAttention , mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , and mlx::core::Transpose .
+Reimplemented in mlx::core::fast::ScaledDotProductAttention , mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , and mlx::core::Transpose .
@@ -402,7 +402,7 @@ Public Member Functions
The Jacobian-vector product.
-
Reimplemented in mlx::core::fast::Custom , mlx::core::Abs , mlx::core::Add , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::AsType , mlx::core::AsStrided , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , and mlx::core::Transpose .
+
Reimplemented in mlx::core::fast::Custom , mlx::core::Abs , mlx::core::Add , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::AsType , mlx::core::AsStrided , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , and mlx::core::Transpose .
@@ -511,7 +511,7 @@ Public Member Functions
Print the primitive.
-Implemented in mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , mlx::core::QRF , mlx::core::SVD , and mlx::core::Inverse .
+Implemented in mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , mlx::core::QRF , mlx::core::SVD , mlx::core::Inverse , and mlx::core::Cholesky .
@@ -581,7 +581,7 @@ Public Member Functions
The vector-Jacobian product.
-Reimplemented in mlx::core::CustomVJP , mlx::core::Depends , mlx::core::fast::Custom , mlx::core::fast::RMSNorm , mlx::core::fast::LayerNorm , mlx::core::fast::RoPE , mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , and mlx::core::Transpose .
+Reimplemented in mlx::core::CustomVJP , mlx::core::Depends , mlx::core::fast::Custom , mlx::core::fast::RMSNorm , mlx::core::fast::LayerNorm , mlx::core::fast::RoPE , mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , and mlx::core::Transpose .
@@ -615,7 +615,7 @@ Public Member Functions
The primitive must know how to vectorize itself across the given axes.
The output is a pair containing the output arrays representing the vectorized computation and the axes which correspond to the vectorized dimensions of each output.
-Reimplemented in mlx::core::fast::Custom , mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , mlx::core::SVD , and mlx::core::Inverse .
+Reimplemented in mlx::core::fast::Custom , mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , mlx::core::SVD , mlx::core::Inverse , and mlx::core::Cholesky .
diff --git a/docs/build/html/classmlx_1_1core_1_1_unary_primitive.html b/docs/build/html/classmlx_1_1core_1_1_unary_primitive.html
index 5644a664e..0e244a441 100644
--- a/docs/build/html/classmlx_1_1core_1_1_unary_primitive.html
+++ b/docs/build/html/classmlx_1_1core_1_1_unary_primitive.html
@@ -109,71 +109,73 @@ Inheritance diagram for mlx::core::UnaryPrimitive:
[inheritance diagram markup for mlx::core::UnaryPrimitive regenerated; details omitted]
@@ -370,7 +372,7 @@ Public Member Functions
-
Implemented in mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , and mlx::core::Inverse .
+
Implemented in mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , mlx::core::Cholesky , and mlx::core::Inverse .
@@ -435,7 +437,7 @@ Public Member Functions
-
Implemented in mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , and mlx::core::Inverse .
+
Implemented in mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan , mlx::core::ArcTan2 , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::AsStrided , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::Select , mlx::core::Remainder , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log , mlx::core::Log1p , mlx::core::LogicalNot , mlx::core::LogicalAnd , mlx::core::LogicalOr , mlx::core::LogAddExp , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::QuantizedMatmul , mlx::core::BlockSparseQMM , mlx::core::RandomBits , mlx::core::Reshape , mlx::core::Reduce , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Square , mlx::core::Sqrt , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Uniform , mlx::core::Transpose , mlx::core::Cholesky , and mlx::core::Inverse .
diff --git a/docs/build/html/classmlx_1_1core_1_1_unary_primitive.png b/docs/build/html/classmlx_1_1core_1_1_unary_primitive.png
index 191d394b31de10715621038ababedb088a3b7120..162024de6b07d41c7496e57c09ced4339db4d3b3 100644
[base85 image data omitted: classmlx_1_1core_1_1_unary_primitive.png]
diff --git a/docs/build/html/compiled_8h_source.html b/docs/build/html/compiled_8h_source.html
index 6e37a563e..f5ec537b0 100644
--- a/docs/build/html/compiled_8h_source.html
+++ b/docs/build/html/compiled_8h_source.html
@@ -168,7 +168,7 @@ $(function() { codefold.init(0); });
Definition primitives.h:525
Definition primitives.h:680
Definition primitives.h:48
-Definition primitives.h:1919
+Definition primitives.h:1947
size_t ndim() const
The number of dimensions of the array.
Definition array.h:94
T item()
Get the value from a scalar array.
Definition array.h:489
diff --git a/docs/build/html/cpp/ops.html b/docs/build/html/cpp/ops.html
index a43770828..9e4d3eb47 100644
--- a/docs/build/html/cpp/ops.html
+++ b/docs/build/html/cpp/ops.html
@@ -8,7 +8,7 @@
- Operations — MLX 0.13.0 documentation
+ Operations — MLX 0.13.1 documentation
@@ -36,14 +36,14 @@
-
+
-
+
@@ -131,8 +131,8 @@
-
-
+
+
@@ -246,6 +246,7 @@
Operations
Linear Algebra
Metal
Further Reading
@@ -1041,9 +1050,11 @@ document.write(`
conv_general()
conv1d()
conv2d()
+conv3d()
quantized_matmul()
quantize()
dequantize()
+block_sparse_qmm()
tensordot()
tensordot()
outer()
@@ -2520,6 +2531,12 @@ document.write(`
2D convolution with a filter
+
+
+array conv3d ( const array & input , const array & weight , const std :: tuple < int , int , int > & stride = { 1 , 1 , 1 } , const std :: tuple < int , int , int > & padding = { 0 , 0 , 0 } , const std :: tuple < int , int , int > & dilation = { 1 , 1 , 1 } , int groups = 1 , StreamOrDevice s = { } )
+3D convolution with a filter
+
+
array quantized_matmul ( const array & x , const array & w , const array & scales , const array & biases , bool transpose = true , int group_size = 64 , int bits = 4 , StreamOrDevice s = { } )
@@ -2538,6 +2555,12 @@ document.write(`
Dequantize a matrix produced by quantize()
+
+
+array block_sparse_qmm ( const array & x , const array & w , const array & scales , const array & biases , std :: optional < array > lhs_indices = std :: nullopt , std :: optional < array > rhs_indices = std :: nullopt , bool transpose = true , int group_size = 64 , int bits = 4 , StreamOrDevice s = { } )
+Compute matrix products with matrix-level gather.
+
+
array tensordot ( const array & a , const array & b , const int axis = 2 , StreamOrDevice s = { } )
@@ -2723,7 +2746,7 @@ document.write(`
title="next page">
next
-Developer Documentation
+Custom Extensions in MLX
@@ -2996,9 +3019,11 @@ document.write(`
conv_general()
conv1d()
conv2d()
+conv3d()
quantized_matmul()
quantize()
dequantize()
+block_sparse_qmm()
tensordot()
tensordot()
outer()
diff --git a/docs/build/html/dev/extensions.html b/docs/build/html/dev/extensions.html
index adbf3b2c6..f801e7199 100644
--- a/docs/build/html/dev/extensions.html
+++ b/docs/build/html/dev/extensions.html
@@ -8,7 +8,7 @@
- Developer Documentation — MLX 0.13.0 documentation
+ Custom Extensions in MLX — MLX 0.13.1 documentation
@@ -36,7 +36,7 @@
-
+
@@ -131,8 +131,8 @@
-
-
+
+
@@ -246,6 +246,7 @@
Operations
Linear Algebra
Metal
Further Reading
@@ -777,7 +786,7 @@ document.write(`
-Developer Documentation
+Custom Extensions in MLX
@@ -822,8 +831,8 @@ document.write(`
-
-Developer Documentation
+
+Custom Extensions in MLX
You can extend MLX with custom operations on the CPU or GPU. This guide
explains how to do that with a simple example.
@@ -1280,7 +1289,7 @@ below.
auto kernel = d . get_kernel ( kname . str (), "mlx_ext" );
// Prepare to encode kernel
- auto compute_encoder = d . get_command_encoder ( s . index );
+ auto & compute_encoder = d . get_command_encoder ( s . index );
compute_encoder -> setComputePipelineState ( kernel );
// Kernel parameters are registered with buffer indices corresponding to
@@ -1289,11 +1298,11 @@ below.
size_t nelem = out . size ();
// Encode input arrays to kernel
- set_array_buffer ( compute_encoder , x , 0 );
- set_array_buffer ( compute_encoder , y , 1 );
+ compute_encoder . set_input_array ( x , 0 );
+ compute_encoder . set_input_array ( y , 1 );
// Encode output arrays to kernel
- set_array_buffer ( compute_encoder , out , 2 );
+ compute_encoder . set_output_array ( out , 2 );
// Encode alpha and beta
compute_encoder -> setBytes ( & alpha_ , sizeof ( float ), 3 );
@@ -1317,7 +1326,7 @@ below.
// Launch the grid with the given number of threads divided among
// the given threadgroups
- compute_encoder -> dispatchThreads ( grid_dims , group_dims );
+ compute_encoder . dispatchThreads ( grid_dims , group_dims );
}
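Taken together, the encoder edits in this hunk swap the free set_array_buffer helper for methods on a command-encoder reference. A minimal sketch of the resulting pattern, assuming the same surrounding setup as in the extension example (d, s, kernel, the arrays x, y, out, the scalar alpha_, and grid_dims/group_dims are defined as before):

    // Sketch of the updated encoder usage shown above; the kernel setup is
    // assumed to match the rest of the extension example.
    auto& compute_encoder = d.get_command_encoder(s.index);  // take a reference
    compute_encoder->setComputePipelineState(kernel);

    // Inputs and outputs are registered through encoder methods by buffer index.
    compute_encoder.set_input_array(x, 0);
    compute_encoder.set_input_array(y, 1);
    compute_encoder.set_output_array(out, 2);

    // Scalars are still passed with setBytes.
    compute_encoder->setBytes(&alpha_, sizeof(float), 3);

    // Dispatch through the encoder wrapper rather than the raw Metal encoder.
    compute_encoder.dispatchThreads(grid_dims, group_dims);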
@@ -1589,7 +1598,7 @@ import the Python package and play with it as you would any other MLX operation.
print ( f "c shape: { c . shape } " )
print ( f "c dtype: { c . dtype } " )
-print ( f "c correctness: { mx . all ( c == 6.0 ) . item () } " )
+print ( f "c correct: { mx . all ( c == 6.0 ) . item () } " )
Output:
diff --git a/docs/build/html/dev/metal_debugger.html b/docs/build/html/dev/metal_debugger.html
index 57c07026d..a23b9d821 100644
--- a/docs/build/html/dev/metal_debugger.html
+++ b/docs/build/html/dev/metal_debugger.html
@@ -8,7 +8,7 @@
- Metal Debugger — MLX 0.13.0 documentation
+ Metal Debugger — MLX 0.13.1 documentation
@@ -36,14 +36,14 @@
-
+
-
+
@@ -130,8 +130,8 @@
-
-
+
+
@@ -245,6 +245,7 @@
Operations
Linear Algebra
Metal
Further Reading
@@ -870,7 +879,7 @@ Xcode project using CMake.
previous
-Developer Documentation
+Custom Extensions in MLX
diff --git a/docs/build/html/doxygen_crawl.html b/docs/build/html/doxygen_crawl.html
index eeeffb925..753520686 100644
--- a/docs/build/html/doxygen_crawl.html
+++ b/docs/build/html/doxygen_crawl.html
@@ -401,10 +401,14 @@
+
+
+
+
diff --git a/docs/build/html/examples/linear_regression.html b/docs/build/html/examples/linear_regression.html
index 7ed3901d7..7ce87a327 100644
--- a/docs/build/html/examples/linear_regression.html
+++ b/docs/build/html/examples/linear_regression.html
@@ -8,7 +8,7 @@
- Linear Regression — MLX 0.13.0 documentation
+ Linear Regression — MLX 0.13.1 documentation
@@ -36,7 +36,7 @@
-
+
@@ -131,8 +131,8 @@
-
-
+
+
@@ -246,6 +246,7 @@
Operations
Linear Algebra
Metal
Further Reading
diff --git a/docs/build/html/examples/llama-inference.html b/docs/build/html/examples/llama-inference.html
index 8234fe5af..ea01e1615 100644
--- a/docs/build/html/examples/llama-inference.html
+++ b/docs/build/html/examples/llama-inference.html
@@ -8,7 +8,7 @@
- LLM inference — MLX 0.13.0 documentation
+ LLM inference — MLX 0.13.1 documentation
@@ -36,7 +36,7 @@
-
+
@@ -131,8 +131,8 @@
-
-
+
+
@@ -246,6 +246,7 @@
Operations
Linear Algebra
Metal
Further Reading
diff --git a/docs/build/html/examples/mlp.html b/docs/build/html/examples/mlp.html
index b2d096363..43d2faf59 100644
--- a/docs/build/html/examples/mlp.html
+++ b/docs/build/html/examples/mlp.html
@@ -8,7 +8,7 @@
- Multi-Layer Perceptron — MLX 0.13.0 documentation
+ Multi-Layer Perceptron — MLX 0.13.1 documentation
@@ -36,7 +36,7 @@
-
+
@@ -131,8 +131,8 @@
-
-
+
+
@@ -246,6 +246,7 @@
Operations
Linear Algebra
Metal
Further Reading
diff --git a/docs/build/html/functions_a.html b/docs/build/html/functions_a.html
index 3da3d67a0..9ab5d4d08 100644
--- a/docs/build/html/functions_a.html
+++ b/docs/build/html/functions_a.html
@@ -90,7 +90,9 @@ $(function() {
allocator : mlx::core::allocator::CommonAllocator , mlx::core::metal::MetalAllocator
alpha : mlx::steel::GEMMAddMMParams , mlx::steel::TransformAxpby< OutT, InT >
And : mlx::core::BitwiseBinary , mlx::core::Reduce
-apply() : mlx::steel::TransformAdd< OutT, InT > , mlx::steel::TransformAxpby< OutT, InT > , mlx::steel::TransformNone< OutT, InT >
+apply() : mlx::steel::TransformAdd< OutT, InT > , mlx::steel::TransformAxpby< OutT, InT > , mlx::steel::TransformNone< OutT, InT >
+apply_epilogue() : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
+apply_epilogue_safe() : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
Arange() : mlx::core::Arange
ArcCos() : mlx::core::ArcCos
ArcCosh() : mlx::core::ArcCosh
diff --git a/docs/build/html/functions_b.html b/docs/build/html/functions_b.html
index e5279524c..8d03d5e6f 100644
--- a/docs/build/html/functions_b.html
+++ b/docs/build/html/functions_b.html
@@ -95,6 +95,7 @@ $(function() {
BlockMaskedMM() : mlx::core::BlockMaskedMM
BlockMMA() : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
BlockSparseMM() : mlx::core::BlockSparseMM
+BlockSparseQMM() : mlx::core::BlockSparseQMM
Broadcast() : mlx::core::Broadcast
BROWS : mlx::steel::Conv2DInputBlockLoaderGeneral< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DInputBlockLoaderLargeFilter< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DInputBlockLoaderSmallChannels< T, BM, BN, BK, tgp_size, n_channels, tgp_padding > , mlx::steel::Conv2DInputBlockLoaderSmallFilter< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoader< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoaderGeneral< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoaderSmallChannels< T, BM, BN, BK, tgp_size, n_channels, tgp_padding >
Bs_offset : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
diff --git a/docs/build/html/functions_c.html b/docs/build/html/functions_c.html
index 727c87084..f2a3a607d 100644
--- a/docs/build/html/functions_c.html
+++ b/docs/build/html/functions_c.html
@@ -79,6 +79,7 @@ $(function() {
Category : mlx::core::Dtype
Ceil() : mlx::core::Ceil
cfftp() : pocketfft::detail::cfftp< T0 >
+Cholesky() : mlx::core::Cholesky
clear_cache() : mlx::core::metal::MetalAllocator
cmplx() : pocketfft::detail::cmplx< T >
cndarr() : pocketfft::detail::cndarr< T >
diff --git a/docs/build/html/functions_e.html b/docs/build/html/functions_e.html
index 0373de1bd..6c5380511 100644
--- a/docs/build/html/functions_e.html
+++ b/docs/build/html/functions_e.html
@@ -85,10 +85,10 @@ $(function() {
Erf() : mlx::core::Erf
ErfInv() : mlx::core::ErfInv
eval() : mlx::core::array
-eval_cpu() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::LayerNorm , mlx::core::fast::LayerNormVJP , mlx::core::fast::RMSNorm , mlx::core::fast::RMSNormVJP , mlx::core::fast::RoPE , mlx::core::fast::ScaledDotProductAttention , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QRF , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::UnaryPrimitive , mlx::core::Uniform
-eval_gpu() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::LayerNorm , mlx::core::fast::LayerNormVJP , mlx::core::fast::RMSNorm , mlx::core::fast::RMSNormVJP , mlx::core::fast::RoPE , mlx::core::fast::ScaledDotProductAttention , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QRF , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::UnaryPrimitive , mlx::core::Uniform
+eval_cpu() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Cholesky , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::LayerNorm , mlx::core::fast::LayerNormVJP , mlx::core::fast::RMSNorm , mlx::core::fast::RMSNormVJP , mlx::core::fast::RoPE , mlx::core::fast::ScaledDotProductAttention , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QRF , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::UnaryPrimitive , mlx::core::Uniform
+eval_gpu() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Cholesky , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::LayerNorm , mlx::core::fast::LayerNormVJP , mlx::core::fast::RMSNorm , mlx::core::fast::RMSNormVJP , mlx::core::fast::RoPE , mlx::core::fast::ScaledDotProductAttention , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QRF , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::UnaryPrimitive , mlx::core::Uniform
event() : mlx::core::array
-Event() : mlx::core::Event
+Event() : mlx::core::Event
excess : mlx::steel::ChannelHelper< n_channels_ > , mlx::steel::ChannelHelper< 1 > , mlx::steel::ChannelHelper< 2 > , mlx::steel::ChannelHelper< 3 > , mlx::steel::ChannelHelper< 4 >
exec() : pocketfft::detail::cfftp< T0 > , pocketfft::detail::fftblue< T0 > , pocketfft::detail::pocketfft_c< T0 > , pocketfft::detail::pocketfft_r< T0 > , pocketfft::detail::rfftp< T0 > , pocketfft::detail::T_dcst23< T0 > , pocketfft::detail::T_dcst4< T0 > , pocketfft::detail::T_dct1< T0 > , pocketfft::detail::T_dst1< T0 >
exec_r() : pocketfft::detail::fftblue< T0 >
diff --git a/docs/build/html/functions_func_a.html b/docs/build/html/functions_func_a.html
index 2afeb9d52..b3fcd5beb 100644
--- a/docs/build/html/functions_func_a.html
+++ b/docs/build/html/functions_func_a.html
@@ -82,7 +82,9 @@ $(function() {
alloc() : MPS::Matrix , MPS::MatrixMultiplication , MPS::MatrixVectorMultiplication , MPS::Vector
allocate() : pocketfft::detail::threading::aligned_allocator< T >
Allocator() : mlx::core::allocator::Allocator
-apply() : mlx::steel::TransformAdd< OutT, InT > , mlx::steel::TransformAxpby< OutT, InT > , mlx::steel::TransformNone< OutT, InT >
+apply() : mlx::steel::TransformAdd< OutT, InT > , mlx::steel::TransformAxpby< OutT, InT > , mlx::steel::TransformNone< OutT, InT >
+apply_epilogue() : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
+apply_epilogue_safe() : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
Arange() : mlx::core::Arange
ArcCos() : mlx::core::ArcCos
ArcCosh() : mlx::core::ArcCosh
diff --git a/docs/build/html/functions_func_b.html b/docs/build/html/functions_func_b.html
index 3c83a2696..d7756da8d 100644
--- a/docs/build/html/functions_func_b.html
+++ b/docs/build/html/functions_func_b.html
@@ -81,6 +81,7 @@ $(function() {
BlockMaskedMM() : mlx::core::BlockMaskedMM
BlockMMA() : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
BlockSparseMM() : mlx::core::BlockSparseMM
+BlockSparseQMM() : mlx::core::BlockSparseQMM
Broadcast() : mlx::core::Broadcast
Buffer() : mlx::core::allocator::Buffer
buffer() : mlx::core::array
diff --git a/docs/build/html/functions_func_c.html b/docs/build/html/functions_func_c.html
index 5b6bb295c..bb3e9a6c9 100644
--- a/docs/build/html/functions_func_c.html
+++ b/docs/build/html/functions_func_c.html
@@ -76,6 +76,7 @@ $(function() {
- c -
diff --git a/docs/build/html/functions_i.html b/docs/build/html/functions_i.html
index 70eb7d0fa..7d601d7ba 100644
--- a/docs/build/html/functions_i.html
+++ b/docs/build/html/functions_i.html
@@ -95,7 +95,7 @@ $(function() {
iS : MLXConvParams< NDIM >
is_available() : mlx::core::array
is_donatable() : mlx::core::array
-is_equivalent() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::fast::ScaledDotProductAttention , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::Uniform
+is_equivalent() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::fast::ScaledDotProductAttention , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::Uniform
is_open() : mlx::core::io::FileReader , mlx::core::io::FileWriter , mlx::core::io::Reader , mlx::core::io::Writer
is_ready() : pocketfft::detail::threading::latch
is_tracer() : mlx::core::array
diff --git a/docs/build/html/functions_j.html b/docs/build/html/functions_j.html
index a0c1ec1af..8657cd14e 100644
--- a/docs/build/html/functions_j.html
+++ b/docs/build/html/functions_j.html
@@ -77,7 +77,7 @@ $(function() {
jump_a : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
jump_b : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
jump_params : mlx::steel::Conv2DInputBlockLoaderGeneral< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoaderGeneral< T, BM, BN, BK, tgp_size, tgp_padding >
-jvp() : mlx::core::Abs , mlx::core::Add , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::AsStrided , mlx::core::AsType , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::Custom , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose
+jvp() : mlx::core::Abs , mlx::core::Add , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::Custom , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose
diff --git a/docs/build/html/functions_p.html b/docs/build/html/functions_p.html
index 0475a32f5..d581b8645 100644
--- a/docs/build/html/functions_p.html
+++ b/docs/build/html/functions_p.html
@@ -85,7 +85,7 @@ $(function() {
Primitive() : mlx::core::Primitive
primitive_id() : mlx::core::array
primitive_ptr() : mlx::core::array
-print() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::PrintFormatter , mlx::core::QRF , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::Uniform
+print() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::Arange , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Cholesky , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Load , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::PrintFormatter , mlx::core::QRF , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::Uniform
Prod : mlx::core::Reduce , mlx::core::Scan , mlx::core::Scatter
prod() : pocketfft::detail::util
ptr() : mlx::core::allocator::Buffer
diff --git a/docs/build/html/functions_t.html b/docs/build/html/functions_t.html
index 90c7cbe9a..f0e8fd5b0 100644
--- a/docs/build/html/functions_t.html
+++ b/docs/build/html/functions_t.html
@@ -99,8 +99,8 @@ $(function() {
tile_stride_b : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
tiles_m : mlx::steel::GEMMParams , mlx::steel::GEMMSpiltKParams , mlx::steel::ImplicitGemmConv2DParams
tiles_n : mlx::steel::GEMMParams , mlx::steel::GEMMSpiltKParams , mlx::steel::ImplicitGemmConv2DParams
-tm : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
TM : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
+tm : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
TM_stride : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
tn : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
TN : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
diff --git a/docs/build/html/functions_v.html b/docs/build/html/functions_v.html
index 786549f12..801da911d 100644
--- a/docs/build/html/functions_v.html
+++ b/docs/build/html/functions_v.html
@@ -83,8 +83,8 @@ $(function() {
value_type : mlx::core::array::ArrayIterator , pocketfft::detail::threading::aligned_allocator< T >
vec_size : mlx::steel::BlockLoader< T, BROWS, BCOLS, dst_ld, reduction_dim, tgp_size, alignment, n_reads, TCOLS, TROWS > , mlx::steel::ChannelHelper< n_channels_ > , mlx::steel::ChannelHelper< 1 > , mlx::steel::ChannelHelper< 2 > , mlx::steel::ChannelHelper< 3 > , mlx::steel::ChannelHelper< 4 > , mlx::steel::Conv2DInputBlockLoaderGeneral< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DInputBlockLoaderLargeFilter< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DInputBlockLoaderSmallChannels< T, BM, BN, BK, tgp_size, n_channels, tgp_padding > , mlx::steel::Conv2DInputBlockLoaderSmallFilter< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoader< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoaderGeneral< T, BM, BN, BK, tgp_size, tgp_padding > , mlx::steel::Conv2DWeightBlockLoaderSmallChannels< T, BM, BN, BK, tgp_size, n_channels, tgp_padding >
vectorDescriptor() : MPS::VectorDescriptor
-vjp() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::Custom , mlx::core::fast::LayerNorm , mlx::core::fast::RMSNorm , mlx::core::fast::RoPE , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose
-vmap() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::Custom , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::Uniform
+vjp() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::AsStrided , mlx::core::AsType , mlx::core::BlockMaskedMM , mlx::core::BlockSparseMM , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Convolution , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::CustomVJP , mlx::core::Depends , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::Custom , mlx::core::fast::LayerNorm , mlx::core::fast::RMSNorm , mlx::core::fast::RoPE , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Scatter , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::Subtract , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose
+vmap() : mlx::core::Abs , mlx::core::Add , mlx::core::AddMM , mlx::core::ArcCos , mlx::core::ArcCosh , mlx::core::ArcSin , mlx::core::ArcSinh , mlx::core::ArcTan2 , mlx::core::ArcTan , mlx::core::ArcTanh , mlx::core::ArgPartition , mlx::core::ArgReduce , mlx::core::ArgSort , mlx::core::AsType , mlx::core::BitwiseBinary , mlx::core::BlockSparseQMM , mlx::core::Broadcast , mlx::core::Ceil , mlx::core::Cholesky , mlx::core::Compiled , mlx::core::Concatenate , mlx::core::Conjugate , mlx::core::Copy , mlx::core::Cos , mlx::core::Cosh , mlx::core::Divide , mlx::core::DivMod , mlx::core::Equal , mlx::core::Erf , mlx::core::ErfInv , mlx::core::Exp , mlx::core::Expm1 , mlx::core::fast::Custom , mlx::core::FFT , mlx::core::Floor , mlx::core::Full , mlx::core::Gather , mlx::core::Greater , mlx::core::GreaterEqual , mlx::core::Inverse , mlx::core::Less , mlx::core::LessEqual , mlx::core::Log1p , mlx::core::Log , mlx::core::LogAddExp , mlx::core::LogicalAnd , mlx::core::LogicalNot , mlx::core::LogicalOr , mlx::core::Matmul , mlx::core::Maximum , mlx::core::Minimum , mlx::core::Multiply , mlx::core::Negative , mlx::core::NotEqual , mlx::core::NumberOfElements , mlx::core::Pad , mlx::core::Partition , mlx::core::Power , mlx::core::Primitive , mlx::core::QuantizedMatmul , mlx::core::RandomBits , mlx::core::Reduce , mlx::core::Remainder , mlx::core::Reshape , mlx::core::Round , mlx::core::Scan , mlx::core::Select , mlx::core::Sigmoid , mlx::core::Sign , mlx::core::Sin , mlx::core::Sinh , mlx::core::Slice , mlx::core::SliceUpdate , mlx::core::Softmax , mlx::core::Sort , mlx::core::Split , mlx::core::Sqrt , mlx::core::Square , mlx::core::StopGradient , mlx::core::Subtract , mlx::core::SVD , mlx::core::Tan , mlx::core::Tanh , mlx::core::Transpose , mlx::core::Uniform
diff --git a/docs/build/html/functions_vars_t.html b/docs/build/html/functions_vars_t.html
index 3b6040778..8cd202b1b 100644
--- a/docs/build/html/functions_vars_t.html
+++ b/docs/build/html/functions_vars_t.html
@@ -88,8 +88,8 @@ $(function() {
tile_stride_b : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
tiles_m : mlx::steel::GEMMParams , mlx::steel::GEMMSpiltKParams , mlx::steel::ImplicitGemmConv2DParams
tiles_n : mlx::steel::GEMMParams , mlx::steel::GEMMSpiltKParams , mlx::steel::ImplicitGemmConv2DParams
-tm : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
TM : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
+tm : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
TM_stride : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
tn : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
TN : mlx::steel::BlockMMA< T, U, BM, BN, BK, WM, WN, transpose_a, transpose_b, lda_tgp, ldb_tgp, AccumType, Epilogue >
diff --git a/docs/build/html/genindex.html b/docs/build/html/genindex.html
index f7fe3ca9e..3b58c2384 100644
--- a/docs/build/html/genindex.html
+++ b/docs/build/html/genindex.html
@@ -7,7 +7,7 @@
- Index — MLX 0.13.0 documentation
+ Index — MLX 0.13.1 documentation
@@ -35,7 +35,7 @@
-
+
@@ -128,8 +128,8 @@
-
-
+
+
@@ -243,6 +243,7 @@
Operations
Linear Algebra
Metal
Further Reading
@@ -815,6 +824,8 @@ document.write(`
add() (in module mlx.core)
addmm (C++ function)
+
+ addmm() (in module mlx.core)
ALiBi (class in mlx.nn)
@@ -909,6 +920,8 @@ document.write(`
array_equal() (in module mlx.core)
as_strided (C++ function)
+
+ as_strided() (in module mlx.core)
astype (C++ function)
@@ -965,6 +978,8 @@ document.write(`
block_sparse_mm (C++ function)
block_sparse_mm() (in module mlx.core)
+
+ block_sparse_qmm (C++ function)
broadcast_arrays (C++ function)
@@ -985,6 +1000,8 @@ document.write(`
ceil() (in module mlx.core)
children() (Module method)
+
+ cholesky() (in module mlx.core.linalg)
clear_cache() (in module mlx.core.metal)
@@ -1020,10 +1037,14 @@ document.write(`
conv2d() (in module mlx.core)
- conv_general (C++ function) , [1]
+ conv3d (C++ function)
+
+ Conv3d (class in mlx.nn)
-
-
+
+ svd() (in module mlx.core.linalg)
+
swapaxes (C++ function)
swapaxes() (array method)
diff --git a/docs/build/html/group__ops.html b/docs/build/html/group__ops.html
index 8bae2068f..ad56fc697 100644
--- a/docs/build/html/group__ops.html
+++ b/docs/build/html/group__ops.html
@@ -769,6 +769,9 @@ Functions
array mlx::core::conv2d (const array &input, const array &weight, const std::pair< int, int > &stride={1, 1}, const std::pair< int, int > &padding={0, 0}, const std::pair< int, int > &dilation={1, 1}, int groups=1, StreamOrDevice s={})
2D convolution with a filter
+array mlx::core::conv3d (const array &input, const array &weight, const std::tuple< int, int, int > &stride={1, 1, 1}, const std::tuple< int, int, int > &padding={0, 0, 0}, const std::tuple< int, int, int > &dilation={1, 1, 1}, int groups=1, StreamOrDevice s={})
+ 3D convolution with a filter
+
array mlx::core::quantized_matmul (const array &x, const array &w, const array &scales, const array &biases, bool transpose =true, int group_size=64, int bits=4, StreamOrDevice s={})
Quantized matmul multiplies x with a quantized matrix w.
@@ -778,6 +781,9 @@ Functions
array mlx::core::dequantize (const array &w, const array &scales, const array &biases, int group_size=64, int bits=4, StreamOrDevice s={})
Dequantize a matrix produced by quantize()
+array mlx::core::block_sparse_qmm (const array &x, const array &w, const array &scales, const array &biases, std::optional< array > lhs_indices=std::nullopt, std::optional< array > rhs_indices=std::nullopt, bool transpose =true, int group_size=64, int bits=4, StreamOrDevice s={})
+ Compute matrix products with matrix-level gather.
+
array mlx::core::tensordot (const array &a, const array &b, const int axis=2, StreamOrDevice s={})
Returns a contraction of a and b over multiple dimensions.
@@ -2378,6 +2384,69 @@ Functions
Compute matrix product with matrix-level gather.
+
+
+
+◆ block_sparse_qmm()
+
+
+
+
+
+ array mlx::core::block_sparse_qmm
+ (
+ const array & x ,
+
+
+
+
+ const array & w ,
+
+
+
+
+ const array & scales ,
+
+
+
+
+ const array & biases ,
+
+
+
+
+ std::optional< array > lhs_indices = std::nullopt ,
+
+
+
+
+ std::optional< array > rhs_indices = std::nullopt ,
+
+
+
+
+ bool transpose = true ,
+
+
+
+
+ int group_size = 64 ,
+
+
+
+
+ int bits = 4 ,
+
+
+
+
+ StreamOrDevice s = {} )
+
+
+
+
+
Compute matrix products with matrix-level gather.
+
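As a usage illustration (not part of the patch): a short, hypothetical sketch of calling block_sparse_qmm with a stack of quantized weight matrices. The shapes, the per-matrix quantize-and-stack step, and the routing indices are all assumptions made for the example, not something specified in this hunk.

    // Hypothetical sketch: gather-style quantized matmul via block_sparse_qmm.
    #include <optional>
    #include <vector>
    #include "mlx/mlx.h"

    using namespace mlx::core;

    int main() {
      // Two rows of activations, each routed to one of four weight matrices.
      auto x = random::normal({2, 1, 256});

      // Quantize each candidate (512 x 256) matrix and stack the pieces so
      // that w, scales and biases carry a leading axis of size 4.
      std::vector<array> qs, ss, bs;
      for (int e = 0; e < 4; ++e) {
        auto [q, s, b] = quantize(random::normal({512, 256}));
        qs.push_back(q);
        ss.push_back(s);
        bs.push_back(b);
      }
      auto w = stack(qs);
      auto scales = stack(ss);
      auto biases = stack(bs);

      // Route row 0 to matrix 0 and row 1 to matrix 3.
      auto rhs_indices = array({0, 3}, uint32);

      auto out = block_sparse_qmm(
          x, w, scales, biases, /*lhs_indices=*/std::nullopt, rhs_indices);
      out.eval();  // expected shape: {2, 1, 512}
      return 0;
    }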
@@ -2651,6 +2720,54 @@ Functions
2D convolution with a filter
+
+
+
+◆ conv3d()
+
+
+
+
+
+ array mlx::core::conv3d
+ (
+ const array & input ,
+
+
+
+
+ const array & weight ,
+
+
+
+
+ const std::tuple< int, int, int > & stride = {1, 1, 1} ,
+
+
+
+
+ const std::tuple< int, int, int > & padding = {0, 0, 0} ,
+
+
+
+
+ const std::tuple< int, int, int > & dilation = {1, 1, 1} ,
+
+
+
+
+ int groups = 1 ,
+
+
+
+
+ StreamOrDevice s = {} )
+
+
+
+
+
3D convolution with a filter
+
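As a usage note (not part of the patch): a minimal sketch of calling the new conv3d overload. The NDHWC input layout and the (C_out, kD, kH, kW, C_in) weight layout are assumptions carried over from how conv1d/conv2d are documented, not something stated in this hunk.

    // Hypothetical sketch of conv3d; the layouts below are assumed:
    // input is (N, D, H, W, C_in), weight is (C_out, kD, kH, kW, C_in).
    #include "mlx/mlx.h"

    using namespace mlx::core;

    int main() {
      auto input = zeros({1, 8, 8, 8, 3});
      auto weight = zeros({16, 3, 3, 3, 3});
      auto out = conv3d(
          input, weight,
          /*stride=*/{1, 1, 1}, /*padding=*/{1, 1, 1}, /*dilation=*/{1, 1, 1});
      out.eval();  // with this padding the spatial dims are preserved: {1, 8, 8, 8, 16}
      return 0;
    }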
diff --git a/docs/build/html/hierarchy.html b/docs/build/html/hierarchy.html
index ece2f6176..1b2cdec32 100644
--- a/docs/build/html/hierarchy.html
+++ b/docs/build/html/hierarchy.html
@@ -295,71 +295,73 @@ $(function() {
C mlx::core::BitwiseBinary
C mlx::core::BlockMaskedMM
C mlx::core::BlockSparseMM
- C mlx::core::Broadcast
- C mlx::core::Ceil
- C mlx::core::Concatenate
- C mlx::core::Conjugate
- C mlx::core::Convolution
- C mlx::core::Copy
- C mlx::core::Cos
- C mlx::core::Cosh
- C mlx::core::Divide
- C mlx::core::Equal
- C mlx::core::Erf
- C mlx::core::ErfInv
- C mlx::core::Exp
- C mlx::core::Expm1
- C mlx::core::FFT
- C mlx::core::Floor
- C mlx::core::Full
- C mlx::core::Gather
- C mlx::core::Greater
- C mlx::core::GreaterEqual
- C mlx::core::Inverse
- C mlx::core::Less
- C mlx::core::LessEqual
- C mlx::core::Load
- C mlx::core::Log
- C mlx::core::Log1p
- C mlx::core::LogAddExp
- C mlx::core::LogicalAnd
- C mlx::core::LogicalNot
- C mlx::core::LogicalOr
- C mlx::core::Matmul
- C mlx::core::Maximum
- C mlx::core::Minimum
- C mlx::core::Multiply
- C mlx::core::Negative
- C mlx::core::NotEqual
- C mlx::core::NumberOfElements
- C mlx::core::Pad
- C mlx::core::Partition
- C mlx::core::Power
- C mlx::core::QuantizedMatmul
- C mlx::core::RandomBits
- C mlx::core::Reduce
- C mlx::core::Remainder
- C mlx::core::Reshape
- C mlx::core::Round
- C mlx::core::Scan
- C mlx::core::Scatter
- C mlx::core::Select
- C mlx::core::Sigmoid
- C mlx::core::Sign
- C mlx::core::Sin
- C mlx::core::Sinh
- C mlx::core::Slice
- C mlx::core::SliceUpdate
- C mlx::core::Softmax
- C mlx::core::Sort
- C mlx::core::Sqrt
- C mlx::core::Square
- C mlx::core::StopGradient
- C mlx::core::Subtract
- C mlx::core::Tan
- C mlx::core::Tanh
- C mlx::core::Transpose
- C mlx::core::Uniform
+ C mlx::core::BlockSparseQMM
+ C mlx::core::Broadcast
+ C mlx::core::Ceil
+ C mlx::core::Cholesky
+ C mlx::core::Concatenate
+ C mlx::core::Conjugate
+ C mlx::core::Convolution
+ C mlx::core::Copy
+ C mlx::core::Cos
+ C mlx::core::Cosh
+ C mlx::core::Divide
+ C mlx::core::Equal
+ C mlx::core::Erf
+ C mlx::core::ErfInv
+ C mlx::core::Exp
+ C mlx::core::Expm1
+ C mlx::core::FFT
+ C mlx::core::Floor
+ C mlx::core::Full
+ C mlx::core::Gather
+ C mlx::core::Greater
+ C mlx::core::GreaterEqual
+ C mlx::core::Inverse
+ C mlx::core::Less
+ C mlx::core::LessEqual
+ C mlx::core::Load
+ C mlx::core::Log
+ C mlx::core::Log1p
+ C mlx::core::LogAddExp
+ C mlx::core::LogicalAnd
+ C mlx::core::LogicalNot
+ C mlx::core::LogicalOr
+ C mlx::core::Matmul
+ C mlx::core::Maximum
+ C mlx::core::Minimum
+ C mlx::core::Multiply
+ C mlx::core::Negative
+ C mlx::core::NotEqual
+ C mlx::core::NumberOfElements
+ C mlx::core::Pad
+ C mlx::core::Partition
+ C mlx::core::Power
+ C mlx::core::QuantizedMatmul
+ C mlx::core::RandomBits
+ C mlx::core::Reduce
+ C mlx::core::Remainder
+ C mlx::core::Reshape
+ C mlx::core::Round
+ C mlx::core::Scan
+ C mlx::core::Scatter
+ C mlx::core::Select
+ C mlx::core::Sigmoid
+ C mlx::core::Sign
+ C mlx::core::Sin
+ C mlx::core::Sinh
+ C mlx::core::Slice
+ C mlx::core::SliceUpdate
+ C mlx::core::Softmax
+ C mlx::core::Sort
+ C mlx::core::Sqrt
+ C mlx::core::Square
+ C mlx::core::StopGradient
+ C mlx::core::Subtract
+ C mlx::core::Tan
+ C mlx::core::Tanh
+ C mlx::core::Transpose
+ C mlx::core::Uniform
► C mlx::core::fast::Custom
C mlx::core::fast::LayerNorm
C mlx::core::fast::LayerNormVJP
diff --git a/docs/build/html/index.html b/docs/build/html/index.html
index e97ca1e75..7e8403c53 100644
--- a/docs/build/html/index.html
+++ b/docs/build/html/index.html
@@ -8,7 +8,7 @@
- MLX — MLX 0.13.0 documentation
+ MLX — MLX 0.13.1 documentation
@@ -36,7 +36,7 @@
-
+
@@ -130,8 +130,8 @@
-
-
+
+
@@ -245,6 +245,7 @@
Operations
Linear Algebra
Metal
Further Reading
@@ -867,7 +876,7 @@ are the CPU and GPU.
diff --git a/docs/build/html/install.html b/docs/build/html/install.html
index b278367c2..458329dda 100644
--- a/docs/build/html/install.html
+++ b/docs/build/html/install.html
@@ -8,7 +8,7 @@
- Build and Install — MLX 0.13.0 documentation
+ Build and Install — MLX 0.13.1 documentation
@@ -36,7 +36,7 @@
-
+
@@ -131,8 +131,8 @@
-
-
+
+
@@ -246,6 +246,7 @@
Operations
Linear Algebra
Metal
Further Reading
@@ -794,7 +803,10 @@ document.write(`
Build from source
Build Requirements
Python API
-C++ API
+C++ API
+
Troubleshooting
Metal not found
x86 Shell
@@ -944,16 +956,19 @@ should point to the path to the built metal library.
MLX_BUILD_METAL
ON
-MLX_BUILD_PYTHON_BINDINGS
-OFF
-
-MLX_METAL_DEBUG
-OFF
-
-MLX_BUILD_SAFETENSORS
+MLX_BUILD_CPU
ON
-MLX_BUILD_GGUF
+MLX_BUILD_PYTHON_BINDINGS
+OFF
+
+MLX_METAL_DEBUG
+OFF
+
+MLX_BUILD_SAFETENSORS
+ON
+
+MLX_BUILD_GGUF
ON
@@ -972,6 +987,22 @@ macOS SDK will be used
+
+Binary Size Minimization
+To produce a smaller binary, use the CMake flags CMAKE_BUILD_TYPE=MinSizeRel
+and BUILD_SHARED_LIBS=ON.
+The MLX CMake build has several additional options for making smaller binaries.
+For example, if you don’t need the CPU backend or support for safetensors and
+GGUF, you can do:
 cmake .. \
   -DCMAKE_BUILD_TYPE=MinSizeRel \
   -DBUILD_SHARED_LIBS=ON \
   -DMLX_BUILD_CPU=OFF \
   -DMLX_BUILD_SAFETENSORS=OFF \
   -DMLX_BUILD_GGUF=OFF
+
+
+
Troubleshooting
@@ -1067,7 +1098,10 @@ wipe your build cache with Build from source
Build Requirements
Python API
-C++ API
+C++ API
+
Troubleshooting
Metal not found
x86 Shell
diff --git a/docs/build/html/kernels_2steel_2gemm_2gemm_8h_source.html b/docs/build/html/kernels_2steel_2gemm_2gemm_8h_source.html
index 5d0d95950..4c7b304c7 100644
--- a/docs/build/html/kernels_2steel_2gemm_2gemm_8h_source.html
+++ b/docs/build/html/kernels_2steel_2gemm_2gemm_8h_source.html
@@ -389,7 +389,7 @@ $(function() { codefold.init(0); });
-float accum_type
Definition transforms.h:49
+float accum_type
Definition transforms.h:57
diff --git a/docs/build/html/linalg_8h.html b/docs/build/html/linalg_8h.html
index 961869cf7..10ab25b35 100644
--- a/docs/build/html/linalg_8h.html
+++ b/docs/build/html/linalg_8h.html
@@ -119,6 +119,8 @@ Functions
array mlx::core::linalg::inv (const array &a, StreamOrDevice s={})
+array mlx::core::linalg::cholesky (const array &a, bool upper=false, StreamOrDevice s={})
+
diff --git a/docs/build/html/linalg_8h_source.html b/docs/build/html/linalg_8h_source.html
index 6a06c190e..bf88ac647 100644
--- a/docs/build/html/linalg_8h_source.html
+++ b/docs/build/html/linalg_8h_source.html
@@ -147,12 +147,15 @@ $(function() { codefold.init(0); });
-
+
+
+
+array cholesky(const array &a, bool upper=false, StreamOrDevice s={})
std::vector< array > svd(const array &a, StreamOrDevice s={})
array norm(const array &a, const double ord, const std::optional< std::vector< int > > &axis=std::nullopt, bool keepdims=false, StreamOrDevice s={})
Compute vector or matrix norms.
array inv(const array &a, StreamOrDevice s={})
diff --git a/docs/build/html/mma_8h_source.html b/docs/build/html/mma_8h_source.html
index a90008258..e673f2bd1 100644
--- a/docs/build/html/mma_8h_source.html
+++ b/docs/build/html/mma_8h_source.html
@@ -296,87 +296,158 @@ $(function() { codefold.init(0); });
-208      thread const Epilogue& epilogue_op) const {
-210    C += (sm + tm) * ldc + (tn + sn) * fdc;
-215    for (short i = 0; i < TM; i++) {
-217      for (short j = 0; j < TN; j++) {
-219        thread const auto& accum = results[i * TN + j].thread_elements();
-225            epilogue_op.apply(accum[0], C[offset_c]),
-226            epilogue_op.apply(accum[1], C[offset_c + fdc])};
-229        D[offset_d] = outs[0];
-230        D[offset_d + 1] = outs[1];
+202    template <typename BinaryEpilogue>
+207      thread const BinaryEpilogue& epilogue_op) {
+209    C += (sm + tm) * ldc + (tn + sn) * fdc;
+213    for (short i = 0; i < TM; i++) {
+215      for (short j = 0; j < TN; j++) {
+217        thread auto& accum = results[i * TN + j].thread_elements();
+221        accum[0] = epilogue_op.apply(accum[0], C[offset_c]);
+222        accum[1] = epilogue_op.apply(accum[1], C[offset_c + fdc]);
-241      short2 dst_tile_dims,
-242      thread const Epilogue& epilogue_op) const {
-244    C += (sm + tm) * ldc + (tn + sn) * fdc;
-246    dst_tile_dims -= short2(tn + sn, sm + tm);
-248    if (dst_tile_dims.x <= 0 || dst_tile_dims.y <= 0)
+228    template <typename BinaryEpilogue>
+233      short2 dst_tile_dims,
+234      thread const BinaryEpilogue& epilogue_op) {
+236    C += (sm + tm) * ldc + (tn + sn) * fdc;
+237    dst_tile_dims -= short2(tn + sn, sm + tm);
+239    if (dst_tile_dims.x <= 0 || dst_tile_dims.y <= 0)
+244    for (short i = 0; i < TM; i++) {
+246      for (short j = 0; j < TN; j++) {
+248        thread auto& accum = results[i * TN + j].thread_elements();
-252    for (int i = 0; i < TM; i++) {
-255      for (int j = 0; j < TN; j++) {
-257        thread const auto& accum = results[i * TN + j].thread_elements();
+254        if ((j * TN_stride + 1) < dst_tile_dims.x) {
+255          c_elems[0] = C[offset_c];
+256          c_elems[1] = C[offset_c + fdc];
+257        } else if ((j * TN_stride) < dst_tile_dims.x) {
+258          c_elems[0] = C[offset_c];
-263        D[offset_d] = epilogue_op.apply(accum[0], C[offset_c]);
-266        if (j * TN_stride + 1 < dst_tile_dims.x) {
-267          D[offset_d + 1] = epilogue_op.apply(accum[1], C[offset_c + fdc]);
+262        accum[0] = epilogue_op.apply(accum[0], c_elems[0]);
+263        accum[1] = epilogue_op.apply(accum[1], c_elems[1]);
+275      thread const Epilogue& epilogue_op) const {
+277    C += (sm + tm) * ldc + (tn + sn) * fdc;
+282    for (short i = 0; i < TM; i++) {
+284      for (short j = 0; j < TN; j++) {
+286        thread const auto& accum = results[i * TN + j].thread_elements();
+292            epilogue_op.apply(accum[0], C[offset_c]),
+293            epilogue_op.apply(accum[1], C[offset_c + fdc])};
+296        D[offset_d] = outs[0];
+297        D[offset_d + 1] = outs[1];
+308      short2 dst_tile_dims,
+309      thread const Epilogue& epilogue_op) const {
+311    C += (sm + tm) * ldc + (tn + sn) * fdc;
+313    dst_tile_dims -= short2(tn + sn, sm + tm);
+315    if (dst_tile_dims.x <= 0 || dst_tile_dims.y <= 0)
+319    for (int i = 0; i < TM; i++) {
+322      for (int j = 0; j < TN; j++) {
+324        thread const auto& accum = results[i * TN + j].thread_elements();
+330        D[offset_d] = epilogue_op.apply(accum[0], C[offset_c]);
+333        if (j * TN_stride + 1 < dst_tile_dims.x) {
+334          D[offset_d + 1] = epilogue_op.apply(accum[1], C[offset_c + fdc]);
@@ -391,13 +462,15 @@ $(function() { codefold.init(0); });
METAL_FUNC void mma(const threadgroup T *As, const threadgroup T *Bs)
Definition mma.h:93
simdgroup_matrix< AccumType, 8, 8 > results[TM *TN]
Definition mma.h:62
STEEL_CONST short TN
Definition mma.h:44
-METAL_FUNC void store_result_safe(device U *D, const int ldd, const device U *C, const int ldc, const int fdc, short2 dst_tile_dims, thread const Epilogue &epilogue_op) const
Definition mma.h:235
-METAL_FUNC void store_result(device U *D, const int ldd, const device U *C, const int ldc, const int fdc, thread const Epilogue &epilogue_op) const
Definition mma.h:202
+METAL_FUNC void store_result_safe(device U *D, const int ldd, const device U *C, const int ldc, const int fdc, short2 dst_tile_dims, thread const Epilogue &epilogue_op) const
Definition mma.h:302
+METAL_FUNC void store_result(device U *D, const int ldd, const device U *C, const int ldc, const int fdc, thread const Epilogue &epilogue_op) const
Definition mma.h:269
+METAL_FUNC void apply_epilogue(const device U *C, const int ldc, const int fdc, thread const BinaryEpilogue &epilogue_op)
Definition mma.h:203
METAL_FUNC void store_result(device U *D, const int ldd) const
Definition mma.h:147
STEEL_CONST short TN_stride
Definition mma.h:39
STEEL_CONST short tile_stride_a
Definition mma.h:56
simdgroup_matrix< AccumType, 8, 8 > Asimd[TM]
Definition mma.h:60
short Bs_offset
Definition mma.h:73
+METAL_FUNC void apply_epilogue_safe(const device U *C, const int ldc, const int fdc, short2 dst_tile_dims, thread const BinaryEpilogue &epilogue_op)
Definition mma.h:229
METAL_FUNC BlockMMA(ushort simd_group_id, ushort simd_lane_id)
Definition mma.h:76
short sm
Definition mma.h:69
STEEL_CONST short simd_stride_a
Definition mma.h:47
diff --git a/docs/build/html/namespacemembers_b.html b/docs/build/html/namespacemembers_b.html
index 67aa59de6..b17e67ab0 100644
--- a/docs/build/html/namespacemembers_b.html
+++ b/docs/build/html/namespacemembers_b.html
@@ -84,6 +84,7 @@ $(function() {
bitwise_xor() : mlx::core
block_masked_mm() : mlx::core
block_sparse_mm() : mlx::core
+block_sparse_qmm() : mlx::core
bool_ : mlx::core
broadcast_arrays() : mlx::core
broadcast_shapes() : mlx::core
diff --git a/docs/build/html/namespacemembers_c.html b/docs/build/html/namespacemembers_c.html
index 8ed56d281..818b93804 100644
--- a/docs/build/html/namespacemembers_c.html
+++ b/docs/build/html/namespacemembers_c.html
@@ -83,6 +83,7 @@ $(function() {
check_contiguity() : mlx::core
check_shape_dim() : mlx::core
checkpoint : mlx::core
+cholesky() : mlx::core::linalg
clear_cache() : mlx::core::metal
clip() : mlx::core
collapse_contiguous_dims() : mlx::core
@@ -102,6 +103,7 @@ $(function() {
ContiguousStridedReduce : mlx::core
conv1d() : mlx::core
conv2d() : mlx::core
+conv3d() : mlx::core
conv_general() : mlx::core
copy() : mlx::core
copy_gpu() : mlx::core
diff --git a/docs/build/html/namespacemembers_func_b.html b/docs/build/html/namespacemembers_func_b.html
index 00707b647..d124b1d39 100644
--- a/docs/build/html/namespacemembers_func_b.html
+++ b/docs/build/html/namespacemembers_func_b.html
@@ -81,6 +81,7 @@ $(function() {
bitwise_xor() : mlx::core
block_masked_mm() : mlx::core
block_sparse_mm() : mlx::core
+block_sparse_qmm() : mlx::core
broadcast_arrays() : mlx::core
broadcast_shapes() : mlx::core
broadcast_to() : mlx::core
diff --git a/docs/build/html/namespacemembers_func_c.html b/docs/build/html/namespacemembers_func_c.html
index 48c83537f..7afc7ea56 100644
--- a/docs/build/html/namespacemembers_func_c.html
+++ b/docs/build/html/namespacemembers_func_c.html
@@ -80,6 +80,7 @@ $(function() {
ceil() : metal , metal::fast , metal::precise , mlx::core
check_contiguity() : mlx::core
check_shape_dim() : mlx::core
+cholesky() : mlx::core::linalg
clear_cache() : mlx::core::metal
clip() : mlx::core
collapse_contiguous_dims() : mlx::core
@@ -92,6 +93,7 @@ $(function() {
conjugate() : mlx::core
conv1d() : mlx::core
conv2d() : mlx::core
+conv3d() : mlx::core
conv_general() : mlx::core
copy() : mlx::core
copy_gpu() : mlx::core
diff --git a/docs/build/html/namespacemlx_1_1core.html b/docs/build/html/namespacemlx_1_1core.html
index c755c715f..6a9605979 100644
--- a/docs/build/html/namespacemlx_1_1core.html
+++ b/docs/build/html/namespacemlx_1_1core.html
@@ -153,10 +153,14 @@ Classes
class BlockSparseMM
+class BlockSparseQMM
+class Broadcast
class Ceil
+class Cholesky
+class Compiled
struct complex128_t
@@ -1212,6 +1216,9 @@ Functions
array conv2d (const array &input, const array &weight, const std::pair< int, int > &stride={1, 1}, const std::pair< int, int > &padding={0, 0}, const std::pair< int, int > &dilation={1, 1}, int groups=1, StreamOrDevice s={})
2D convolution with a filter
+array conv3d (const array &input, const array &weight, const std::tuple< int, int, int > &stride={1, 1, 1}, const std::tuple< int, int, int > &padding={0, 0, 0}, const std::tuple< int, int, int > &dilation={1, 1, 1}, int groups=1, StreamOrDevice s={})
+3D convolution with a filter
+
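As a quick illustration of the new conv3d entry point, a minimal C++ sketch follows; the channels-last (N, D, H, W, C) input layout and (C_out, kD, kH, kW, C_in) weight layout are assumptions carried over from the other convolution ops, not something this patch states.

  // Minimal sketch, assuming the umbrella header "mlx/mlx.h" and a
  // channels-last layout matching conv1d/conv2d (assumption, not from this patch).
  #include "mlx/mlx.h"

  using namespace mlx::core;

  int main() {
    array input = random::normal({1, 8, 16, 16, 3});  // N, D, H, W, C_in
    array weight = random::normal({4, 3, 3, 3, 3});   // C_out, kD, kH, kW, C_in
    // Defaults: stride {1, 1, 1}, padding {0, 0, 0}, dilation {1, 1, 1}, groups 1.
    array out = conv3d(input, weight);
    eval({out});  // with the assumed layout, out would be {1, 6, 14, 14, 4}
    return 0;
  }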
array quantized_matmul (const array &x, const array &w, const array &scales, const array &biases, bool transpose =true, int group_size=64, int bits=4, StreamOrDevice s={})
Quantized matmul multiplies x with a quantized matrix w.
@@ -1221,6 +1228,9 @@ Functions
array dequantize (const array &w, const array &scales, const array &biases, int group_size=64, int bits=4, StreamOrDevice s={})
Dequantize a matrix produced by quantize()
+array block_sparse_qmm (const array &x, const array &w, const array &scales, const array &biases, std::optional< array > lhs_indices=std::nullopt, std::optional< array > rhs_indices=std::nullopt, bool transpose =true, int group_size=64, int bits=4, StreamOrDevice s={})
+Compute matrix products with matrix-level gather.
+
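Since block_sparse_qmm extends the quantized matmul family, a short C++ sketch of the call pattern may help; the tuple returned by quantize() and the shapes used below are assumptions for illustration, not part of this patch.

  // Minimal sketch, assuming "mlx/mlx.h" and that quantize() returns
  // (w_q, scales, biases); shapes are illustrative assumptions.
  #include "mlx/mlx.h"

  using namespace mlx::core;

  int main() {
    array x = random::normal({2, 512});     // activations
    array w = random::normal({1024, 512});  // weights, (out_features, in_features)

    // Quantize once, then reuse the packed weights, scales and biases.
    auto [wq, scales, biases] = quantize(w, /*group_size=*/64, /*bits=*/4);
    array y = quantized_matmul(x, wq, scales, biases);  // transpose=true by default

    // With no lhs/rhs index arrays this should reduce to the same product;
    // the optional index arguments are what enable the matrix-level gather.
    array y2 = block_sparse_qmm(x, wq, scales, biases);
    eval({y, y2});
    return 0;
  }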
array tensordot (const array &a, const array &b, const int axis=2, StreamOrDevice s={})
Returns a contraction of a and b over multiple dimensions.
diff --git a/docs/build/html/namespacemlx_1_1core_1_1linalg.html b/docs/build/html/namespacemlx_1_1core_1_1linalg.html
index 96409f4be..8520bbfdc 100644
--- a/docs/build/html/namespacemlx_1_1core_1_1linalg.html
+++ b/docs/build/html/namespacemlx_1_1core_1_1linalg.html
@@ -102,8 +102,36 @@ Functions
array inv (const array &a, StreamOrDevice s={})
+array cholesky (const array &a, bool upper=false, StreamOrDevice s={})
+
+◆ cholesky()
+array mlx::core::linalg::cholesky (const array &a, bool upper = false, StreamOrDevice s = {})
+
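A minimal C++ sketch of calling the new linalg::cholesky() follows; routing the op to the CPU device is an assumption (the GPU backend may not implement this primitive yet), as is the small example matrix.

  // Minimal sketch, assuming "mlx/mlx.h"; the explicit CPU device is an
  // assumption, not something this patch documents.
  #include "mlx/mlx.h"

  using namespace mlx::core;

  int main() {
    // A small symmetric positive-definite matrix.
    array a = reshape(array({4.0f, 2.0f, 2.0f, 3.0f}), {2, 2});
    // Lower-triangular L with matmul(L, transpose(L)) equal to a;
    // pass upper=true for the upper-triangular factor instead.
    array L = linalg::cholesky(a, /*upper=*/false, Device::cpu);
    eval({L});
    return 0;
  }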
◆ inv()
diff --git a/docs/build/html/namespaces.html b/docs/build/html/namespaces.html
index 5666343a0..225ce3ca4 100644
--- a/docs/build/html/namespaces.html
+++ b/docs/build/html/namespaces.html
@@ -196,91 +196,93 @@ $(function() {
C BitwiseBinary
C BlockMaskedMM
C BlockSparseMM
- C Broadcast
- C Ceil
- C Compiled
- C complex128_t
- C complex64_t
- C Concatenate
- C Conjugate
- C Convolution
- C Copy
- C Cos
- C Cosh
- C CustomVJP
- C Depends
- C Device
- C Divide
- C DivMod
- C Dtype
- C Equal
- C Erf
- C ErfInv
- C Event
- C Exp
- C Expm1
- C FFT
- C Floor
- C Full
- C Gather
- C Greater
- C GreaterEqual
- C Inverse
- C Less
- C LessEqual
- C Load
- C Log
- C Log1p
- C LogAddExp
- C LogicalAnd
- C LogicalNot
- C LogicalOr
- C Matmul
- C Maximum
- C Minimum
- C Multiply
- C Negative
- C NodeNamer
- C NotEqual
- C NumberOfElements
- C Pad
- C Partition
- C Power
- C Primitive
- C PrintFormatter
- C QRF
- C QuantizedMatmul
- C RandomBits
- C Reduce
- C ReductionPlan
- C Remainder
- C Reshape
- C Round
- C Scan
- C Scatter
- C Select
- C Sigmoid
- C Sign
- C Sin
- C Sinh
- C Slice
- C SliceUpdate
- C Softmax
- C Sort
- C Split
- C Sqrt
- C Square
- C StopGradient
- C Stream
- C StreamContext
- C Subtract
- C SVD
- C Tan
- C Tanh
- C Transpose
- C TypeToDtype
- C UnaryPrimitive
- C Uniform
+ C BlockSparseQMM
+ C Broadcast
+ C Ceil
+ C Cholesky
+ C Compiled
+ C complex128_t
+ C complex64_t
+ C Concatenate
+ C Conjugate
+ C Convolution
+ C Copy
+ C Cos
+ C Cosh
+ C CustomVJP
+ C Depends
+ C Device
+ C Divide
+ C DivMod
+ C Dtype
+ C Equal
+ C Erf
+ C ErfInv
+ C Event
+ C Exp
+ C Expm1
+ C FFT
+ C Floor
+ C Full
+ C Gather
+ C Greater
+ C GreaterEqual
+ C Inverse
+ C Less
+ C LessEqual
+ C Load
+ C Log
+ C Log1p
+ C LogAddExp
+ C LogicalAnd
+ C LogicalNot
+ C LogicalOr
+ C Matmul
+ C Maximum
+ C Minimum
+ C Multiply
+ C Negative
+ C NodeNamer
+ C NotEqual
+ C NumberOfElements
+ C Pad
+ C Partition
+ C Power
+ C Primitive
+ C PrintFormatter
+ C QRF
+ C QuantizedMatmul
+ C RandomBits
+ C Reduce
+ C ReductionPlan
+ C Remainder
+ C Reshape
+ C Round
+ C Scan
+ C Scatter
+ C Select
+ C Sigmoid
+ C Sign
+ C Sin
+ C Sinh
+ C Slice
+ C SliceUpdate
+ C Softmax
+ C Sort
+ C Split
+ C Sqrt
+ C Square
+ C StopGradient
+ C Stream
+ C StreamContext
+ C Subtract
+ C SVD
+ C Tan
+ C Tanh
+ C Transpose
+ C TypeToDtype
+ C UnaryPrimitive
+ C Uniform
► N steel
C AccumHelper
► C BlockLoader
diff --git a/docs/build/html/objects.inv b/docs/build/html/objects.inv
index d24ee03514e9a99627c755189e45a3e215b2c89e..62853102ba4e5a22ddefef345a650fa254c3a234 100644
GIT binary patch
delta 23133
zcmZU3V^}3%*KTfS+qP}(Y)n&4GuigUo!fD;y{pNSZQI6VyUCc&yx)7y`SbmM*0t8N
za9{VrNelF1J9NA#G*^O{7$|XHE|Kmj*C>HyvmHYOn{|UDIRmSVUSEt(wQf}
Zepa$_Yz)^!M_hUD(u0*tENiYtBYA0I~_BiG$j#?qrx
z^ebe!pBWT&88GipGZdET|p#nn~3NBPPrMM
zp-@NDRa2H#IQw0*BC)zy*A#>T@Le}IKvM^G%plw(Fth{+AD({Ml<`}v+`Q=m4}A#X
zy$APyMQs+cnSW&%had3c`0F<$xYD+wxVfsk5~%2uV6^cp1caL7&FtL9D|?1{D0{5#
z8rG1q=-R7hhvln|ruAHRR~ccLA{EIl=xgO!;2Nkw{E%WSB*Qhk{$^I3XY$+jTk_&~
zu&vl6$-&G(+b0pg20@=CWx%SlD_SNtkD_`r9GSV_s*b5urxCR#c%-8zy&@j3R*`UgPzk_IdV-?@
zO+#${NQSk5h*K8xvNr4A+?Wi>&m_|N7V}C&yPZ`Jfajx0U(PxNNflw2OedU_4ASJ9w#
zuv;;^CSBO%DhY7+|NMqiK7gvmj?g@gA(sJc%c?v3k`^=!TU11?ALQo-$~?Qk1C2yW
zXhAnoYUrYBkKwYd0Zjp7
zj+4Zueu|=&zou;f4@tjTb8xFZn&;r
z!Ci1;wdTZ;AiXBZzYWIe(Wp54pHOXN}m(%#-`wo7ay#
zaW2Gi$^-dPRf^UrP+dBa9nfAVY@|XrSp&~v%hPXLSrlt$Z)DgB6W}j;4ktL34Z#Rs
zOt-PnUA7Ic(*MfPHmLu#Sjf}LqqwWaHp?VLciASK&fwF}NnPARZjp-19
zO!4lNI>1WRA)S8S>6;kqCA?A)egrceTQg4h++{+Q*&|*A?eoF$J&;BU{TbU1QyCWd
zftn08`+=?uF>k9X=1pW(Fu2RFY-I2Ej1pxr67j53It05altk$9(VHnI`)*HLkrx+x@F>b7zd;=^|Sf|dpm6F
za(6Q?l8VP8N(4aJ8k7)C$R~*usH)RU1@ARx@JvIuLn-A^F;!wD;DXrPJxb
zrOviFn{lhCb4rGaDb1=Nzyf<6N`)OUm4JhOvtCsQN%kalDBe|SWEv6j^5#c~NVRyx
zn{J36gXcH@8azLJ8ikic`L^Lf*EWO(*QQ
z!*1HIhmOdMY496_>ZWUIpL>i?Az9h(%}X+(qa-&Y_uu10Ag}d0ZR?EId?O~XP6%&a
zXYB}Invs_NTCi)X4$VzKp%sBW{N7(PKrhC2}osV&7Jbr3rvP+EM
zF!uX5YKYMIGpY6~ZzZYFQj%JjYHs+L#b^Y*#7yuf)@m%B?l2u8M5YeIrpXs?teA
zrE{8xs*ydT20gCc#u=BF-E7>(G3ts}QbOwCE=>IkDsOEYRVoRUF@>DijuW_6)VTtOHgsHw?S|$
zX<=f?bAo>y3}6Kf4v}hH6<2abPGkM1>er5+Huze5!JeurFS}3nar81hs{ybF2>
z;a^D~S*XSvofqjd7Hh`)>t>2axjqq$4jJgSHnQOmR4EF=16^iOp$`fJ5$7w(O9D&v
z`JnzqQ&`T7YEHE$en=8Wu$7YZKxf8WBL93Z8_p~{i_AZYBT{uJ$jxBvbA}~0tsMww
zq!DnNe8FG2EoW6Md}4E4B%^{+L+jK>y==)emZ?Lb;1n)gK@K=}Wpju4*E{2Kj3O1Q
zt|`4j$=@Q7x{9HDZ)ANr;lHsBe2lIc-9;UzIfTRyNR4&)QNYlr{E<;Pqf$>5F>#vO
zqXVw^%S?~SQ=xZ;Oxsx=x%4d}m4L=nsuwSQSK}|S{4E~$pFVw`H4k95)+UZoagZL<
zmEd--`-c#)Yx`>E&2ErOYD#*C)VMa(G087Q8xNvizwz{%Ti#e%O
zLsLwhc0p>Fb~!iLX6-$8+
zEiKilAoZoNG`k)G(`bYz(}w29|Kz>a60Ke7#U?omw{~oi84J
zb;bxuvfNJ~BHqbM7z>IXKu|0-rmY<|lRiRZp%xc78LR^S?NtN#4R^4V>tr1$C-&`
zP)|NRBg*POWIgGl^h-z&ro1EM;Ft8hL%tK3%PqX!C_
z$D|JYlZnKPh(YQ+G#0B&>l(6zJ`PHRS7y|&2~GTGI6+9$q!B*swz*>U9<9z*%WZw&
zlRYRjXo6PRg5Fz@XVfg?`O4DM=kt#{9P02UP`dH?X+q!I6)%rSZh^PnOU&%VdGa^6
z8uBH(^6eyeo^;mqUh@o8;zm`WC`qPsrXq79+WzPF&6Q+!JfM*#J*5a&CV^=L+4Q_<
zO;86``+2gZTMzlNecfFW-Jf*UjA8Qx(Z3*!{&4mHl2H;oU={-n=FPhGH)}LMYV|)o
z2)A-}+ew*|(7uNCqjT!16J~Yh*IzJ
zvH#^``jBw5UQhd)qTu6tbC^0jh&L7g#m#@ypcdq!Zo`jw?lMWQETPZ+V>HoQC3O@M
zN%w1?5yqksgv_K5ck;n^vp>cK6{XD${ht_r5Sf>O@{9=rqOthMOxoHC
zqh<>7^%&F7s?s5`SE|Be?~}m)AuWQ}K^fUrUJ2)iD*0Z<-uI6k&+glED@Qr?0=ytQ
z?1OmK$llD9=a&O2od>(2o%
zg?E~59W|Sp9a4zC*>;xlU*evAu-|OJy=zaM9SM|A54Vt)POlCm$ue%`AMbx5SS2ut
zJ-EI9wCH_R`qHzLrM!!BdkcodC)0owcr7Soa)V0@Emh?{tba$3hTRfAz_l2vUJNR?
zznSB+@gu29)275>vtb7%H3lBs_I$FzKL5_V*~dx$d3K`rD>4HGOrhb7wT7eHj>@%a}Q8m3C0?tsde7~d&!`g%gj@4wI<}E=VlI*D~t^8}^-WdZvoJ@gL<=KjT
zVT>%NnjmO{8Ilv9Hb3Y=D($9MY-0JZwbK6iuPCJ{>Yxtfye)&Us2fr|W7qawrjwB;
zefQpakGMaZ;THhzE&*ylXib|`&Pk++%gh~D)r@4;tJ(LM^W*_!vw9u
z5`8=i7AYLhbE)M_329b*Ml2ud(uR@a8xx~89)&bn7+t6z?#6>rtF)9TSQMc%ybXnA
ztvk(bB)?hlUZ4+6CJCXi755g6k?}EY*5>xYtutZT+w#F
zGqR{R2ATEx^-jODEBTCxZSi+Qp>Z{khS~MIsBcqJH0*fspRS^)+valqa_bal6~6mi
z9X88AS1!rWs>W5_zJFYwk$j9mZrM-A+}hDKJ%ihHa(YXKRrw&P2BxMEyVWo0R!%gK
z0fb#0hWzN@`Z|2{=leFOG62%%pKg~R>hy1VQf4I9*q~fqVc*V*b
zbQPYyo0_`Wb5}m}%Plri%`)tA_%tEV1`_=Ldd0Vn#p6FL6w;XB`#W!%=JC>#{vbxC
zjY(_kB
z_#=|{H$gNlLV|nTZIxowCB^L~qVY`iOcsvf@gKyM@cr(&5YR$w-$Tr(vA
z>EIUMK*OVuaFlQS(BY2K&cadsGh24EMo1;@=w)}tH{z_f;f1v?Ox#tv8H<+fOnur~
zV`#{K{&9nG`E3Op?E2_!=u{64hN#en&6fl%Du=QX^PPOP`pi5W5F-JGYgMG!HwBQB
z_NZ%0uzuSnVNT?YPFpQg1`+2o6uQl>jX_nbGtH^yr3!aQ^7!F*seAPHjpG@eqB!&)7=6p1L_PUkoaceL(6v;Gc}
zz*32A>$P1OB3x?k2Ez8Gi_tfqBq$;rM;M*V?yqqqs`}Y6ieH5R+2g?~6ac-~hklC=
z+?Qd5>lgo|Q=hLAZIAy{Xly2{Ksab=kux(Gi+k8Hv83~z{2#E;vn;u|l*d~Kj5VC}oDSuJ1Jk~MLE%<`q-GAPV(PwvNbBicts9WJWYv(@696{_mJ
zcRVTz)yhE3t3=^g4%#AdFa?QU62NmAyixyOj`v`r{{Lo-|1)CL>UtMsd4yvaQL#8R
z7FPaWBoBW421n%1r!QVsqL?fXQG$*d%^hg$&g!e`Y{ll>%IZ8Ro~iQW!<^I0`3BgC
z_epL<4bJ@07gwshiYgUC&koX@8#6nk51n-I9KI
zoNKZVmfUINp{&Ak36HQyq&WmF!`*Z4zZGFQC9lp>n%h
zKYRM&$9GKhvzlEmnWJ=KVx1Zn;}FYn>TrC9`gyOB(r@6qX9@!dgd8=jbc+_H`IpOk
z!priT@Ej2c4M<1tpKpckM~CjJ`qtPp-Y*YP5=mT{BB;wHu77)wdfT|K@4nVa_2n}y#R>rlXu$gb^qiLgtv?N4c$CPsI*~c{~z56nP$*rK3QR*xtO9OYF9fEKY7Ns%#
zNN;$_RT`5LUW>B>(c&X+)*pgesQY9D^-;T6|H@fD3dn9lq|3e%rtIEZ_;$w{69@I8
z+E-BF77^I}SC|bYSD>b3nAH+{v=K@Cm+EyJF?8_QocYa$6E^XLfdqo!v7bPhi3Ah+
zyD0KA;hU;$Pm+&z7q!~byUVr9yFmMfwR)n%*CchmGkPqGVFa=->@l5N{(qU-SZ)?i
zMtAPd+=q+cj&~XZ*rU7cS8CZ0T)M3m&x|e;n?pa=l30n!KmwKc>vcZGHeRWj(m53u
zfPgO_+NWdmizSNO0MqJCI8x<*Ow7H1mX*2{J=6^waO4yJo(rhoIt%m^!JcsZiKysh
zYY`m2Fmk*d^%+~Mz)qq@U@)~rlcpd{X+BZ+PwW8Y0uR}lK1NA(*H05EQ_RzhIKKR}
zHC?&>-~q~Hkj$_vLT~Pg@r#P%B77VzD;*bzpUp?dXeu=brDy`|Ds;l_qEm`tkG*)K=0A{DcHZ~31HcbyI*xC5beEvl3WMq~Y&O;Qay0aQQ7n+Ak2
z`K*p7zkFuo1gRGLpIt9NM|Fl;sg(}=eV@s<2I>Z7mx>hkMbJVFOOQ^La
z#(fiX(7p-kTt?7w479Yp+*t3#)83nPq<98^
zCd2O~`_JeZ%6+=ChEs~^wxUNyp*hGQ8Yu)6BIjGc^+}$N7gY%2I8;&6$&-XyRx~7;
zwN8Fgs$@@?ddUzc6q(k4@L%;`%gKJ|tmxVj-qnq!NBQEDhU$_Sg7Yzng4Y4O=xYlH
zpHvW;1uoLQStr#KFp-&e;(&<-_|a~u`Cv{ag58HGH}&JI(YZ&@)LU^$bmKWeBBnI?
z7JP{Hm2#94)BSIih?^-V!;`tAKZ2dC-R`^!L$+^wn6IayyTPYD6^S=Gi?&}A<3(JF
zS9JD^0?Oykj6I)&;6m<%(_KLt5yOlMRbc!|1)^Z}`*#}bZCMqJ7Z$9GW$5(1)M|Z}
zB1FG}j5=LYs4*%TAsLOVDn=kkmISTM6}IN!GR)w6Yz)5ITznDveg?SkOH0cXJTWZV
zB{`{pM6TGc)b@EXnok(p4m)r&@(ze%Vn_-SK
zC6wG&{fxOP6ahJ8W`cA}=`_C#{UzRxy*vg^+rlTl<2%)^q%`8a^u%(s_?JAzzcB#@
z*y=m%PMNWNyfyXd!ZFD6{$KR8Th2iTA-sKBzWW#&2iL9qM4Y^<)
z87rP+Iu83YCXe>{DMlT|8M>eK!YhP?s^#hoC&%3QQcrAkt1A-hr$_Gm9QVW=`XrP$NynUaO#1aBqHr7iHbV4K~2YqwFc%43|@nOTx=k@g8isc
zBey||jN-Hz3nVOQc)u=efb^QRIOy#@Z2yhA6~UZ=d!T1pXfn{KxFKF^uBU-yaM8@?
zs6*+5(haU^{ns4m2zyjKQTRCLm{X8Tx=jg9M(h1FZiVyMX%Tf~tK3BLxzCDHS_y**
zBZmL8d+Ka&O2dwP@p7NN4dWLvh4un46l7DyeN_$!EqokAN*xD@Fv-TDxs8q|=am00
z`xWL492lAQX>SORP=VL5JDw5IbDo?Ox
zWEEib@EhP&Qv#q$#5C0hY(w1x<#!v@X!4HC>bOY+QSol^E2{9(IN4mRqaH^(j~M|gg&33J
zsUJF~)3?F%D(|uiC;+EyUGLaX{8QJ4ep0(*J^2);JgW&}uL1Y`BUtVsXV?fc_F>bQ
zAcU!}3UY4fpI)HTCp~>9O7&uAF!RkbtR+B*RHH3qtI$tdW$E1*(0uJ#Gs(1*O`GGV
z+hJ>jf?jqeMn1v8m+Ozk9=Ok*Msp?EYScb8!C)P06EGtzI$C7(;*4Q(Mj
zL_a)eU0}rQbjDbzQAZ~H)RlLF)OuS_??{pm+HRXOy;A=r0j?OXL9zi|%T8o?@)0zq
z=u9TX#<|LkY7p}0b3cLisv-?cO^*{;Q}_opw3soCpp6
zgpc#$>mXdVoFweRHzQ>zTe$GR5x1JpWnV@LpH6>m9et$QIpVVL>K*O-$ERJWTeRB!
zXd!~hkqz%56;7W=d==3BIW3V(k2TT5PAyjB#6J8q5#KdSx+@~)3FeF=9|FY^&b-(_lI22^|pxCuS?wckK
zEEgrWu6m)KgMIader#8ONBhOH81nYFpF<*Drfk6<;BgmPGACPvkxEcVmW7&-c_)@m
zv0_!8(GErp5_0EM^?Igqng%r!@9sbT{Ib}eNZ}=oQRnm<4eU3F{&R5k^_407Si}^P
zp4GO|u;5DXhiC$3@}KhRm@~@dpK?wml@_P)gcLmd==i#2@vL?s`I&uXw70-yYYd}K
zN5-pPCI9pLbKu!}j`8Z-+PM-2cXEuAI1=BwXhZ>x*Z}93cce*_rq3Xj0=HSGfe1nL
zQ+)5B(>y8X6f$Y2!!)Zj|Ia@0Qj_KAo8rRIAbhfcYV|a+yQ|bp$!cE$aYG~OnhC4)
z6ghj`dLgkl1^-hGIU@BgoW}1{buuf%rqxwK%IMS-Pi0DxEwLMdr*CmSEL&EZESn$7
zoPU=udv*G`IvLhOK2AVZQZ8S!_Qm2qgq=3hcXS^ARdX5pKcg|8jwr-uI!2QuDUOFJ
zkSHbot{_q_1~*{`u!COWpn?{}z!*iGhav1S&y)coA`m#@>D|avgS*aT%`uXzJ<#aJ`>_v)8Jxi0!hB3nInVFd
zo9wjlQQ)%2pQ#Y?Pklwls_Eirlj*`ez=
z>8cpUGM6suR&Zlwid|lKr+8zPi(OuM9rMETbUS^)K_Tk?^$=HrYz8|9euW%3r@CarWqis(>I02EzYBK-&7A6vBYD5L3mmvl
zdoYgY*iK}A#Zez{!0CJ^z%iUcWtSg6Ky|Z8RRraM&x$(Xmyy`55YiwAbBAZ5C=FOg
zHu>!iJnEtTJOU(6DlLvyuelRHU)$=#=z*5=R_Z^d^DxY<)3*jy@&T~kk{Gq0_#Fx)2lQ)(LTz
zStx7sQlq?j^&2kl2-qwoz=%JCMm!-Hs{?6%Yd&YRn2J)ZT^GxoBwxb(w@QJ!VO+;E
zk3UQYpPFQ%j9^X<=d|7a(ac33!jP{vm|wph5H_63kG64&mA_Bi*LFJEoP#*M
zB*m=bgG>dGk0LfT
zrv`QE3^e99F@08o{ITY&jY1K$nq_u9J&dPzB@ykHyvuVp*zCPacLLw5J6^h7AF5Us
zRUlV$o814}=O#={JPH;4`EZsbni@Ih^wzB7#2+&&GO1Bs%rqn~)6
zl&m$6oTM#6Gz5w6a`@pVBI2Xem5KC^^_`ECf*VjzYx<1w2jP1F*jGaFZ5lD-LrCF$
zTJf^l?dA^bRp4bfblgpm4)SL3r4asU8KbwCDN6K2AA&wb_}R(N*T*`Lgivglw}l8Q
z&+r
zkAk+0^OA7OeJP0~R1Cj<{F&MZ-l+@cX#`0bl)a@G84+1(0@MLxFLIpzy>)y%h#
zC*QtOy3>pgjvEbFc^s+MCtV{$7ckJ>Eg-+Iqs<6NY_GOAY+M!mX)LfCFPxY6fnhUx
zeJTt2qxxt6eUN(8>j-oeXN!Dw*Tqr51~
zZ4O0B;HGq7!f#VtgbRn>LF(Pa24-*KzSS)$bHs#{FVcNz2>i!m!Q^WmjBDEods
zr*5U|dtdZVo}T85`o~io28uPStAQiS+E0HE_HA`fA$?jFr4{$i<+`u)xj}gxKSWwY
zVwTmt6C#yx%Vr?ION2}UKKftBAuPWGyk<U7%cOJkWM#AkZwH~08MC2f;z_ARRiPlA57Y@bIC>$N^>VFGzc%qdxVmW5*C
zchNZ0L-^_uQGq}93FZY*KsneykP*w=iwLG>2Z7kll^stIW-TYa6lNQ+WMkQ<9Cu_;
z4QO#DQcgd5Hj#!!A&O>|uoN{3z~)IdHWxC%=1O&0#^eOW%cO
zS(az+LWlqY)+DbMBt?yA@e!{}5f5g6i9{58U-%hRyAW+i{+zKV+?7tS5*lL}SC6vf8RH*>e
zJ3L#p@IL7r)B+89+wnd2<5FVWmQqxAObqDhXDc?CzCp1MDiu)cmP9`M#K3L+zNr~_
zjZkaKzBseA6#<9g`_QyDm(1Q%bI*y+P=)|(q(hheRD?tQTm|g}i-fEjLWl9#SuIkQ
zKmH(-L1?TAw`_1k9BkHc{e(pmN+&i7TAUF7r0q)A;4bhx^os@l`nUa>Kt!UXNDR?VGXh=**3`#8tl9W)nU@YUvvIr2tgG>?tIKW!%dUj*g+U!h
zq2GXaeCIJgM|7iZxYhIz!IUynIU3ID`9J4`+svRb40f{`m_w&9eg}6~QUOfy
z@K%kdIA`i`{A^)Q%$JQm!GE3<0HGGnM1~^fLD3PcQ;8}nJ4b&hqtPNxw}Pi?X2bVJ
zSI_1>?856em*=#eP_*%%zrn%kAC$@Eo^}<>V$g+3vg=*5494L@*>3A;jeCcp7tz4y
z+EcXeg^dzbbH~Vr(#XiW1+Lmgixl+FD1@5H&S>iaIZ^jgj|w66sOapdpl}PWDXnz+
z?-T>};e?^E_z>;>h#=nyt|bfnA^h~45jy@gMz`#RqtX^ZeCa1g>OQ!BL*z;OXuT}X
zi0U&)nEA#L;Q^_H@(k9s$RAm0)_THJ%+nUpCLl2v)~eceAD>!!zqhoTAt{^+^xFWd
z^q(uPfP%D3}4glm+B7VzUOE=7UsNtg$;mZB5*E*CsR!
z(K}?XTbr8f^9&G>_rd7d
zyO+dA0BbPSaJ4)ykW4E_E&?hp1=TWV;E>8VMbz!U7&~-s4Ix9XBwb&2a~sw
z0~`v*oP_1eEOuVuqOJw^ThwwR29YD7b_Eh3r-N5i+0t622zy?K@?ivcAM6eIzwV?27d5z3P$Nt_O0Mp>&0CIM`V#d60EL2z?VVMT->{5Z;WO0dkW
zTLAt%E%g~zzU;TrOIJl3NP;esQXM2z)GA3(p8RNrmz0u^+wt6R2>MF(4b{kBW`9s#
z8H-9aB%9mdQ?1nZ0f4+Ugk7@u!jV^qsTZ6LDXcQ>+Bu^61_o)~o^S%QH#>>4WA+0z
zwcqm#y|zFoVB>-VM3-F}NyK#SH_}-lj7`=^*uq~s+?UjuW;M8KP!iK2u3dP&H#qF|
zr9)U~Xu^*RCvBrGwSQguMmLyxcNF0SCV#
zqnz-DdR*`&kibk(Hdni_{-{@0zS%v4gB7)h+!aLH_q`{shqsrBE;y1{eX
zzYTy!ZJNc)L_$sD7sxoZV>SY+lL
zYM^wBVqN=Q9rJ4)Offn@f%JGiw(GpHZgwboAE|^SqWo5zv5ySj+YXk?-w7F>SO<5T
zFFgx=F8@wf>w|t+jmc8lB3R*s9BDcr>$ij>JolT!?}CM`%ms!itF
z`I0dRPpwCg8frN$8W%W)kx1(~s}Vxk1Pe-E;jPC~=*3|OE>r)zHy)sWhf>vQ}TJo5q9R}la
zUcOl)8m@c`2J<&X8UqetfkkMnpr5pPJP^%hF->J_S}TygzR
z3p%jxw5auVhl#Q&pqH0!HA6UMGeCK?Ww+~Kf1C#vI2~MSZrrZ3UP;rMG47qPjFjgX
zK<(zw(2g2^ne36@W;4
z1y$5cmQ+i)BYx+Yd?=V1Zo@bMLtzv&yP@|FFo?oK=#StFR-Fp}8gK#zh@$_&*bEEv
zixkh}C0v8^@aI-HZFf@gGLL?}=X$UX;T!sQ#D+3}Q7oD6ZsdxTd4jNR@B{QC&Mdzi
zK{OzopJbri0?4FJLmm-~nEex=jrk%7#DQyv$2oI-iVezZ6VGHVgkq4|aZ3~Q-Hv7r
z8iqNNo{aqiZXFAgvWFr7iH60|)<6w$U283paoGzqUt@?3$K?R1s5{Vz2@QNlk~pz5
z&S>oTsoe+iZQusCT||I205T@I4>d8k(=LL3yuqH}ZKycdS-66#Xv?Z8CGCMjb{M?n
zn;l{l%ieH?e;_QjQ413nIFFw}F;s9gD4m~Yrce+I9YnHfQ~Cx%9`?|H9dQarU32W=
z48(5NhBBkq5(%Wf{=K;pe1t8@6hX&aN)s$=%OXo?ZtVnfS&D~972FQRo{v)mJvoP<
zNLXZ>fj8S3#SVko)%3#|rwfvTIUERu4!uFCLt~FcdI5m6O`5{JVf6*}PzM)}6%}x;
z=$4nrZd29+Ci_7TTtzrPbwiS=N|nv{c4fIbP`B!MbYwEaNlhx{;l{TC;E^A!JKm1l
z25P7MGWBna2&A>MuE~1%!pA>y*8n?+oOa)g`!h=?*$0p@owK(GEjd4!U^qS-8_S36
zA^8v(Vo>X)=6`kJkR+cSm;=x+YfsY1LdLC48Z%!}ITb*K7F77-y8}a##G&{w%s&Dn$O@zt^-giSl}g_Z52FVVcpH6)2c2aZ!-4q}ZGbcS^GW
zL#Jm-$QXf;7j!UXp0RrSP$6JM0$wz(^Aa$p;ZtlGJc|6+WQ^Ii_~8j{6g6~hwq@;T
zy_i+SL!4L3WBODaDyz7C(rQ+Pn`pI&)g!jz#1rvw#Y5qriI*sIs|UK
zp|VX|fi>pe78JJG77Ai8{2>Cf2gtOuFA$kEWPUoCbd}HdWaU~89%c~bgNZ;AwJiKq
zKnyC^@ZXfbu^`_K!o;I8=M2W&LVfeZ?8(fN{uPvsVHti6K7gX9rg
zy+qXJyOgY^F-phMwkT}PqSMOKe3vD)HKw_1OtZNbK?K7E@SjmHT|&{R^3|=Wid?Y>
z<$*PDy`1JDJrOeJ*FAEoEr5C~485e!!fZ%-tg&L$gfgOWNwLCY(o4-drK`5%X2Iz_
zRt<8|wOD)FV05TvI#uJrZ4IHW15@6xJXsQfEbGM^xVQa*UJW|?W7uHHi(bV@9A(W?
zUFeylb72yKB1e=KvXR_C`4oMdeaznUHEeUbgtdw!auHI{Zi>N%yNLqWZ5=D5{ldgN
zL?A2<69qWL$VZ+}^3=44)61=XAz5Fz(4@^VZXC8ydX9o3meT@{j}D<2NF6q|;wtIk
zyY{Ewp5Zx8$L`MoN7#lF?6o-y!Wd)3gYVf0Ra1+wxUvj^gj%6%I>|3V7WyNQ6nK8^
z2R3?sgIrLn^-F42DHF#u#-gLgii5j4OYx;!OnxFoyCWrMAkg`3do;hmnAh3i`^3L4
zs#oI(g`%GnDN-0W8+UQY`l%l*VX{|88wIm_c}&O^3n30m5f0ck*0|aBf7J|6MUjs!CF(oD
zePGr8g76k7+q5QbX+|05Ymf8udIyzfI@Vr?5k;exaEdIo=WjWiV{pqISwF-w0AD0E
z1Zz-+%UcxR)>ENnf+6m>*7?)jeiW~QbOQ*1FB(E01pnqhh-1K?QLIa~v!riJHy17T
z1h0Es;bumv8jF8PIj2%jIkf<15?(v_z+;VP%zG$)?j#3q$4@TLZhB1XQLjPyq@z}q
zWNchOBtJmg1D~2d&Wb0((2%i&in~%w?{EtYutA<+
z5l*6sdJIHBZDL@f#^pDdd;ml#w2f-1qy{UdLIxv414jxVz8O=&WOGo0wG0cDTJ&us
z{kB$yFNhvfs;r(?ciS@9vPpl4TM&<0vmI&{+IXIN%dIt~90^Y9>^M@mXE(xDE7dF=
zNSfbdbSqgE<04QKX}s~-S5+un8Pw>0K`FiyQ=ytm1jA*3Avu4$Cs;@7VK5T{LH)d-
z%@gjt7)rTh3G;()bepDPqL-tIweh0I5=6Ts
zpWe9yYV_P^esvI~%k`GkJLzi;um73)Qa_`cz%bl$837X}=k|czXrrRKN<*yuO3R!<2UIJRIgzj0c{JQWpeXq
znf-nSl{G%bl%*q@kxT%ipcwB=Jr-yhB5y1cEu6K}f3b$1amEcrbIWKN@W1WF^Z!bq
z9*W`0q>)$2H!BnJPeb98hg`^XQQC&0kgN+l2r;}!)
zOd9qEWY#q_C8G)`og=N7wL~mE`(PW12GwO^hOIp{_j_dGyrr}0RUASO6DsiMNgG9v
zC~dW(lQt9$hwH({=-HyEuGUCgFv9eHg3aKb4cu4+vT`FWf*FO%-TPv1%JGaXlrA89%I4Qf|9-awA!1vMh2;;vePT(
z6&VKwW$7^MGhCzLks4%@;{*lw%K8@@32kT>Xj#xy$i~9Wax87dw%mC5u1JKzQdm^$
z15$#M-0BUl%KqQ~HX&Qgeuh0D==GU-p|^(<4Wd-lET-0A`NdeaqPQ&tK?XGM
zSMcDn!{`fOfta$nr{idHYgpB+hp@zTk^xWOb%VJGW^D4+jLT)@;-(`VOEaUDzp}`1
zV!MW#fIV3y0+T8pd`xI5gW;Rb%t(Xv?~NI=a4KxmaxMf<4SCU6CuXd1Bmmp`y`$)`
z0&*5T4iPpg;6KEob#)pZGmNsOxJ8`1tp9k^*YV&jkQ~WutMDT%0e{%VXAv2k@Ac58
zWN=c>#H0q0gX;**u!$pbX^sOv{#VE+0Pd=|OX;3ytcFvUur$>*vHM}J1yavc+Ao-snlwbi
z;ui#!nV?V7jcV;y19qGxluayQEpV~qd!=O775WCu2>(w2xEV+0RA^IP0#O&-Sq9H6
z3qyqH5zGfyXFcr1iiuPBu!KRJs+OSk!MZGoL$#eMb0U8SZz9@ICju)``;Of>iBsQ7
z)QTe9Dn#zECW5#xNCc6(k0glggvGw?Tebt2RQXh>OE2(=V3|4&hGExrti*-4!qZqq
zjF!4QC?z(HB+2YFH6fHWOCv9YwaC;>Et;GLoEQo)i?9ifOK#t=Kf!&B!MlU_p>YAuH&)C(h6Z<;;Jtduw9
zEka+N6vk-GG9UfIhjV5T%{Bs7$8}(DQv{2!{_0FFqV_Ce8P9@@sF`j1XjW>K1I5LB
zFN8rtYxC0>VR{@A!QdMsxQx^R0Ttb4XcHpv(il||`?R4hQ#YKG-a{XM>mN`3=CC`9
zh}%-&gu*nc?bX~?TQ%%V9k|KtFmyy_c|sB!h*dJoWabcj2Lfbp*r5-gm%g-+wyss*sw!ryB5dz
zWYgTxOr1e@Q_puZ72&{yF;s(xf!%jw49!Ha%n`7zYB4zRVJR?wRPoSLThRi6`lW%z
zh~`jmmYS;KF#Wd2)Lo1|swEc0+n+L2V1g*ghr<$J$je~I{Fr;Trxqk*Vmo7Q2tN^a
z+g0ZuLJYvFJ{(kWh?YSco#4<#zMAjip_+++ix35H+%2_M&8f5QS<2Bi)xNBl!BK&i
z9S32oPR*#bL}KuN3a2N5*$NS7dI%0-{bS1tBNm2SZ5N=qp_zc0@FUApCr{whVpNG?
zumjtP;cpYy>N1h3jY@E&)B&Er!XV;>R*D!1=8+R8$}@FSgs0|IF`ipMAv00rL*d?5lDNm9E!rxA$W=Hyr=Hef+NdPVErU=_|G;s^->U36bAl--{Qv(`8G&
z+^CmFcKu4Pdic~xe6M7`yS#q4%L}&KYU$fo7w+~B<-hK>BB|Ir{e8?Ah>07S|D}EW
z)DiFB(cFH2a&WL@PM`NYX{SDGc)p%kzB}K-+}G!P4>M#2Zerf6FKzcOG(NmOonM@q
zSJ`|%rM=pC-~8CL-x|;>eEOO|lkllc_{+06!+fJ2yeo=P;eUPqqc~Qm#2-cdsjNOK
zf$R6!h6?JEpam(4r>n@d#ERqo@TzN1&5P@$>BS3RTA-@0x_tlRasSh6)0qFWenuhw
ztzvu8D%QIq{c_9urOG<@qZ?lhf0e!5_};0Qi5mN+azMkGkvKoJOU-XHm}#NWNjU*p}5RlUPR{e+j?eQwa9^KE-}NL2^54Or^aiz(_JB
zlEP#P3P&T*Cu$x~uodP_3~BEk)nw^y
zf7J)8%$c_5PhY*idpONQYIrKShG)9rZ4a!f^ZM=^A;v;|&daMwW?5D>g;b^Q^Qos(
zROe3$nGu~oGxYxUnE}aHzYuAD(Gh+1iwPCUOE^nt>83q~HW_8iJxk~bC&%A5SwXEe
z&FFZ3ZbV4!-B%$!tx-=urzG|FtI#0xe{|n(Qc99P-4)U^%W3cWl(MF>)_O*h?*A3d
zH91LW`3xMCCYgFnZA`6@p0;^q%1mxdt1
zjkNn&!}t1kCggc7s0~_)Xld08Q%;%xE@tp5u|aP2(u99kQu`7lw5)h;NN8JhO4}MD
zwJlTH!wlNPAnn1B)}gaHL!^!&C*IJz?nQ2hy?tw<0(~eR8ix%B;k{r~qoG9fl!6(V
z3;L)(G~^qO_xtiiu+KTwvpS3Ef2mI|P56uCwH8$NR!Ga;zA|FwIn^>N5j`tjm~hT;
ztp@!nQ>UsDX^ylh+0nQPYWc6P&9soujO=clqz3q#cShvpv7nZFBcmr)Z;WW=ZxuB{
zYGm}Z>WvBQoPr)MX5&JZWYZ-FOqVn>T{B#qLHEYgwULKiPOZ{LM$f9=e;Coy8`5Aq
zGHyn{9a%*#;6`YGSG_W17UK6A)JkfITfH+PCkLQI(WhbPDL=6T`%E4K&tiJ&(@PWn
zy4>ZG+PYdHEqD9Mh*^DA`9NBvyOq=PpWm9O@WLu0IV7~Scy2(r5$FZPfw+~?k+yFp
zXr=lRdCmo40KACYlU`m-e{j}2!DPL$37i*^d(O)XGfs!j=?rl?hMX>)(;eb;CpeS5
z{*+6hJ~3}h+L5_q?YuFgEh{o>Sq+eul^N&f8Kj>FSwBzG?#lGzX3ykD!8e4B5O;F-
z^tW$KRFDT&L1D5A7dfp8m$wrtR3+o7YLRtm&fgKbC-1x(XHG?Ke{LaF;yWhwXo6vv
zn@?n@#Pk#J?XMKRU;9MAPBR?0
z#B8+^8m{K~B;gBT$ZC`Ksa~drDwW2e@i;mj$AD+E6@{n>E=N~L!+Cx+&0OwYNHy08
zXc*PwDK<$Ul06PNe+mCg!GG=(OzcS^E;MSF7)=%ziOY)G1x1skMCu}+;j}%nw_l5cpvmlT?_OrP%jW|DSqQwaBV
zpK5PUkh~Ilf6r^L%e+qXlMFHDSIs%HO;J|nL*<>^9qrv)0~M;hoT0#3N=JWwXF^^Q
zmVho2TGH}hQs6RWJAo!v)*I!t9n*T#?2#dDOP)m1DL9sXLh-Ct|Cy`TroUIBeQHGI
zP`MmEnPbFOP}vGSn?W(=)N;)A1(_R_WKJ&1e4s3If3w2O$)%YO6lZQ&o)x)3tAP@&
z%!;(4muWRrsFh);k~lNcsD5>95Q#P>|csq+}yikZhqD)AkIdNjo#
zSS>07e@TeEJ_;EWsZ3Opm-^o2d%{5=t+ZCHyELXswON?0Rabmat!^SCy=qpe%pB@t
zMQv0!vU-iWDa_O<53{woiVi4qO)#MgVgFr!^WEgmJ0zeD@ooA(jlmF4JvlsZX4ves
z@LpYBde{+c!Aanr?eut@tq@zLe})@lwzI$;+xhVn+b8P2zN<)&@srVhJ~;Nod83QO
zJ>&WLIAK{3b`Pq=cQoqJGy|;DM?s=#Bs5&j^9jNuWxbT#JhFoE7H1_>(D6AAjd>PPsy%FRIt1lfX!q?QyT}dnT&UO6fId-x-mA6t7ff
zi0EnY!icjOuwPh}qoA7IZ}dvgK6gq5UN2*Mu}mfQOkH0Z`sFN2_Fn=7sQ6FI;jAQqLI_l2r
z2`z*=132q^VPEuKlU@RIU5IlMebEtJOy($LzTKCE6`oW2d)D^pBvnD-Df%SE6a$5z
zk3S9nOu>Kd6HFXZmSt8`7AQ~Y?|Is%Q&a@Qt+`Wv`a*e{_o#Cl4g5vY_@fzq3`QT?
z9bMYnxlHcmqJQ4VrN4{I=oT(nv-G1mkxG3}qn=HX5UfhwvnchHHBrj}YR7ag2!Yv+
zRFAUrZi<{{wY8So#5y07x?(9Iezi@fni1y4RpL7$^=O)5!**q{-H?`s?tkPoyq&jGDij3TqDNIs!aq~+
zpZf$8<8134bHX;Ri^M(G`S}DP79$km=GB@ce@jU|`O)!wJ5*)fj(D3@e
zy2=Sud5IU-oyVovFGq*&j|QPd{2N!;|!mk1;KZ0Qr~ej&&El97Zu@lrY7|rN%PEr
zgtqj1717a+Fa8z#j*`e3`+Z@Kuw+B!e0q
zG0I379U(S^W2b$os}B8lCgg^_@>W7iI@UjLlYebK<@NLC!0Kl@^;LFAiE9cIbt=J+
zX7f`$A*}-KD--5pF|Xn;a$5e&TdfLh9Mt*F@d#}02GOIQ>*7!1hJ++2b}J7i4w)h6bv+m7|gU
z8-KfA1g$@Je>q(Lx4&IJnb-M5_if_N2fFV+&wWbK?8ehbw!8G&?^vrp-(5Zztx0_P
z{B-Z*)7MW->x}MSzt8_I*P;z4zcJ0tlPQvy?N4R7(_T_JC2~u4eriVO&&zAf7FjOQzAEnm!}hi-wa*)Ihy{4>VKP?J3mL{93qd?=W$GUTq2L#=W(ZazOBkb
zd92NeP69Vfr)OqVl+wG1=;$27yt#BjS4%iqnBVOa!FgV6%?U3eH-wj`6NLZ%-S_`6
z=TfO}Xw-u#hMy?}Ki}d1JPBWw#{>_f@b$&WU*pA-jY)$%Gs^-xI&kyV~b-cWY+^}7qnh`p5
zLPtwDX{vt6wU-l*q`sxeuXhs<$&~!K6JN1|e)qkB)GcG@1%Y`}mz#a_Th3FW12Cvh
zCykkE>2tp0l_<+PBFx_j^CI6{{1)O|TBT+oNa6mgv@wgmO~kZHRNV#3ag?24+3Y6T71k-zeQ&5v$@u1zP&=|_sM8VFs?&LIrq0}g87UJ|
zaTIK?cDc{g;znAh%S{3^jSj4W5ZX9hovWx$m+M!PYM0Q!
z)kB7oT1Cz;4YjECAD%Ff(z4f`uTP9))D_F=14<*MV{Tp$n41f!A84C8kU_of(h<}o
z#Y%UAgX&DjO{dhyV&+hD7SyS5erKq{fzN57t(ca+c|#$eZBB2+hm|oksOs3p{}PP<
zb>zljW7%7EdaoyR`RQjtiGQq{lb9y`k^FHr0h1)7=T`wJ?hE
ze*WPDEqFZ%+)$pLO;P>$BZ20}A8#ps{Ba7N)OdPr$Drx_T>kVOfdQ#+8Sr3&;R9)E
z>BIfh(ue7(r4Qn_qw0*5z~?G+%XWD@#hH_GmEY$opUzcY9A%kvwtxGnl+8g}w)?I~
zOIr6HL%GvvIN_A0Wme1QDla|~^t#C0YH@iy#aRnNl&MnR^5DS)!!K&%k2>ij2u7?U
zbi>zqK0*7BgJk=|jxds+C2k1M4=32*|J>{z7A+RHPV*d3>-wPH8i_-#zNoyobqC*4w6H|%hz
z%FlDjPvSitPS0tosC^QT>eIpKE7f^vuG!3*xvv;Py*#su%YRQs>lh=#H}sdM#!Bo2
z`}n%4W;V>L^o<(V$L4yJ+s*K~H;VYJCfDaQRR#}zQN;VI+@1n6z>-qPuadk_uhXhj
zH&qvPRUTgFsWu3Ar|Mujs_Ln_bzPti_W6!g7?3vV#RJOg0d?x@Q<5{VP@zwj_Cv+I
zWk)GE>G_e4F3O=sI4b>Nca(Gam26J|^^l~2>8$>kbWe-X
z`F#zN(SJ<8UhbBAc+S5sJ#enuNbZKUd%zpg7Xdx^<@MrK~3uEh1>a6CHHsy
zm9t%QqOK#_e6M(bxqWv!Ow;_`6>LVN0l|~s-UzA@QKxjfaIXhw-a?ww^;fFXsVG
zXheFmFO$qLxCd`0(uA_NJM;`We*WRZp(+pal{^7xbvwNys`TOCzn!lRgte;RIZ=(%
zyf_!B_Bi$o6#WGUodiS;#pjc6T1^2>lf7CPf5YYX8MM6%TZ3}CB)bv&x%vAnQlGbg
z>Bz4yThYsYF?khNw^OK#DzxI~muGPy-v2}We%H}-$JD0+w7UMwzn-;~>_ts?t8-TB
zRL^;{;YIc+*;f3epQ?pvpFiRG(^>jM^QgHm;``I1<~j@c?yfrS)f=u5Xxph0iQbex
zf8Rp1&rY@1zW)h&zq_l1d&;|N08ghP)JB?Io$9%MgzTG4j$S4-g|9=^PI!)ukXLRe>^;4703PU>T>7txm1c5v;sv`m+yZ(?tgl1
z+S-5C)YSQ%6bK>pEJ=-;R0uanpP(tPk6H*@FLC+Ohr?(XhEg1dXr5ZprWV1clC-`m}D-r4=%
za{J!uI@Q(Pt35DhoiIrd81`ggVQ9|?tn*eEgQCeC1L*sj_bUa}udL0`$SVaSP*9W%z1XfEN0)^0m}I>*tOlRqsF?-_Y9
zFA1{6e(895KDj0D_icFW5q0YOdsc}*Zx#LeEE=GrebM@>w~=Y^;k~L4#0K?Y+!5&=
z-15F!N1s!zGyn5!>c+r67vD1eOkhbIm!|B#V2Q5Q1vw4;q?Qwy+7y2F_kI37dk-o(
z5SEg!+$02PR7t5*AwOT`N^VdlU!z+873cfv%=c|&l3eutB=PF!u5_8FUO*=Fk5dca
zoRj|f1KMPka{8JQ`>8*>e!O*_%PwAfM|$+_Vl9~K<4W>qPXdDv9F0VWxXhL0#R
zFHnAFoVSd2nF{FCt>^Hgisz=V1@A#Mp(0TvpNKunRDjsbIKZnDf*eI8}57>y))gG#|PX3y)jv)CV|HxUEmBsrc}!
z(1$H`*qrJT22rBi0^}}p+CvA+p#Y94}L*3KDc;Yyo!1R
zp1lrkSiIFXGskkq!IBL#Hy(cO*bj>TE(ybo>`=+%~?=G-ym<{hBAb486G#>FVy9Ib-V9UHydjve*BT9Ua?e}
z-6*&?^bs3UM#2<(J-K#F#nJ$sIZ~2#Zb7-M_Mi$5^Hr|G;^OZDZ?lSmt+39Vry4%#
zBa&h!rNmew+|IsL+^l^hXZtIMa7ZR)jK@$b9SoJZBhJ6Ckny&Q@y~xD33Y!_{R)e>HEbA#Q^*T%8=dy+wcL
zz3K6W{Ak|6J7}hP&|}`Jfsh_{6US41WgQK6Ex_OMl-FUc@ba7LCMJ{a3oveGB|iAO
zdv8j#bwHwv(a$3q|20%J5;tqZU1!BchRi^n$oG?>dbYrum$YYJmO_phs?Lga#Hy}*
z?yL?@3Fp?xFAs!E+Vp#@z(Y(<^rSfd2#>>R#f-||REMCN8dQ)eZ5k=VDJvRoTen>DeQW~+P{6ZB@_27?x|jkscS$1Jb#!Ba!tKyvJ}B#pR|T&H;i~>!Lpu$Ju7zQr7sTg7<5f>9xoE
z7g>p%ySVy0jLatMUbbOBiJtlovfl)P-@TQ0OooWMP%fa150^$MT=HFc7NDsYPvqQH
z>`j;&8e0>-R8Fg&j|~%vKU4&wTue#cOIfci5vna-zRaf_o*qp$xui2hGR)^KAGAB-r5H7$k)-=6KCNB!D5Gw^
zP{AERoxuS`7u3VlXT@~#RW1AEYex2aPKME?1+U4Fv~x;{wmI36IE*ve>BGMwOvmD<
zUT#$6g_U?HTF7;gA6AB{2LuYng(jW56DqMHjiL0N8O?}zWR#>9xKMtA$?zx6@DP|9
z&i5IFJ)Vo@@OBcU!nA-Y~(up}2$rqLq9+o`#)3
z-p$OUbc_M3?zUU@eg-P;7sHy)R+3r=vz9}$f|LX_$-BWe0parEIz)n>PajS@8(<5N
z?I?1fVXSNP(d{1Eg>WXN%nGHkL~pcvwtp3uNa8Dk-1G+GZ>TH&Z56Z%k?u)(?+wUE
z>Q}5p!^D$LuI@&e@}Bq*ols)lYwLj@dOvvAyOoMHs%acwe`13O{NwAgwnL(t1np7gvXS&T+g-h>d&J1sF>LyrF^09f>
za!sFDaYUTu)vXWWi<8X^W~|N+jqsptruFs`Ai8JV2~<4D9=-hteNE%ZlD&HYtPT0l
z&NMeRD6pC|BSw9x351E8#(Q))evO^H!|2cb<=>d|2SFE~5$UxdUdidRf<3S+Uqizh
z3bhWRa|oIKJuurZd%h#5MnLb5p2$hlI7Yo@G1WzX{*_likuZPxSaA>S#oz>IpJ=W@b+t
zXmqJlA*VA&2bJ`pRc#McpKQOrgHm#}d|=$ObszWBnCF8_LxtVp3lk@&&VwznS_P=F
zBGY$R;}YdB6Aq}Wh$NXFwKPR)5Zd%9JA9CxhQR2sBKKhH^(N2z=n#<<_R#Wy7aOAG
z$*}TflfS}78pccJTqOL~5|rehI%C0dY{95Ag!=|TYj2p8UmxiZPNniefMRsv$r?|<
zDJmOoo#tiS?a?3RMG!JAHN_qT(pYxkHEdG15_9E=_;X$qp
zqG!#aC^E#kHqBWb#sTuwA_lq1i;dedOLZfIm%z5LbogPVc_=v-NNE9isitx6t);3r
zG|iLzE-H%8C{s!ymQJu;P(1O1mi!ygHx2-!5pILY=<}cZ&
z(1yUSUYR
zQssOm=f)A-Y@JV1PbxbSZH@WLKBi$rZ)8+FFA=d@)7eP;#04gt%@r1(6SuF?$tL
z4zyWjU9(aqcur-+@`FF~wo^ksVyB-`?4q@kQRpFfbV3eOjLg1k7FPb_nM6Rq6*Z(get0bvW*#nbR~{(bg=FeKfjU(^k*qhe1v07)Mmt!5xqQr
zZQ>G4{tQ)dSmIWZm*){c64ApqZh$d~VjoH1XJnF@MmBAaMX1e=gVC?~EAi5Adg1XULb&EL*$}1p8_|
z&a%Fna^8<;j!@yfr9^O^f`X4)29VS_(w5(EL}#2EnMaV%Gr3OT$nkpvi$idFTX*;X
zZ9U9%x7?VH9#G#M%uMdp`F(>VPPU!C@;}^nQgd!vbJmzA{
zK`e!Tffu50@LR^5>nNA52of}_gg`ToT<#|*@t^G)QMKd{ka>^Gb7<8)hO`h
zU$c*SJZ#*9UaC26Yk?UN`P
zQTLM*^(QT$Q$d{A$fr`MPchGRG-MWzXo9FdSk;~v>B4oER#8-sGjTG-=U)L1-nku4
zwm3cpMHSB`BaeAGp1v2hjS2tn9OpdzKRM1uBTPWpY935MDVa|}p3sRq4qtB@zxxj=
zM#hB8|0gP-9D1~gv>9JhNuBe;LXwL1pi2BdJW|ALvyk;r~-(o22r;0U<^oBr+<3
zQmlgFZn^d8%aoKUXc8$%W+;e8}j$eSBy1k9^p{sqH~W`vXYUZUx_^TXMOAcY=>r
zy?yS^2Fjb~g={|AexshM6piYn_Yp~u5kP@_F@}=vWoe5l0!0i}xze*5*rxP;;<`WP
zeJbR8s(m&+%`94Iczu#SEA{X(>nZ&lj`6_1Hj~gnd_!im--Y}#p0_sNs8yKgDy&x6
z8ci3g)+ezzxyIVUHbgjm^s~0+Zb$uh@GxKI2>#&_NS1;@HpzBMBm#n<+%=OE_jm9s
zekv68h~om;r7LGMHq+~Qky+mnD<@2jk%UH{8kWuhX8I_&c8PlZMyosAOsSzHJG2K!
zMFd`|t7!3mlBApH(F|wNwyIqf?{mbsPmQQlE7pZv^@Nqs_-1*JM>DH#!GFXj(k2{V
zYu<7~LfMHFxa4;875WXI@}Yw<5orkxCf$r@RkQ(ahHtDettr&6_>cPQ-~MP&dyN|{
zGJZ*2mFydWt@x+1pJ%rSIFU*lmx{K;o}(zggD=^NbVUB%F#7vC;WCc7YXwdIim5E4
zI(TG2IMR`Ztun;YDie@7Sh+4;Upth^D!Y0}Za&&y_p5M@
zlI!J)HAuMtXAVnq#B<#oC9HYBvFo}+yKX9rl(hW;o;Vr6TXmPM_bFVIeepw%Q=Szg
z-NnDXhLVWA$~FBUXK)TTaJD?DQB1t6KTaj}Q{m5e|2eMK#y?(5xw@SW_DdfF$xP*q
z!=)R>G)Uw7{Gh-1knxeGB9rLvPGo9k5sB54hYvPd^9_l0S@R!hwA1>v+tsCO!z^m5
zAIt1H;L>7YC>`r(JFxyq8_65BKKAb8Qc6kp)Zv*lkVD{KQgc1V!Z6$*-1Lc~+BCj>
z7|1i{!3EdZ{zepm|-1*0vt;jCC$-4+w=R-!XCFuObr3H8FQQBdLNWsDQS;maO
zk<9Op3$&Sy2DnlNFEq)lx|fZ4?`-rZQhq&3e={kJDsW~*>;tv;j$0>>p97<;oAc&4
zd+*#|`M;+K6k46FE&r!)Yy2gdYPV@uzuVvbqP%(D?!-eV!BV)w;3;Ym3GS`AP9IsZ
z9&;u1PU-4`SXAYU<=lzh%WB)?(NCRG|I$B^ssDH0yZB4EXOjqzYz_8pqEA9b!2$x%
zB-^fSjf*SFF5@7V#cfTczqO~S(`yaD8ic1OZ_pGKimq)
zb}#)s%X_x2;g}oJmSu~Oxcp7S0zwsIVe_nbu52cMVKc>mPQwSTj%4MEV&SMenDGH+SH8{_K+uH8eh|J!!Z0
z_DeG6>%TiAn{@YiqH2X?z9kcX6u^h-_UyD7r|BtefI7X``m~%=Xu11+^Px+haY?EO
zwa+ruSR0EGwCQ4!4O<U%RNm+jN_oH}wT
zAvERNMeJk&dTsvj3EUJMJpfWuJtWT(Ik~NRG6vkG40{DUjC1bVXMvkYw;S@cob`k7
zd%<+(j|^GmTk(t8VAXP>Txd!*Oe*GX5^X`nY7Q!23F*-sHJ4nMV*`{VXu=`AzR!&&x6}AfB}cEh#pCR*OTYQ{0iR&$GDC%0rYt$RZK0Yf5}+1)388}#WVE7%2#4iasoC?4XL-Fhq9oYB$zIl@IHlPW&=_l
zMNx3q5M!|y(wtyb=oIfGUGsbxM{M152#&=Dc~`bO_!~>=-%RlHY+I{3u}VoSO-8ZU9YRd?(R$;%FEoyL75*mA^cSKq3X-$d4{lO
zVh7U%oCw1?*gE~9*o?yvX>>klqO=3o%%v%
zb5yYVPiZQVDA`p1e~h|7%SdBYaLZ|B+riy#rBqpP0w6BIsuB6J4uLfjp0!
zoC_R57EGj(*peZxcBj4$@?^-Ruw59TQPB5ch7qZIQBY_sy^Q!Z1vT$ETRjxB-Tg5+
zQ&c=6IK7~rpusjIC%pI-aJK0oV?&qh*hY~$r$QJsc{ETkJM(;cCiOeV+8KTK(A=dY
z$!b(F^|Npvb>AnF_v}VQ;e(Y`gxFhz^lTr4u>C>E(Cmgkfj%|#D_&S4c)sEG@JGmg
zbBcesc^XuS^e#U;6Pn;LW&)@F@6dFb4QCP^YKRSGxxT#~tQtsmUiqFC$^|b)n2e5-
z8_x$T7gILFF3OyDhN;LzkFVo0iM0~w?mA(fkDd5zj0scnV?*5d&~wh~`Kn~|!c7m;
zb5J&$*7r#-~tN-Pd51H7L^L`q-=xWxVz{#
zD~}cuB91(XWhpY&sB;W)el1JbvdtV$Te7Ca#|e1=ON~@K*27J2DTt>GFD#S)om^PD
zv#(Ky<8f)Mps^!gVT&%3*)?-oe!CScxf5i_5;Gv)CdBGV-vFistEi+YGuzOZ@GpjF
zx=qys{qbaEKm4-@7FN3?2d;L?tp@K9zujkzDp7Xv4pz1!i!{mv6}52dK^}9%5L>?`
z!e9vgP_|!Jjgb^hln_DeC%i7(No$Ns^@I}p8@|u7LisjREe+NVHD|l!r8b(y$1_kZ
zuDlET6s8sYJ$>DtURl3$Fu?de8H#3(7V=%_X5M!GXg$9xyh`~J_P#j&Y~QNhDwi7%
zyN&wc<~d3kpIb+%oKzya^c#3*tqyOi)Eu1Y1
zB!DkX6h4^BuY(}i4=jDT($`cDS;
z&>&ajHozp<+i=E`jQ)_&sB(ZVJy;A==~F2+k-n@DAF{+$nNA4jB;q3@t8H%4biPhb
z0>%A=DDE@Yp(RJdg`$h+$cLmgi4Iq20I3hYU8H3bA*qUyumrMhRVVT!Z!OC^0(lz|
zx^~iM=!<5ch3a`Wlj5gF^3JM2g3#7_f}jo_6rU_OA8@FLHMyWg5HNR9kS!#1fh+Db
z3*kGejPsR(H8{yRaXjv(&mS~ff`xkrrRD<|QWvg9ZchkTr4vOQJqIGvmmyM|pvja{
zC3klbB5QP~_bVg>eMyb7Jd|++cR=tCHEhEoC~}!dtCJLLfww81`o9xxjF$Y;h|kpT
z5EB-~OM4@;YD%#PBl6Vw4z!nzJ>?^`G%Si{Y2rP!Xn4^SAF;>tMy9h}7?0(l&cIIA
zRT*hUM3Pl}IEAar?-
z-p_oIE3u=HAT~X1&xzZez9R97iGmuv6|@*KILL6WnPryXov|cdP$3XMc(0lFeNq`^
zWnhGMo<`BjoXaAv6MHqJ|1dAAn)+VNPyeGG4YwLN%=-9AM*~UucSYo@t-2L;7UOd0
z;V3Ey2}`gF-@MQGe&Yoyb!&6)9`zZ;+h^HvtUNe>^uEY17x3kar&yt#@_B0miPbf8
z|7P=i$tI4KbW5stiRA98@m#0~Dh6yOk~k6zX6PunW{0HXz_f2^M4yHJr<`(>^UYW4
zES2Coop(9z`U&R5$zQ<%ryHYE0Y84^E|akY$B9_@P*uUm$PfKpE1ONRAs{hSUuf9|oCZ<*@SVy_
zp=RI{prgR*7llAO7@9!~aM3J!)jBe$-t=caj#h0hw^@b{epzq@<=jdAeMxyH)N4T(
zQW4-Mfl7kw+B7^31;Q*Zt`1+NtGtg4B$f$9o_}yxRewdHhyK->4SKC#f}4UXRZh#Z
z@^GPApFalMDK$GMyH`+}C>j-yzreu|R(
zi>9RRBuK&$KJ9qiu>3x0tzH6tWY9^%PvK#>x$bu(}7rY2dCdYMpwn$c5WSUuNZ8YJ+%qf=2y0nu4R%|tdl-QXNB&d
zU_jxZz(SiM343r$b3R+$RC2R*u)R*Rty%02)j9E|*LCI^fjj
ziW}F{^w0g7rx8i6vW0(Oz>XwZDoHGaYL4r>!g0LERaTRKTyYAQIfRe%5^DY!I$mcM0XRZ#Swm&*vUbgq$!y|kTy;S^3fBUPiO!*>D
z>fRIqRJ||+R7Vsmh^FuUp4T#Czu&g=5ArgtXmDRRk|GC+n-lG;BhXfls*(g2paUGm
zb1)RJ-v;)`^;+-YkQI<^gKbI5zIi3&&a0&E3Wn&LwpfAN5Kj_CH3wAw@mh}lW>?Ck
z9kWH*UaIb`d$Pz!D{(Y@A#=iomNppbAfLfuwD;8w>GD
zP4mM&f=0f#w+E^^-9;0kpe_A0g?7u{#x=_%98|NxB^9{|Pb5UOEDsY$v&*8-pl*@m
zw%QV%BNiFRvv^1kFDqyDzAEUO{gVq51&eA)#&FBV0*V86@Z4g*SjymwWK023kY%6r
z?r`Rq#=o}#f7cA%hqa0a1?jFrzdAx0r;Yq#rhuo+obbmBgLxwBCVKfI(D%bDNqyh)u!V%ZqS$ll0yNVjZ*_AI^57$a=oyWqA_fcvv$l}_cohQX?#vx9
zyg+BcZ*N;y?{*2L8jlo#738mVFjZ3r!QY74{*8Bq@-!>WX#3fyn)LAT3i>=qtWbU_
zS!IFDoz6-b(DW7ftH?YGx>9yE_&Ok>{%cv<5-?L7YS%rD>ErjX2&C&5eXb(SclD=IL-opI{TYEtH`FrQ9}dw3}Pvf
zYt3{lSE4T0m1mOQP@TT{5}ma7Ow(7k?~<_*7QU(-Nbk$@A^Y9DlYZpyo;&0m!_}A5
z7{i)I9YTYo*Yl!ZD}7Swf<+b*MX|&JUz;(-Y7m@gUJd`=v%tVs`vI@y&1U?Jz<0*4
zURuiazunM+Svdnb^UBm4mg(6NT4n^c-n6Eym$9}249aNgq1g$=_YBi@8S~o4{^USI
z%NLh+eqc~iH_}aPPLx&;8hr5|k1*e$cZghvDB38ZU|Xvcrou(SGJWoyf7=wCs)s(6`hw8qst!L4z`YVP^p{2E+ZOneQwqYQ54Bej~{5{gT?h_*;m@;d0
z#0+#q!sYlfD5-Z3Q*8_B_ZKe2Kq`?b_U^m!Ac>y$le}ZSnYqDM@G5DejYDAiszoAM
zG>POlvN+-Xd;=VdBlaF(ghEjlL^5zS?}M4LYG*E)KW8EN#T;7#XnXEABwO{t<0NJb
zaQQky7gGB9z%IcR$l!~X3V&s{di}SLa7R^pxW4BWYI)h{@jYK1Hs0n&%9G{Ac#8fg
z{_)20GSH(plOrV8_1tbuY?a_@?DO2uZOf9MA>JqAiHv3%_q0ld5!Ond=yb1|*;`hl
zQdSEEBVVG=)gSZS(GL{6gm^jj-}G0otvJrTgdJkWikJ8U2rE^GRj1W^J?rR}B))yO
zUx!C!fd0E`H0(WUL1y(v%&a|f`(wC|745M9Ot;Ud_o?ewfz#U9@niEFww3U)m;6aV
zG0EHA4ARCxH>mV#F(rm?<=+$_A
zG_ifUeZ9Orxwq$I11CK}74JppW9s};(2sT2T(d&^BNv@ibxa5%xFs3U{OM5#lKrXS
z6nZ%ej_A+c3FIvJajBH>!(uJJdbAlgp|VSnc(|6%qQx!L-2B;POGpw;op~X<;w5Q2(<&Y-uv2*(7JYG5J3+iQ5Smtp
zx?H+BZ`ir{^IIK;JJL$17c-5q`(pB`_^h^4ZKf}HMjF-E#jV0wqjC$bZ#?M-#a;Hx
zk>K005WD1DtZ(2t$&sNFO)Ub?_llxOlJzF?cwS~(a!&(%=gMCvGn?K6*7<7Zli!Cj
zA_+0pucm6>QEh%i8O?IP7;FAU=7s_+a0nECw)jSshJ&dDGUpv*szWwK!}`d=|E^iA
zzY`*?JvhR?1Uz1T{cGIcI}d#QvxC?_Hx12uCm!#A6+rjcRECO-)_s4mt$HS}2*$H#
z2?U*c*$h+vnCLU*EVr(0;{AR-r_EXJ%;iA0_m90kKIi-ZsD22Azk2^W>koASV8YfW
zuJNEp-WsKdKbz(79v@R$!EG6!-7O&yT4EPsSS_+{u!?O+5*@y^OD#|>+qiD5KyvT#
z?30>}5WiCUN8MsLB)NMW2KqXllHQ`xrgf5S6_OHfBlSii3p+=?S4NJ#!8)guYqa
z|Dm{iqrT}UwfwtA{=1*Hs|Pwax=ghbzT-+z`W|R{oPQB;nwP_29`A5#xtoh7$ZuiT
zd-Ln87{=N3o_KlJ`h$OtFsyyVDrs#@B_5c!f&ELyz3zz+&u_|+h==67zFqRBXDie%
z5*9(Zi9T;%1)5%b@b0z!lgfrVE&RQ8Rh@wkx-+aNv8T9SpRpbd^zNYELKO{XUpq!0
zT7OiALDfZf=uf{u=35McODzpBZ>m`$svO^wC9TtFESPcJv23_oQo&bcj&d))$sdDCRi<`tTz-~>9Eqv9_Hy(&j6#)-A0oexue=g#mR_fZPL<=Fa3IPC}8b_~o
zQk7ME=qBg&-H|JjcjVp+0~@D$G$pR@$`&`>%oX&rpO44r))j_68bma`OrIWV-(lGM
zCa+b^zk-XSaiZ8Jz3z_=y;-q*kLdqVr8;DXZT#u@b?ZlEDlS-$8eobiKMvbs1(W(#cLxx_54NWu4ews~nZuHDPGgiY-%iwu)
zP`n4bmFCsJHnU>N$bl;k7n_UpaaXoYEHx?uIVWKjnKbX6qo`6B)TzhrVhrO;XjFJJ
zAY_3igvOXqw{M(pHSP4EYgv6L}Mykb4)?_PPx(W+=ByDK|1@tY3jF
z{PgR>elgJ|j5}n_O?n`FT6YLCkBLpFJE3d&%yEExmzq@!Pc@C48r2-v(@_*G9W6J^
z-CWOo`<48pv8otP(H
zyU9!^-xHu?zb
zyweMH=Hq
z`U7Po0%T%g#eZ5+!#tb8kE9|YcQhLHAOeq;9hP4{&yx3~4Q3Q0fQ(nr>+6Ed4ktH<
z5XR@J>n#;<^NjiYElDO9Ukd^z_MSm-Vn+lqVqgZeORDJu$-qAAmlhmJW80plHg?6n
zrhu*TkaDjSgl23T%6;)9Q|^a407(!s9=mxq#~vatVt0~kG0`Z-ol)$T?N~C|(C*T}
z!c!|ZI{b^7y{tI6Tjav(a1_#3#0ZKDN9MG#fqh`NRs+6-%Uc&;=OtN(I41vtj91P^
zAR`o(P9b2vd0|hO?w+!w(aZ)S-Jube={m(J3s2y785ZIPE1dpqkp%Vg0_?XL*;1>%
zlR;`dkvJeC(lk|w6>kl7hlI)90wsdqLn(V2d>t3@S{upx_CdP
z1viwb&8{@{pfmWIVPf$-DL(0SD}VH!P9rGW9-T6hZt-t
zkR&e+K$rksPR$eUpZCi+9_zuhLLuT7DhfYy6AGiitfaSTJ{dnZAkDB@hN$0ovGc0*
zKCB_kd0rr~Zv?kQ5vZD@lBylLmiX<$2Ti@E(M{}
z`BpA2X6($dt{FqzgMY}axkku|o51;mB52zd3C*mn>>UGfvaHREGT8@ZBC?~S&A=gV
zY!EH@Si{!ZU}Q5)z{7})h5A5boc`pW;hqrmBbyD&JbSb$Ol|zAnPw>}dcq49zrl5g
zcn&+~V4vI+WtYwxl=G1`1t%~q63Q`!2{*4;okQb%F?1}Y7j%t^i2OU~GWuHx1rtWJ
zsT~^TN_)CqC?=7B9@^FV0y|2LavsBfTvkd94jUl102N<+0?}eR+jq^N4}I9-
zL&WVYZnjx&Iis|R@^1|r%+Z@@$|TQ;GTUJnp$aSu9#n6G1&tMm4ZDTWNw8`>B|<{q
zWj!zrskr@Rz`
zMSOS|9g6BKDtpprKnqAtn!`OhXYlONaHP&PW3(?81jhY2t`56B6C0g_paOoeZBYkS
zn-;c?sO*+nWy~L>fsuQ}aY`qo;Xg)k6%0fm(*M{!TedsmV2^ETOi$$%0M(6%Kmo%$SeoGsnu@R4GE?jDKd?M
zeL^Fr+*5l!CJ^2S=>WN;oLaFJFj#;qLtYX*dd
zO4S*ifSJjxVNC^%h#!T+?fqjc29L@j8ih>;BRG(@cM>({b80P53=BZebj4pJ4lQ?q
zeexkqyQ>Ez6uM+f^CZ|_gQvN+kOO#3aGwIP2Ev3yWG{bjf3_g0Nen6zg+Lm6muKHNG43TA%W69;R
z4kz^&!6elImq1}xrEP(rX|%r|LQG?+24wR~uHh0EX^vdSQU)kP7*L&TDC($uVX-sH
z{pGE1R-hP0v8UvIu0zaB?dGTzl%bU&rwrl%T>XOdq+G<-eQFQj@e_<7|BDy=GEvPQ
zW@6Nx4QY;@W2xd6Yz1C|1>SHrx<5pd6;_>#KNo!A7$$#;41jXf6s=?7eqmQ3KL5(C
zKq++q7={^$;G@>0Eam&3GqPo=BVc!y)mT%RrJzc?V~vw}(_Z+Q`b6Ryarw@u
zesgD}i8_*=1Z9M2!#!(`5~027&skQCbC(mnOK+-2jE!Vwa)emgo%y*Vl{zrXd|(=OFJ}FtD=GR6
zUex%TmYyS%*Tu@CgwXEL1C~@-{
zw?0eS9zD7AR1@2
z3f0a)W}B&-v5{)Rd0(haWmbm?FB}LFGhxER2z?_DL8j%HD8r(z
zd2LN?e^ICo$yqie)@?*BfI=%kIHBXefP<7CW#BiV-@vLr#%LdL;7~*0_%Wog!#Y@V
z+69@ELAgBuEyM9h2j(mUq5|Nc#J$Tn3O}rhfn^{#vH%}7I52@CoEM4+?`&CFe9J_<
zCXPijF}o#=Rom-LuV+tF}KE(RPyxEH6n^w<5#2bN3S{kr{~nr?Fik%nn^*-
zp;Y9QqAg)3UBM0l)tH#Gx20g05t(UqbG_3dQD1wiFtULsIjkGnIusE*fp$z+D+gzaUEwNo-Dbg$y?rbgq
zayzD%#2e~}An3+{bVU7OrnEnPW;g4ggN6Tj?YGdi&@Vl)Yg(>y2pr^0NHRpOWuVpF
z9*ADif+ah`$I8UQq#~J7
zUNawRu_uTP*gRFfKzzH{fO*ta9|uWFtYP9zt91Gn(=`UH*k$U_&(wNGNI$f~n9R$=
z%_Bqhe78aDZ?+B1;g?c(A-no~OgO=tmLrC!Dmm9HMFg}iZqb*)_Ei$UJ;_EheO&e=
zwZfu$p#T*YB|Z?^19vp&lFqg|Fhuto#-1P}RWzQGW1H)#DRsz=7-a7cN938#PIKu!
zO0=_W6j-yKfjrlF;1|6*`pAK?d6&yGv-z@^Xvlb!8uq*TT-`0cMO=~I{v9=?eFZHm
z$3pelBiI64-)Ua}1G328!E7%(zMe58ueetL?K1R90F-3W0G(<{I;61E2E5z+6?Iu-
zzt2SPlgCpTu+$ZeKsL?ovNi#AtpKkD1@%k{UprGeR(Hq`WCT|ah41hj@)6dfE40#=
zBOoPxps#%j12x7FL+fsn6yA(lp0sg913_0o;gEWQq4@xrIkAM|T}#%mF1%TUT0b(h
z9EfZg*LhFk)~*<2&;ihnBN#}RV$<-$4Fy`AU=K=UeZ)A6V>lUYUL^#B9%AE;LBr+z63bh;eEfv^WD>4AuS_5?7B><9psN5k(pNfLL%TR(I0o
z?r8@=68S6iAv+)!DfVoAU$wtiHI;YbSz#4P@bvO#{09d$P#
zqud!4w>M;vDk;Q*e9aIqObM0*#HacQkpLiH_Nq4QGtE;AgvgaXV2g{s9*zvE(Kw#M
zg@`%j%18hE`cg%ExoEm)Rb)}Dy5V&^sDv1V?x?>Ul%0@AjY`EP_JXd
zC&Jc}l@3j|Y18nPstZ(9;rvbq+*A_?AyfnJpxcEX))X@dFwX*3&L{(Mxecnqx!V93|LR~js9a0E_~%5ZrD7EN6*EZrWif(mv-eV_0b@$TZ=Vxc*G0vUvW
z8z&C20X;DrB|{K=?!RZz8OniT{`m&kpL^@XRmw0W4y3asV#xUfAAP+a^sc_s%8bPH?L8s(>
z9@r)>M8%bI%+m>wFnFz5po9=K0ozneJgE30ar&3os0W9?bKy5Wi}r_-4yY7IjOs7`
zc33uLVO;w^A?t9a?Ws6+K8^wnPIugj*L04C8B*$E+Y7{06|P#PO&a5v#AuA9Uq6XX
zEx7Gffp{eO^M?wo?9+c>>RQ&4nD8QMbdK}7eiM$so_S^VVnzxLip=wYLh$S*WIJc*
zFov34v3a`RU?=qQ6ShDI+f1=g%*83d`>LLISlotfp6x;<|mLx
z{Gwxp(NJeqJsOZJ0LJA!KQ_cAs!)NKjiE9=*>=AfLGiywMe`p)o{^f}erhhAlOTtU
z>Y`d=kD;PqH+}|bL%DRh@uXZXSJE;g2Qi$}(WwYHK-k`i{!zEc
zU0_JLdd|3nwzU{5wiS&Pw_Bj(D?5g{?5qdSs0Ln!(1?7OGz8akj4$Z%(sy=;qX0`a
zgA&VQ4|7;l6ZKrrw$?P+g8o4=SfcdVCVDfc+ofeFpTnZe$}tMB$8D~C$QMY~CmKjPp^J{`v@^927qVbf8eB)@
zN~W|%?$sr!0yvpRWmytHP>-2V_XOigZ%Ss=(lXF9dCQcg%K)eramN!N>{DHP!kr@t
z<(~td*LFG3@~HA(6JMwndlO
zli@1~+4--WU{~XA+&tbrQ(vbA>|9=EKVgGFB92NF&Fed85_Dm
z<6yZJm1c4U%~)BOn+uhgls)_KVf9=$CsiJv&ZH^cAvP>t5SoR4*2Yk?WP3UGTxIw}
z?KdrdqS;Wu*a~?9?Q!&k)GWld2!)u2Z0KtZ7*jxoOWs_x%!@89
zAr%E$V4gO$LckXG5_njx2QPFV)vP%MF{|!f1Rt7%Y*c1e3CPpT^GdLyHk7(XL*O?l
znq*FMZdUfrZ=<=f?2Xe9D-uM8n^m~l;jg-~b865@!r}q;PW$9}*4cEMMtLF-sDX6h
zX>IUrtf{bV05&gdfml*Sl?JG4R3lC6f*N|Ar;aef+IH9{|W072y3LOoLgdr`&2M#H;G0<0n#lTFQmx4e1nMWPu@eDWMFqfk|zl8g?w;
zAnev}4vfJ%U-q=wF17Z|SubMGmn=j)YumPHGWhGdUCYQ|qEgP?j$v-vk!k>`9#pSN
z&^C=y3q=F$iZ|#V*&Ay>j1Y2ubd35%x2NGMiDP%n
z;>=sZT%!p__!!4R{uS2(nJPXL5-p$?8kd6=aWV6~`(FEEU4*{3_DRbDy$nvI+LUyp
z+Oop7`p30@np3?k2%3CzESKN?7fRI}szj2BW1o@+z%%FrHdA4>htaGY$#o3FpAdAz
zyKaY}jnUngNeL9Gps0S6SmkWT#S;NIiGgf&cLR~rw
z%h_9lIb)(qcZ-UhsLl2{&=}kF7=x=&FPTGaGQue&?g7W4wb-~`=A$EeP;cEKausbw
zwH{@ojFI9^m~?^UP(F}T6?SlX)%RZKypifpH1;5%GcKjKF1Gmg6jg}a_ZQHJ*O2*-
z2bQg1#TtLcm2&(isU{_o?xCiif;#&{277jOlroA&i>X+-GAWi`Ij(y$>V&@M73Eoa
zQ5YQET$!DnL5oWvHyUuU(5;KRc%S2lK|*Wu(->iT91_9c8zZ=k
z)HZw--DPMKBJk1}RTBHOp)OPRhyXB+5-Z^74}J!REk;1>frmJtWUhr^3GM}~4y;gf
zw;oUZ=14hN+bMC}
zREr*=>t|}kRo(0Z!)*!6g9jgFOb};~GoND|iqNSX1(CWg2KZDYD%c39Q>&R;*+d+i
zBDO=|V+nPCIvugG#}VO?DZV}`BP7%@F=4@82+m;UQbvq7vrrwPa6&s}mTmbw
zOl;U8v|Wp1e6ne7Xr@kjyQ$~9nTl{=!WgQ-!@%ylF@|O$Smp>=SG9hd_^=chs(9$B
z-D813{nEfc&GK)e;Nh9ZMO1DlkEmtwQ
z^BwSiX)&tAFxY|Z#PGL?Yjv5()ZQXEQtFsYU||sPLMufK1oOy=6Xlt@kHJ%Osu<5L
zpmB6d%L)+eSfMvLe|Ej!^>nLk=W5pVR0-S5PFhre_6DROnm^XVe%Ta~VNTep{gP&fAof7d>K(|!zgi0JgG*b`NAH?M@q
z@1Ng_7q-)7OTFBvmq&K}6s~&s)JS}~(I_~RpzU>$?1NR;8)#t2#
zd)EdZK0lpboSIkJe50hjKzQH$*t8e=>lHp-+2165Y7_qQEY2`rs|Rn2VpRA)-u@_#
z6)N#ZQGYC}4@%(r(X*j~x+G{pisI=iaxJmqxIcW>wWsFA^=9(o1uan3S6#mS@wor#
zbJLjrQ9q*)|5mZRXcg<-Lw>nR{ZeHNAsENFILye{0aE(%!?v$WMu+
zux5h7(FpX3n#U7tg?a5G?L9{^RnrwKwuG!S$?r*;hf_RhS`r#nlf1{H9-8nx6N#R+
zOwW^4GA}4hNZu#HLld57BGI#!X*x?aBV`H6`$TwX!s8H$94(V$LiJ3fde%}+mflu<
zu*sZhd;av(+q-{<(>$bxr;=-UrW@Y&z^Xd0?>>@YEY#<`yqaW|WmQv1Rr)@kdOAgQ
z{-ll>(fKnwZ*QL&kbLzEk>(d2(O18iP>}}kvxJszdN6O3QO4Y}gr0D6{B4sJ)LPSw
zj_2n_gw)=B71Glh_4IQ}Qh&b+4Kh#n{U)U(xw>5;J+ptDc6?7MYbtB4XEf>lU(sBX
zlZ2Mfz(Hw}smIjD)C%con^&gHsUdFl&X62$=-hZSM2%M_
z%sgjXBCitBvZ@!RoMpjwGkBHQAh&vH#6QDq@IkA=O^t^5bRd#KLQ9F~CWPm2`_V|d
zpY?mIe`kL}p4Wofpp}T0R=qIgl=<&s2CotuFF``fisy!ewneA3tszp|
zGNnDtpgj!I9t>$6I;%58>KJn34ZZ7LN<>d7n31`l
zkNQJHzTtSkFJA=vlv6#cvzVUx^wNaCNIuts%HDqpX}Q~1M$9~?T4p7pXT=K>&Ka)N
zpkHO`R8=C)kv1he8dpIr|Mj(*7V?>q-Hnsf0Dtq&h`c-&)N*fR^u+3o5v}~KqDDxK
zjGk7#F`=DP(8I-ST*#7ay5xZAl4hoBhKn=k-k7>J^03RPRocktS=AdOT6#koY)8h;
z=(m3(tH=f12o3P6SBA_&{62$PNeywUcShvo0CXt&Gz>lEM|NPJ$YbDHOiz7!X~JKZ
zyIfLRS1Y9DZeJNOtIrA_NQ-p0a(e#rTN4#tSVbg^y?{6nw=z1?_RR!s
zy%S8<8=EiyUPSImFE7kE9Xh8o#OWAvx^#a|cZk!S;7s!RV=jgI-nlVpN9K;T^Tv#}
ztjMfoH9%TcW}Kg9kbW9u{WM9tE7K2~J(C{<-|WpT^PSv1{q0*5736_cP?)U3MNX^2
zq$vdycnNyLQTPv0Lj!8Y5VA$p6lL{*FJp&$1FqHd4zB7Lx
zWKQYtdD^E_R7J{aQQ#ke?jqBpyu6vD{guM^YoF-XX@=vLn5|Yq!__>WBzz$ZS#9z@
z)yvdSrP4Sw9!JOH81QViq7W5f`0om7IM1)9nakY^spc914WoKI#U=?vvd1AO;h!n^
z&wYZ4{UO9fKkag#$wD7-iBG$@XR?2+M_tgGEN6g6e0dJ!=_ofQF-&$OoRUz9@0pHMXCt;1Gmnu};yVWQXo^9wT2us*CVh{md1^#OT-dyk
z($ktOZlW^bTh!)WzHP3B&(YQ(+3=of^klnL%c}zx!UKGgn7z#q+LTD
znLg?HjTvoG)?|NL$PCcpjTtQ=WVnh9kWK{;^wTOs=Tr<&sO(w2Kbo8UHu)V@`*4cq
z6M^JYkKxlKe7+~F;GWX=Y1GpRD$3y+&9R#412uagpSM-c2`sHZkEwk;#a3ewPj~8G
zu9T(EfgD?X@LH<77c2f;qfSF0mEs8uJ8iBh@Tuosr^)EX&Kqpqk{oo)gXT`D0T
zDx#jeR;ykr6V>FUzIXYaa8g7otySwTjj2*?7G`VJ6(3Zqo5)D7npG+@hx%Ai8`X`h
zUZZXbGj)H;!)$G?q65lY6HMqr*c8^^5jMFU3<+pMe4D;cV=%;1kCqM`Dm6P!x>pxZ
z9(Dv$UlhBRAz|iY4O5{vl&ZYSe2uon%z?L
zO3*;)b(s7;;jFxnYzAH39JbjM-
zl(Mdl&6z4bvpL?&>-D)$Wu)4%$TP}NGS01-UXjzw34ZzoA%jJha;|^cRj8T{TkW+h^*kW=H0p^7>|imUOp??p70n`%Gvzndt5?F}S@%GD1A1
zlX+4wf1I9BpPC>HEBAsr>dxy4ErdG4HS2g-U-Vv+UIKGnh|zmrbVL`EXkO;qeMwjg
zIi$#`z
z=B7DwIxo(^eIt!On&HP_^r2mMq`m0Kt`dpO-Wje1TbI*BwGfrDG!Yr~%d`F}nO*3rRt}M12(jv;8
zf1HN5^L9#wfr!`I|}};Pq3S0I8qpneTL&WgZi{!b1~S4
zbav!c?2h&J=e_J10UwF)3Gir~;p4L)_>N8L
zJC5erILYs#B3!Q1q`o6*o*9tP7D=xnI=b;?(xM=(#&wckxjGNWc<6noCv?MU*nc|T
zSyWe*HHnSSq7MpRH5p7YsL>IljD*nb68va3Kh+b`D$u?%VLle~D*hs;<-feus?f$kov*o$(00?_
z*_8J;;ndJ~!8%_6`%Cx3U!2u!qmDo6RJguuCBMP%x4)?07x8A+Pd1&t)lrMnPjv!p
zUo;FyV{I9tdZ8-UjX>OiOk-ThwD$AYr
zj=?FBTe9<0GeU<>=x7O@DZ;<9I(`08KhjKz+z?)#P7r=QbZ6se`WvdRe{ZgA9FcQ~
zJWiiyvH-uS%0qdq&52F|H%zBzW>l11T|{(rrg?Megszs*og(~pp9s#+pN%&nyolTo
zUY<@6{{7qU|7p&pQs2<12U83`QwV;(!~b~_zABFgTK~O>+)|yNP7r=q$eIqGjwNa?5poIz`xys^)A>`dg~z(FD&A>=Shy
zyNKMdU7nf|I&?xuOE_t&e#o_VzK*26rOB^Xz7EMWU$6g)9rXLh4Ww=vJ1+>#o4VZW
zo8NMt5*>g+eL88(R7;=p9j`<|R_QFLQ|0{5Oof3P1gC{`)ZOa@f5#EBe5bA