@@ -826,6 +899,8 @@ document.write(`
+
-
log2() (in module mlx.core)
+
+ log_cosh_loss (class in mlx.nn.losses)
logaddexp() (in module mlx.core)
@@ -940,7 +1021,9 @@ document.write(`
mish (class in mlx.nn)
- Module (class in mlx.nn)
+ Module (class in mlx.nn)
+
+ modules() (mlx.nn.Module method)
moveaxis() (in module mlx.core)
@@ -956,6 +1039,8 @@ document.write(`
N
+ named_modules() (mlx.nn.Module method)
+
ndim (mlx.core.array property)
negative() (in module mlx.core)
@@ -965,6 +1050,8 @@ document.write(`
new_stream() (in module mlx.core)
nll_loss (class in mlx.nn.losses)
+
+ norm() (in module mlx.core.linalg)
normal() (in module mlx.core.random)
@@ -992,12 +1079,14 @@ document.write(`
+ PReLU (class in mlx.nn)
+
prelu (class in mlx.nn)
prod() (in module mlx.core)
@@ -1037,6 +1126,8 @@ document.write(`
ReLU (class in mlx.nn)
relu (class in mlx.nn)
+
+ repeat() (in module mlx.core)
reshape() (in module mlx.core)
@@ -1077,6 +1168,10 @@ document.write(`
sinh() (in module mlx.core)
- size (mlx.core.array property)
+ SinusoidalPositionalEncoding (class in mlx.nn)
+
-
-
tree_flatten() (in module mlx.utils)
tree_map() (in module mlx.utils)
@@ -1218,7 +1325,15 @@ document.write(`
U
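
The genindex hunks above record several new mlx.nn entries: log_cosh_loss, Module.modules(), Module.named_modules(), PReLU, and SinusoidalPositionalEncoding. A minimal sketch of how the two new Module traversal methods might be used follows; it assumes the usual mlx.nn.Module/nn.Linear API, and the layer sizes, names, and printed fields are illustrative only, not taken from the MLX reference.

import mlx.core as mx
import mlx.nn as nn

class TinyMLP(nn.Module):
    # Two-layer MLP used only to demonstrate module traversal.
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(4, 8)
        self.fc2 = nn.Linear(8, 2)

    def __call__(self, x):
        return self.fc2(nn.relu(self.fc1(x)))

model = TinyMLP()
out = model(mx.zeros((1, 4)))

# modules() collects the Module instances reachable from this model.
for m in model.modules():
    print(type(m).__name__)

# named_modules() pairs each module with its attribute path in the tree.
for name, m in model.named_modules():
    print(name, type(m).__name__)
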
diff --git a/docs/build/html/index.html b/docs/build/html/index.html
index 452870082..ebef9fc80 100644
--- a/docs/build/html/index.html
+++ b/docs/build/html/index.html
@@ -9,7 +9,7 @@
- MLX — MLX 0.0.6 documentation
+ MLX — MLX 0.0.7 documentation
@@ -133,8 +133,8 @@
-
-
+
+
@@ -277,12 +277,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -302,6 +304,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -350,11 +353,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
The design of MLX is inspired by frameworks like PyTorch , Jax , and
-ArrayFire . A noteable difference from these
+ArrayFire . A notable difference from these
frameworks and MLX is the unified memory model . Arrays in MLX live in shared
memory. Operations on MLX arrays can be performed on any of the supported
device types without performing data copies. Currently supported device types
@@ -646,6 +684,7 @@ are the CPU and GPU.
Random
Transforms
FFT
+Linear Algebra
Neural Networks
Optimizers
Tree Utils
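
The table-of-contents hunks above introduce four new core entries: mlx.core.repeat, mlx.core.save_safetensors, mlx.core.tensordot, and the new Linear Algebra section (mlx.core.linalg.norm). A short, hedged sketch of these operations follows; the keyword names, the default contraction, and the save_safetensors arguments are assumed from the NumPy-style conventions the rest of mlx.core follows and should be checked against the 0.0.7 reference pages.

import mlx.core as mx

a = mx.arange(6).reshape(2, 3)

# repeat: tile each element along an axis (NumPy-style semantics assumed).
r = mx.repeat(a, 2, axis=0)                # shape (4, 3)

# tensordot: with the assumed default, contract the last two axes of `a`
# against the first two axes of `b`.
b = mx.ones((2, 3, 5))
t = mx.tensordot(a.astype(mx.float32), b)  # shape (5,)

# norm from the new Linear Algebra section.
n = mx.linalg.norm(a.astype(mx.float32))

# save_safetensors: persist a dict of named arrays to a .safetensors file.
mx.save_safetensors("weights.safetensors", {"a": a})

# Ops accept a stream/device argument; with the unified memory model described
# above, the same arrays can be used on CPU or GPU without copies.
c = mx.add(a, a, stream=mx.cpu)
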
diff --git a/docs/build/html/indexing.html b/docs/build/html/indexing.html
new file mode 100644
index 000000000..f32eaffb7
--- /dev/null
+++ b/docs/build/html/indexing.html
@@ -0,0 +1,703 @@
+
+ Indexing Arrays — MLX 0.0.6 documentation
+
+ Skip to main content
+
+ Back to top
+
[remaining added lines are stripped HTML markup with no recoverable text content]
\ No newline at end of file
diff --git a/docs/build/html/install.html b/docs/build/html/install.html
index d896c4491..0e2e06a09 100644
--- a/docs/build/html/install.html
+++ b/docs/build/html/install.html
@@ -9,7 +9,7 @@
- Build and Install — MLX 0.0.6 documentation
+ Build and Install — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/objects.inv b/docs/build/html/objects.inv
index 4f89b9506cc4f74cde0361d6a1310c33441a9527..33e8aa0139d85b3ff42b45e3e9f6a38becf6b4e0 100644
GIT binary patch
delta 6370
zcmV<87#-)GE1fcsI{`P5JUoBPlH4}3-Qz0|A#Y0_wRW?UE!)=-(u_u}3D2&AT|`&W
z;>#qf)vd3;1fO^$h&(JW99Cx{&q;zs5HI2s`Sa^j)u`9+)l+s-vVQsJDNou~zUlwj
zU8?fOV{+}P_F5E4^YUY17k_=}iu~0N@*rHt@tBoacRXsqLiIrDe#C$7r3Mnyr9&ut
z(hA_+ca2I4c3NZcAkL1&f{4jOt5F`W<*Cc6GKSL^r$Eva0!dQ}B+ns`Jf}fUdDSWq
zX^5fV$`=T!e4#*^qC84G|>N=~Q7iY={U#U2v=m4c2BQh}A~~p)NR9Ag?Y#^Xeml
zP!}AF&g)zx&lr}D2ta?KRj(sxs5)u7YyiOkmpB@>t(p!*)FC8Tlf1AS(xG$+<@Fbh
zQ#Vzun(l=M^(6>~x5?YdO${>w6=O3
z1r0nMgS(wO7!YV1=opOdsZB=^LyfxjrE`kdZ#&)0Q9ajaj(vYf5H9Off#T_yAYfjd
zQM^863+_$51Mp3~1=+{#sK+HR2)*Os*?MvcmB7!9jtK+e&*9cXSf*Kl|Gsrp5VD6#
z(G+z|5U^3FtS@j%;C6S7+yHMc3Em7%VPV+nTEew31cf2nzs6TT1gV39kjtfGw}1Q6
zTl^Q=Zu~;q?E8P76#J+!WSwVlO%6d}$Q}1)`_}w*O&Wzm>Zma26@F9n<{(^G`zg5L7dcfIX=U
zN>Z1nVDhYnVEXbDOjVwePL=(C5ZW3>gFd}ULA*X90Cj%{L*0d-s{9GP&LM_`YcZsj
z2DyMJ)Sx?6kI6Ofj_ERa$py3{H=x^Po)3j?LlbnVe)7t0dY|xN=O_v?d&*J_ZipbD
z;MOG6j;Q7lf@vOUkgR+Hary`f>IuBAC;GCUq4tOF|zbRbi*9yZb+d8Vd#-xpqIX^M)+O({;@}16N!NE$AhHpdWfe?CF4#J)HI
zlB*Va7wYl>7`(#PC<-zwll=U;tQx2vXoS(AZJl79);WfPtFWK1E>FUoF_be6^5!2P
z%$t7%fNB(|rh)E2oI4sONmH-`xCGIF*(u47Wz~U@a~KV}PD%ri!3pUi>541#IE@h`
z)H8pE^Gt(e#kBx|bO;5eu+`+iq&T=>OHYOOR`|lk5DKh%QvFp!ZR#9D!SzGhtj?iV
z#vDVymFk>y88#^~$B=N}c8t{5C<3x76;yY+d;sQ{XI~YHH3SiWHP+DAWIMlWQ^3}a
zfO)ZnjYTiC?3ozZm=>8`=@D=?8z~Rq^^kHD%TJDl-J$jSa)=3CDBQWGAdk
z<41=n&e!5W?Li*{!@ytF%MR2&Dh#RnQ@d(n3=9M7|7B&j19^%JL$^w$J8*T_2I#zc
zgrWu(kS2H6T!%UC^tijGU5GH*wagyDZnYgrL0ikTY}lI*vWs#
z3N3UltGjHdz{>8jrppEjEE={6_D|A<-(36p&Ja3TDy4q6PkbQi&G#)f}!-~37Bseg*Em=00Hz}
zHQ4T#As_?pZ
zIJ&eC(GNCj?ANq}cwoPmeNU%6DGbky@*qDXFRH=tmJlB-E7jO+GLeSz9?ZvV+!x0K
z(WQNeD!t~41}O$#AmnvYPxJfk#?%nK5w?kW?5g8P-R^`p5JF6+OO}79jVf`5IWmMi
zZydu={q19`?=j2u^1OZrPj5I3+l~*iH1og&L~s1j$?DBj^0@zQn$baqDoKyuRCc~}
zdl-!95X0{DBbk>;q1OxcqFdo1jyh@jV*TykL9oV#(1*#h-F0t9hZy>PdbZ;s9l|y-
zlZRX#Z>b#$Ws`Aw*ZO~7gfMuhm+s`hmV2MvIWmY`r_+Xa5O;{%M&19jx8r>ly^8#Q
zDoNkz9cuHUCG!pG#d`3cJ}acFGZgng{xdHYqWu6T&;$Pb@gD1K=-9Rh%b7+L@%K!;OA-td4Ok?U*kx}d9d65XvC3V$G+>=RL^@cr*Inj
zQGcg&Bff#*k>q-$(7sOku%~1Qb%s<2{+P-fl5G*#gNFlu^k5D?wg{Z2@&L4zR%5%x
z0aADjQG$EyHr#)^x8Xi^8|)V4z;9B_7HN~fdiW3gr{8Qmwg~Je`LO3vHrs?4mG>MT
zZg|j%_9MzqHU@`VTZ2$@vz1Q4dxZTI3in|yVh?lO?7hoFq(wGjZL#ep^SGi^?Ov+Z
zrD~&8*qj{HpHye
zjvvAJROQ#9fA7vaACQS>d+s_$bBEI0`80PvO*&0Idvn$YXwur5`Hrwa*W%1*dsE{P
zuD3h5-jZBzeJ-`kX%iFT2XIE*n_73-@^RSa4*L$s{I5NQ>@tj7=(6uaJ3uYb9Kbai
zZ?R{ak(htRKY;W2-h#k3BR$uDz$g2uwS#SHVol%xE(&<73)_s|0RI4u@SSM_m#bce
zl1DX?wl-f6l<1a^Mj0#^8tB2jQGCznk!~l{`F8bl5df6qtQ-
z`8#9OzBQY6*gKsTo~hePg`9ynz*7*!e8$RJwXJ{ZdpB9B`$ngx(x3U2UH|fU*)#0l
zfb(Yn#H@A{Z2tk7C
zeAGvn+)QKQgvg5a=AXa}5uzyb{8ow?B1F*^Rn_$udh@NZfkJd$lhwKEZY48eJgS1V
zxW9ijwl~0>;rp)9y8*WsjRb3Vg0&=Awi7I)z*zIMkq#S9$-c
ze3vHgRo*3>qj8C_E!;U@3^tZ+fkPIXcz^5w{9_X_dENo(Zj3kFAL!M{EyNGrBBbH`
z_xw)~bcu^2>n-Es8gSC}&DF!T|KsoGxSxM1_FTdc9OURnI(oew1fQaUsQ16U2NC-S
z8|~Uy-$3Y03Cwopo0!Nj6pwHKtF%%ZQ@kdKUQ@+0H%M{
zNyAO10Z0D3R?RrEs*4`yq6@cK_*MVML6#D;m`PU;SL8K`(c8zkVVHUtCTQd*)tpq_52{L`Pl%#w&^QI^*I&PgvnsPPRnKB^e#WEhb9s9-CiIf_xsgoJ{aNtFJSC52d`!tCQjMue)+F+w@l3U*w1Ia
z%Y97XJ{NkD#=C*_%VBm*cW7MP7;ojc_;DQH4|KxQWW-SLFc$oBQ8UcX+91kuGx!Jj
zFLiB_y#HVKt!lpNfVD3gs;GaZzz9;7hc#raF|Uc60G3wn2eW-!Uvz<>&K6BX2!`Mg!Zd%wEgd6==K>O4NT&ED>90fqrui#T=@>aY7m(;e
zGDR&`y?hwx&%j#2_aoR?U$}q#7~15C?
z_^M%W3hOy^lJ*I%x)@5moLeU;kKihhp|s2KQIhfqu0|P18?I({CAaVc+y)-tmiGV;
z@@OdI_rO*54#9twLPJTHgT*A}5lp=_lr{7_V*iZB1Itj><;*fkx&%*n4W+%(?Q(Ay
zY+W{x(-3o$q)Q;mZ78{Q^pp&F1yAPJE0WAl32a*}k3
zsG=O3agm>smPtg#=J*V=GM%(cf-6ABCvrsVB;^qdojQMnasH%6I!3_|v4eQ-D4sh8
z&!CBiFfD?3(lf`{T7anaINB)u1;M}g1d(}H=L}jd7jgKk3^)ZKsR#47)Y6&l6in?s
zfZIBkUJ?$$5a{Fct@=pPF^af+ADf11`WcPB)&gWxOhSOtv;CFKz@sX;#7i~B3?ErO%*$EGj^3#DsiQQe_j
zcQn}@i>*Z2N}8<{Ea^aqEu$+kW7T8^S7jEc%Zyi<8C;uLpgJ>VeKv4~Hh~cvafq
zb=rg~wPDt3gI8-4s@H~Bv2eWxs+Cu(c%_Ewlv#hJc#VcClvAJ69$+?|vGG^BRuLC-
z1c+uSNom^zTQL&jI+EQe(~)1kd{es#3KQcRlicYR>9Y)
zLs2!
zGeGj}F+2;7P$@#O=zmJrDxylJd>(&CL41EN0kVt0-#-xDIWWtm0u)qTjaI=DM&)yv
zN~+Q`3zlvwpUo6nm7ZC!WL4R0mJX}*%z~-83h;fSr*Kz?h{~z5*%lpFX_*91uoa?N
zr97l>7cp5^HdXC)8R}S%0pFDz+LFxNj|}a_A$H-!_us%X_<{L*-t4`nJd3hgva^3O
z=oXO3EISKQomK|j0#u?!^KeCMyx`&Gzn6`SH?;SK(`mo$dvn)
z0jFRJ{PMX&doH=VEhpVzKHnl5EFFKN;3xzGMAQ~T)*eo5_a?P(ld)qH+p9_K(q!z<
z9Hff59E(n|v`hk0B8J#J31b;_$FukjQGX}+z6(gq7~^wQkY&&64U#CBqD`7`n~UGsoDF%ki%!dNF^%m$Sr|
z!12$3XE|s#iJ=88U8{(SKLb2oqaFX26zft^eJMsK3VQhv?;ggh2iwg}?&Id{;GVUq
z0is1dO1f4N7oz49Ewa_pwh6vCH9+P{T1(m|SSr?hUQ_m3dgd4#@V@XSHKSYY7s+fk
zhpCq>J+p}FS+kiIIc;f~1fqYK4UxG@+cM;hrBDL2rAJjp;V%gO#V3foDOMeCHjO3X
zEq%M-Np|xI17BJ&zp>Cd;C!mgBgvxhIf9@21bLLaUJiHYOkWvr3jQg+IK6%CzK=;>;LJscw%~I9v4ixF
zAnPB3eQfX!UUT5(xdiigVUYKOwQA=$1afpQNNk@dEJI!apB}t7kM-zbxmiT;j~oV5
zF%K%1v`_F(CkBb<^NfF`Zx{TNi#eo$^NuC$6TB0SL1OF8Yug2K_Ay9npP4K}USXb^
zOi&nSWW6(w#&S+2wu78AiRJtLZ}~<4Ey70yQ#gSGg~`K%*CTWZq-8_CK33N{la?rK
z99HWdRl6L8N`y^Et9l2kG9-^kw1`5IG%bQD%f%K#n@mLz-|BU~l!*o3xLo^-7uu5Zle
zTGWuGVGv9eI)KI!u9L1+@D=FzoStr-BpiaNLC2@HW$UDC6&(FSfNfPRla@)uMXLEM
zi=?$Q3^9gL*ED~r=?#~RD4Wr60L;g(I*wHirW1CdK=*}%k^hOx|2iqE|G956}tp8sV11U=>CF${-%FdT=M><9@nU3M-rL(tUHtGbbjxv8K^q$b2Sy<6jE>;0vMs?Ek
zL7i&wbkasVB8wA7(hnYle0OtVhli8lHYmQVEI%V!
zu~r7%kz9X1eOAb@Whibj4i*7v{qpH3kzh&t1WRO^Pi?AFOWPKsf@;*OP)2I>#BdDNN;$JvDp@Lj5C9-Ds!(Yi(l22j5fd=QSFwD
zg0W1sawtEeSua!?2Ei7#<`cPU*wVKPuE^V`xM%5$jwv`m!wPFBIkZfW}Ym(+$bP+ioxHIprt1-
zX`g@CDIkACT9yXAq=%gQ{hpi(cmBT!&;R?;
zb+fYrGz40_gN04Jm)E0(SlgYft;^a%tl3W1%w^3GR#xwwjVezRtcDt^
zE80tk8-l*GoUf~o8wgyrb6w@!wa_^ELIZz{K2r!+ta`CT2qmoTX~mLpmXNG1BRiBB
zKk+3(r*D6Uu+PK4=a6n5`%sLla|~SuO6?l1_u}YnIb`_koWqrp|J8qszpmNo>#^+;
z`|BaYJcj&78|3{^?zgKxQqZ_YS83O{%2z+k*V(G1IbH1XqitlkAN`+_vl32axcF91
zb@UuYh3N09Gd=IGimI_X?Y3uR{qJa#9|s#1ypR*&uO{1JD(|ATeYcs~m1H@lM6_wy
kg5*c8X?U@(2GgR4?jS9{3BMMtU&T<1%kSd<0StN>RC^Ib^#A|>
delta 5467
zcmV-h6{PB&GMy`sI{`M4JUo9*liW6vz2~n$gnTV&x{h=6TDEs0q#2D`4(+Xis$y5s
z;+IKQtGj>w0%VZ{hy;<3?SsSWOyqk2SOftO%va=3Z;w@zzI~rQX2(?3&;LB;N!zMB
z{m<@FmG2J8wX51|Q6$atyTUF0_Rflhzh5xeIasF*1o
zLfKNS0N#Dqq)8#J)=)f%v!P-^#N=CRP#&)3vCFD5hNFvfAg2=qayoG!c@BZ(IS+Eo
zt2PCZ`WOzbe1?$9XAY!E$}?7@K7xfh9xLpE^$|g+3yyW+!P=|@v2;Wb>Vjhh>eVG^
zy>vtn>VjkO^*R^H6NZ0fA_7oo*P93)s!p0N>p?KUDUOG2tEK}HO$ZCtB+uf3v@ab(
zdHcoS)J;{VP4_H=`Vs`g$K>_O-3Dd^Dlj+?zF~BQ;P?@C@C^ea1Xr#!cDf%R#PB^a
zx`#Uh1r6LCy}KPJ7!YV1m>3N1+nA0Zh6Z)xJB?Mue%q(jI;MY5HCjg<5`@dTv_SDp
zOb{@y&M2M^S%bT)Hvrz%Ymho_LqAS|LFf%P&)S_+qzQcAn3ymiejl#ggyktK@ZYzI
z3PNhA6Iw+R69jD1W2Orn6L{R6BUixNOM(wWx3Dm5buHmu=!3$L?O(&AAA&SNLCC4;
z*yUeudW-);+m(M`Xq&xksiKYwL)Li)_hcUwhTL#%)-TOp*Q7~tND~zXy~0llZ4Sb9
zRi3qQvxlfKs9v$P^=;t^;f{ep=*Of%L3Kz7?vU!v#unOh7z6sPsgl!C&uC#gF2cgYjJm}-y3B-TX5do+@47CqIRrw>@&OU~P
zYcZsj2f2VK+@kxGJ|x$?JDjG?ODUimxdPoz`+O*L8`_{dnLDrSZqE}r>=?yCW{=qk
zgX<$0D0nmpjU#FE0Kqg5JV;hPf;c*YgL(wp^~g8t2^xR+Ili_g<>{cOuPq2RhH;?J
z&@Vt2XFz`*9y;?M0(1)rgY7qNBfzXmlTA7`QS$CtZf^
zAB-_9+>Z^5>N$#mtjZMXJ5xRYbI7yrDT>txF@QBT(AVrZzv=z`+ROX#y@(i#eA-_oY6^aOt}L*SROVR*AQc&?i4h|NCy=rG0kT6{~l
zpbvpz;P2`422>pthAgY5*whn4U>Ml^FDttZ$hXKabepE94Y($31$2DTgQ5oB_1xTD
zbM3dIPlwGty@UvpUCZo27umpZi{zl4H>zUTwYRC(dsI1I=tbi6_EZcNcyl_vGZlXW
z1zyUv3HBg1d3Ig0FDR2kMc
zLkJI6f=DF`Q|Q6v;TeM(Vpuq^Qhc4T)0lxHqHXoifvcBW2n&YYxPe2C@Dm_3^rHhw
zXdIbd>&t-xuk^JeOk@1YWXcC%4oO~>=L5RBU4*jWB?egXAQupY8*rSXB4VtS
zVDEl&q@r`?TYp&Ja3+iQRP6WWB+#W{bFBEe8}
zc?RakCB??(K7avwNgM36MjsFYg7-$|C>OFU-~KWkm06tnFB!0@JPWf2VfH@EuX<^G
z((A&r9BB&iz<&Sr!yj@SQ`(2n>xlZ_x7C&qXAJScJ|xd+gMm#UK3GdSjD
zAG6{7j|{|=_93d%H5YvsI2C^mU|=75te}}a)c;MB)6eEr>bPYmY@!$I!GHRkB0sFY
zxCipzajOiykc&VM`18km>^F7`@PVLKvW@&)efkxE(4{?yd*~*<=WgQrRL^pCpADNE
zLZB(ehs&TEWn7KAv<4t_X%FI@=GQpHVSGZg@2of$?2y!TQ$20PyoG=B&=2M?>MQXr
z438u?!({tbPwcbDqz_#T{yX{TZ$CWN2;7l#Cm%ZPk#~(?G0g0MamE>_);K^4k0DBM
zk6niQ+Wf_I%7{m<$7|5EGXSIX^du%((HYjy-$-)3rc1+x&xP^-nw?lmJffk_~j~Khpgw^CE=1`
zc%PWP^gnHGMY{u+wBFiyosn7g?!a}gw}4(}^p-$7XbrSjNG-YQs_jp7B>5pNk5zxI
z-G$ChzS6N5N=u^YG(~u^xZAW^3@kyH{QbtLd10J3t4r$L)-`o_xjt8=YTHx~m|1Ct
z+hbFiMWBYqB~|yVsK0FdkJQ+VNYP^IBkwY3-(2tp&Fg
zjRk8rg0(DIwh=7jz(m_WYm-Y0Y}5N3FV2On>MkqvlRTHe*c$Fw
zUko-@+X9CyR`I@W0Q|a&m^^KOTpo(B+#mQI?lr^@-p01&{LlDL5Oj)*Bb(y_!x!I4
zr-!R=SN+G|&2c|cY-vIt9OTe*Lz8|21iwWEQSX0$4_&hg0coqWO%2I4TS}jjLeFX1cf)%5(4=Q`
zR?+-Utg44g4b8YMbUsgJ3pdw8>@F)G(nhc9QXbVUbHn`bQB9^9ugGf_L)(YAe$}(<
zm$ZNCQ`#KUTCWUBGM~qCJ!pLb``d3{=$w{Wc}kz$vZHli_?P)7&Bbn2u}XaW@PFE`
zhvGbUOZ29pO#j2~uf@LxGY!i_ww8Jdb6Y>e{!jCH|^S(UI%+W-#xpaQ{jUbKhXC
zhMDnLD>J|TZJb`wc&+^Uj8{v?1g=+MHUNLT3sFDsM#rtEhrPPtk`(H{_*0MO_Ou|;vHioB=0#QuBgHcp%W0-mfy%X!APax7
zn8iw3tQJO!WhoYuj7c+3ils!B
z6Wua|D%(PWOv!SFIkB=@7)g-&7~q+*E~|x6!gB$ME-cf91X+_2rtuuUS{Nlf7m(<}
zGR2!@PWl2&<4Iq&FiLnXAkl?oiZ*|#&x|W)no{Dbg^>hFp&=gT*I(g^AFd4^hADV{P9AR1SE%C(UsAvH+zg(G_Vg=FcaK{DpqI@(E=fEp$AABUrz
zWGSjq!t$9XEJarjzPlG`uNCVeahmH5%*0M1o
z@^75lO8lZiUXrB>2gyU@lW=hAbGLwuX~GXo#zC@t<0xfdC=`x^eG+qwamrw>7zYV6
zmV=yALJ!3_2uf!TGB3W|S3!R_$y1-hRhU2aSGdwvbK{rxje{eWu#ZEi;(p5D7o`JvI==gmaKmsBp<)YRlc31
zsq8UIE8C;uJS5I_FXr_Um@j7-)9Wk8MA9t!a-v1%eZ_f5
zkZd1e98ZX=g^>iA_#vJ>$Fmpk^ojiurYWnx^2{-|79eUpjy4K^LGUj=L1f=`kw-|%
z#Uv0?0Ve^v05E^cy*ht$og@?x0JxoueTs3AKxIJAHz#>k3nRrf2*fnp&@UMDji7;;
z;-+SxYzqm53lN4;=Rhrt6xB75vpo7e^%(6aU~9wQ&WLVd*y0HQjia2PI1k1266AEx
z%U`j!kVL3JOkt}osHIgzwMV)3JlS5vR-$Yr&sLI1{{Ujk_>O?GLv;?
z2KQzb=*~>opAFohO`t~`S(i3=pEjXRZG^qr;N9AU`n8dDEZnbwZk6>a-l?HJ6?Q4!
zqoEF!^ylp?ub9p{z^j&4iYrn8M57*ta&07*y}-B*>@>=DQB)yA&Up1+&>9m(#WTbV
z)0Y>@F_A<810a8yML5*bN^-Fcl#tzqaC{_EvP1R-VLl?&gJ-!urM
zY?kbRS{P$IPXNi2$M8fFEfs`fk|e31wjbz90Nv>g=GX;)|GVlA2MD
ziDa5G5Y2y_XjS=kiYd>CscLV^P{(Er`0?P-*B2$1kW*=^+?6>ySN>IWsC
z7D8)eD-x-IlY|;Xa_-RKN9nOg^uQy3)KPTE5j)<<9c&aFX~YgQa>p122N-+7Bq_(F
zmZTh0oZ$tccwsnR2+~Tz_?99|D(EIqP)S}7UW-Ys6d+J}X;WR;rGQ_6Bx+6a3Rvn+
zYH5FsQk4i#>64TsQh5ScELkY!nPY57bNtps8v}gQ$$X?5|3Y<@y{HwNd`B&5KJqeVV@cG1%p0z(3m}9EIC{}N?!p)lkSyTS}Cq`B`2Cxv6O2g
zx%L%6wp7egypKd8S#n-mRZDs17#r}s@V0;XqNl(oMJ_RiEytxiGezaH#7vV~mvT&m
z2y#JWOCc{6^2VxA0<@(^RYu`22>!(LI4`ai
z4noqwg2Zl?S{3pVoRM~GJ?3<_YPEj|$H<
z=qlbvvaGovvGZ(q?S$mh1!>(pyei}+I}*G6&C
zN`PnCp@RPG)tlrDqxh4u3>1IY#gsGLF9)pq_<&`}yUBT6r8wo8DK5qduw~0uW${)S
zr4>2DsIQ_d10lMUajq2^Ph}ZsgCyrxa$84J`F7eW%(=NfOL0#XaMIr(`?aN#F%>Zt
zFGsIY4|_$Oqm_MCEp7R-?R8wrm#LtewrQ5VqHeZ;mttMCQRVCvm2!WC(iHEbjml`R
zsFowHrg$H{N=NM#m16|i)LIF8g$COzs_99&sj#0ADYq?kDwdP$B0RYcNWN{MoweeW
zZ6QP*ZVRtR3$eBvS=%LR3$bP!S+gZ;hOmmJSu28e$}$imM#6);S|ye|$x@Am54V(R
zMxC0aU`!xz7L!>@$;^K`C`>3P{N!n%uyKw+iYt%Oml#gWGNHGlw0+p;{@-KBr0&Ww
z&dwq9%k=jgZua%e79SfvI>+!dRezYj>91>c{C;S=#65asnTL@78iTs;%l&>epUPWp
zTq?*|t$g#-*v?k3LXQ`>{9qdx?g#&;
- mlx.core.Device — MLX 0.0.6 documentation
+ mlx.core.Device — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.Dtype.html b/docs/build/html/python/_autosummary/mlx.core.Dtype.html
index d88144d99..85913057b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Dtype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Dtype.html
@@ -9,7 +9,7 @@
- mlx.core.Dtype — MLX 0.0.6 documentation
+ mlx.core.Dtype — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.Stream.html b/docs/build/html/python/_autosummary/mlx.core.Stream.html
index c60b3051b..cf2a91e74 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Stream.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Stream.html
@@ -9,7 +9,7 @@
- mlx.core.Stream — MLX 0.0.6 documentation
+ mlx.core.Stream — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.abs.html b/docs/build/html/python/_autosummary/mlx.core.abs.html
index 8d825ce07..5121ef588 100644
--- a/docs/build/html/python/_autosummary/mlx.core.abs.html
+++ b/docs/build/html/python/_autosummary/mlx.core.abs.html
@@ -9,7 +9,7 @@
- mlx.core.abs — MLX 0.0.6 documentation
+ mlx.core.abs — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.add.html b/docs/build/html/python/_autosummary/mlx.core.add.html
index 5c7c12ada..d3fdd72c6 100644
--- a/docs/build/html/python/_autosummary/mlx.core.add.html
+++ b/docs/build/html/python/_autosummary/mlx.core.add.html
@@ -9,7 +9,7 @@
- mlx.core.add — MLX 0.0.6 documentation
+ mlx.core.add — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.all.html b/docs/build/html/python/_autosummary/mlx.core.all.html
index 290f4bc14..3e584be21 100644
--- a/docs/build/html/python/_autosummary/mlx.core.all.html
+++ b/docs/build/html/python/_autosummary/mlx.core.all.html
@@ -9,7 +9,7 @@
- mlx.core.all — MLX 0.0.6 documentation
+ mlx.core.all — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.allclose.html b/docs/build/html/python/_autosummary/mlx.core.allclose.html
index 55d5f9e20..5e8ab00b4 100644
--- a/docs/build/html/python/_autosummary/mlx.core.allclose.html
+++ b/docs/build/html/python/_autosummary/mlx.core.allclose.html
@@ -9,7 +9,7 @@
- mlx.core.allclose — MLX 0.0.6 documentation
+ mlx.core.allclose — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.any.html b/docs/build/html/python/_autosummary/mlx.core.any.html
index a34b8d8b1..2bcd32878 100644
--- a/docs/build/html/python/_autosummary/mlx.core.any.html
+++ b/docs/build/html/python/_autosummary/mlx.core.any.html
@@ -9,7 +9,7 @@
- mlx.core.any — MLX 0.0.6 documentation
+ mlx.core.any — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arange.html b/docs/build/html/python/_autosummary/mlx.core.arange.html
index 1145a257b..eabc10e9b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arange.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arange.html
@@ -9,7 +9,7 @@
- mlx.core.arange — MLX 0.0.6 documentation
+ mlx.core.arange — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arccos.html b/docs/build/html/python/_autosummary/mlx.core.arccos.html
index 1be44b3e0..447f2888e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arccos.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arccos.html
@@ -9,7 +9,7 @@
- mlx.core.arccos — MLX 0.0.6 documentation
+ mlx.core.arccos — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arccosh.html b/docs/build/html/python/_autosummary/mlx.core.arccosh.html
index 81af1b973..a065186d7 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arccosh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arccosh.html
@@ -9,7 +9,7 @@
- mlx.core.arccosh — MLX 0.0.6 documentation
+ mlx.core.arccosh — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsin.html b/docs/build/html/python/_autosummary/mlx.core.arcsin.html
index bc2ebeb49..f3ae323aa 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arcsin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arcsin.html
@@ -9,7 +9,7 @@
- mlx.core.arcsin — MLX 0.0.6 documentation
+ mlx.core.arcsin — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
index 89f1910b0..012b8fa98 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
@@ -9,7 +9,7 @@
- mlx.core.arcsinh — MLX 0.0.6 documentation
+ mlx.core.arcsinh — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arctan.html b/docs/build/html/python/_autosummary/mlx.core.arctan.html
index fb178b438..23dc4ad5f 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arctan.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arctan.html
@@ -9,7 +9,7 @@
- mlx.core.arctan — MLX 0.0.6 documentation
+ mlx.core.arctan — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.arctanh.html b/docs/build/html/python/_autosummary/mlx.core.arctanh.html
index a6fb0fd05..e518bec32 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arctanh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arctanh.html
@@ -9,7 +9,7 @@
- mlx.core.arctanh — MLX 0.0.6 documentation
+ mlx.core.arctanh — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.argmax.html b/docs/build/html/python/_autosummary/mlx.core.argmax.html
index 3533fe61f..fd7009150 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argmax.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argmax.html
@@ -9,7 +9,7 @@
- mlx.core.argmax — MLX 0.0.6 documentation
+ mlx.core.argmax — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
Returns:
-The output array with the indices of the minimum values.
+The output array with the indices of the maximum values.
Return type:
array
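
The hunk above fixes the argmax docstring, which previously described the indices of the minimum values. A tiny usage sketch of the corrected behaviour follows (the sample values are illustrative):

import mlx.core as mx

a = mx.array([[1, 9, 3],
              [7, 2, 8]])

# Indices of the maximum values, as the corrected docstring now states.
print(mx.argmax(a))          # index into the flattened array -> 1
print(mx.argmax(a, axis=0))  # per-column indices -> [1, 0, 1]
print(mx.argmax(a, axis=1))  # per-row indices    -> [1, 2]
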
diff --git a/docs/build/html/python/_autosummary/mlx.core.argmin.html b/docs/build/html/python/_autosummary/mlx.core.argmin.html
index 579a62bae..125c64741 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argmin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argmin.html
@@ -9,7 +9,7 @@
- mlx.core.argmin — MLX 0.0.6 documentation
+ mlx.core.argmin — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks
+Linear Algebra
+
+Neural Networks
mlx.nn.value_and_grad
-mlx.nn.Module
-Layers
-mlx.nn.Embedding
+Module
+
+Layers
-Functions
+Functions
-Loss Functions
-Optimizers
+Optimizers
-Tree Utils
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.argpartition.html b/docs/build/html/python/_autosummary/mlx.core.argpartition.html
index a9189019f..82326572e 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argpartition.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argpartition.html
@@ -9,7 +9,7 @@
- mlx.core.argpartition — MLX 0.0.6 documentation
+ mlx.core.argpartition — MLX 0.0.7 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -278,12 +278,14 @@
mlx.core.quantize
mlx.core.quantized_matmul
mlx.core.reciprocal
+mlx.core.repeat
mlx.core.reshape
mlx.core.round
mlx.core.rsqrt
mlx.core.save
mlx.core.savez
mlx.core.savez_compressed
+mlx.core.save_safetensors
mlx.core.sigmoid
mlx.core.sign
mlx.core.sin
@@ -303,6 +305,7 @@
mlx.core.take_along_axis
mlx.core.tan
mlx.core.tanh
+mlx.core.tensordot
mlx.core.transpose
mlx.core.tri
mlx.core.tril
@@ -351,11 +354,35 @@
mlx.core.fft.irfftn
-Neural Networks