# Load the model
# (assumes MLP, num_layers, train_images, hidden_dim, and num_classes are
# defined earlier in the example, and that mlx.core is imported as mx)
model = MLP(num_layers, train_images.shape[-1], hidden_dim, num_classes)
mx.eval(model.parameters())
diff --git a/docs/build/html/genindex.html b/docs/build/html/genindex.html
index 079fe8183..16a3f31d9 100644
--- a/docs/build/html/genindex.html
+++ b/docs/build/html/genindex.html
@@ -8,7 +8,7 @@
- Index — MLX 0.2.0 documentation
+ Index — MLX 0.3.0 documentation
@@ -131,8 +131,8 @@
-
-
+
+
@@ -203,12 +203,13 @@
Devices and Streams
Operations
Layers
Optimizers
-
A
@@ -685,17 +693,17 @@ document.write(`
(mlx.core.array method)
-
AdaDelta (class in mlx.optimizers)
+ AdaDelta (class in mlx.optimizers)
-
Adafactor (class in mlx.optimizers)
+ Adafactor (class in mlx.optimizers)
-
Adagrad (class in mlx.optimizers)
+ Adagrad (class in mlx.optimizers)
-
Adam (class in mlx.optimizers)
+ Adam (class in mlx.optimizers)
-
Adamax (class in mlx.optimizers)
+ Adamax (class in mlx.optimizers)
-
AdamW (class in mlx.optimizers)
+ AdamW (class in mlx.optimizers)
add() (in module mlx.core)
@@ -717,12 +725,12 @@ document.write(`
apply() (mlx.nn.Module method)
-
apply_gradients() (mlx.optimizers.Optimizer method)
+ apply_gradients() (mlx.optimizers.Optimizer method)
+
+
apply_to_modules() (mlx.nn.Module method)
@@ -797,10 +809,10 @@ document.write(`
Conv1d (class in mlx.nn)
-
-
+
cosh() (in module mlx.core)
+
+ cosine_decay() (in module mlx.optimizers)
cosine_similarity_loss() (in module mlx.nn.losses)
@@ -871,14 +885,14 @@ document.write(`
erfinv() (in module mlx.core)
-
-
+
expand_dims() (in module mlx.core)
+
+ exponential_decay() (in module mlx.optimizers)
eye() (in module mlx.core)
@@ -981,7 +997,7 @@ document.write(`
ifftn() (in module mlx.core.fft)
- init() (mlx.optimizers.Optimizer method)
+ init() (mlx.optimizers.Optimizer method)
inner() (in module mlx.core)
@@ -1045,7 +1061,7 @@ document.write(`
linspace() (in module mlx.core)
- Lion (class in mlx.optimizers)
+ Lion (class in mlx.optimizers)
load() (in module mlx.core)
@@ -1102,6 +1118,10 @@ document.write(`
maximum() (in module mlx.core)
+
+ MaxPool1d (class in mlx.nn)
+
+ MaxPool2d (class in mlx.nn)
mean() (in module mlx.core)
@@ -1173,7 +1193,7 @@ document.write(`
-
+
softmax() (in module mlx.core)
Softshrink (class in mlx.nn)
@@ -1362,16 +1382,20 @@ document.write(`
state (mlx.nn.Module property)
Step (class in mlx.nn)
step() (in module mlx.nn)
+
+ step_decay() (in module mlx.optimizers)
stop_gradient() (in module mlx.core)
- Stream (class in mlx.core)
+ Stream (class in mlx.core)
+
+ stream() (in module mlx.core)
subtract() (in module mlx.core)
@@ -1454,7 +1478,7 @@ document.write(`
update() (mlx.nn.Module method)
update_modules() (mlx.nn.Module method)
diff --git a/docs/build/html/index.html b/docs/build/html/index.html
index 23e7376ee..002929d6c 100644
--- a/docs/build/html/index.html
+++ b/docs/build/html/index.html
@@ -9,7 +9,7 @@
- MLX — MLX 0.2.0 documentation
+ MLX — MLX 0.3.0 documentation
@@ -133,8 +133,8 @@
-
-
+
+
@@ -205,12 +205,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/install.html b/docs/build/html/install.html
index c40118b45..f20cc97f6 100644
--- a/docs/build/html/install.html
+++ b/docs/build/html/install.html
@@ -9,7 +9,7 @@
- Build and Install — MLX 0.2.0 documentation
+ Build and Install — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/objects.inv b/docs/build/html/objects.inv
index 5485cc867cfa7984fa04dc4921d8d274c97e605c..5da5df16b39019221684f88b345fd705cce00b69 100644
GIT binary patch for objects.inv (binary delta data not reproduced here)
- mlx.core.Device — MLX 0.2.0 documentation
+ mlx.core.Device — MLX 0.3.0 documentation
@@ -46,7 +46,7 @@
-
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -679,7 +693,8 @@ document.write(`
class mlx.core. Device
-
+A device to run operations on.
+
__init__ ( self : Device , type : mlx.core.DeviceType , index : int = 0 ) → None
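Since the hunk above only adds the one-line docstring and the __init__ signature, a minimal usage sketch may help; it assumes the standard import mlx.core as mx alias and that mx.DeviceType exposes cpu and gpu members, which is an assumption drawn from the signature rather than from this diff.

import mlx.core as mx

# Construct a Device explicitly from a DeviceType, per the __init__ signature above
dev = mx.Device(mx.DeviceType.gpu, 0)

# The current default device can be inspected with mlx.core.default_device()
print(mx.default_device())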
@@ -725,11 +740,11 @@ document.write(`
next
-
mlx.core.default_device
+
mlx.core.Stream
diff --git a/docs/build/html/python/_autosummary/mlx.core.Dtype.html b/docs/build/html/python/_autosummary/mlx.core.Dtype.html
index 834073fbb..ea3a337a4 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Dtype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Dtype.html
@@ -9,7 +9,7 @@
- mlx.core.Dtype — MLX 0.2.0 documentation
+ mlx.core.Dtype — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.Stream.html b/docs/build/html/python/_autosummary/mlx.core.Stream.html
index 3a5f40ce4..5bb9eba44 100644
--- a/docs/build/html/python/_autosummary/mlx.core.Stream.html
+++ b/docs/build/html/python/_autosummary/mlx.core.Stream.html
@@ -9,7 +9,7 @@
- mlx.core.Stream — MLX 0.2.0 documentation
+ mlx.core.stream — MLX 0.3.0 documentation
@@ -43,11 +43,11 @@
-
+
-
-
+
+
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -578,7 +592,7 @@
-
- mlx.core.Stream
+ mlx.core.stream
@@ -675,31 +689,20 @@ document.write(`
-mlx.core.Stream
-
-
-class mlx.core. Stream
-
-
-__init__ ( self : Stream , index : int , device : Device ) → None
-
-
-Methods
-
-
-__init__
(self, index, device)
-
-
-
-
-Attributes
-
+mlx.core.stream
+
+
+mlx.core. stream ( s : Union [ None , Stream , Device ] ) → mlx.core.StreamContext
+Create a context manager to set the default device and stream.
+
+Parameters:
+s – The Stream or Device to set as the default.
+
+Returns:
+A context manager that sets the default device and stream.
+
+
+Example:
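The hunk cuts off before the example body, so here is a minimal sketch of how a context manager with this signature is typically used; the choice of mx.cpu as the argument is an illustrative assumption, not something shown in this diff.

import mlx.core as mx

a = mx.array([1.0, 2.0, 3.0])
b = mx.array([4.0, 5.0, 6.0])

# Operations inside the block use the CPU as the default device and stream
with mx.stream(mx.cpu):
    c = mx.add(a, b)

mx.eval(c)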
@@ -716,20 +719,20 @@ document.write(`
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.abs
-mlx.core. abs ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. abs ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise absolute value.
Parameters:
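A short usage sketch for the signature above, assuming the usual import mlx.core as mx alias:

import mlx.core as mx

x = mx.array([-2.0, 0.0, 3.5])
y = mx.abs(x)   # element-wise magnitude: [2.0, 0.0, 3.5]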
diff --git a/docs/build/html/python/_autosummary/mlx.core.add.html b/docs/build/html/python/_autosummary/mlx.core.add.html
index ab6758c77..c6f35148d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.add.html
+++ b/docs/build/html/python/_autosummary/mlx.core.add.html
@@ -9,7 +9,7 @@
- mlx.core.add — MLX 0.2.0 documentation
+ mlx.core.add — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.add
-mlx.core. add ( a : Union [ scalar , array ] , b : Union [ scalar , array ] , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. add ( a : Union [ scalar , array ] , b : Union [ scalar , array ] , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise addition.
Add two arrays with numpy-style broadcasting semantics. Either or both input arrays
can also be scalars.
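A brief sketch of the broadcasting behavior described above, assuming import mlx.core as mx:

import mlx.core as mx

a = mx.array([[1, 2], [3, 4]])
b = mx.array([10, 20])

c = mx.add(a, b)   # b is broadcast across the rows of a
d = mx.add(a, 1)   # either operand may also be a scalar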
diff --git a/docs/build/html/python/_autosummary/mlx.core.all.html b/docs/build/html/python/_autosummary/mlx.core.all.html
index e63c3c9db..ff06dff65 100644
--- a/docs/build/html/python/_autosummary/mlx.core.all.html
+++ b/docs/build/html/python/_autosummary/mlx.core.all.html
@@ -9,7 +9,7 @@
- mlx.core.all — MLX 0.2.0 documentation
+ mlx.core.all — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.all
-mlx.core. all ( a : array , / , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. all ( a : array , / , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
An and reduction over the given axes.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.allclose.html b/docs/build/html/python/_autosummary/mlx.core.allclose.html
index a41b4a0b4..67bffe425 100644
--- a/docs/build/html/python/_autosummary/mlx.core.allclose.html
+++ b/docs/build/html/python/_autosummary/mlx.core.allclose.html
@@ -9,7 +9,7 @@
- mlx.core.allclose — MLX 0.2.0 documentation
+ mlx.core.allclose — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.allclose
-mlx.core. allclose ( a : array , b : array , / , rtol : float = 1e-05 , atol : float = 1e-08 , * , equal_nan : bool = False , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. allclose ( a : array , b : array , / , rtol : float = 1e-05 , atol : float = 1e-08 , * , equal_nan : bool = False , stream : Union [ None , Stream , Device ] = None ) → array
Approximate comparison of two arrays.
Infinite values are considered equal if they have the same sign; NaN values are not equal unless equal_nan is True.
The arrays are considered equal if:
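The hunk is truncated before the comparison criterion; the standard numpy-style rule, abs(a - b) <= (atol + rtol * abs(b)), is stated here as a reconstruction rather than something visible in the diff. A minimal sketch, assuming import mlx.core as mx:

import mlx.core as mx

a = mx.array([1.0, 2.0, float("nan")])
b = mx.array([1.0 + 1e-7, 2.0, float("nan")])

mx.allclose(a, b)                   # NaN != NaN by default
mx.allclose(a, b, equal_nan=True)   # treat NaNs as equal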
diff --git a/docs/build/html/python/_autosummary/mlx.core.any.html b/docs/build/html/python/_autosummary/mlx.core.any.html
index a662e5079..6a876441d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.any.html
+++ b/docs/build/html/python/_autosummary/mlx.core.any.html
@@ -9,7 +9,7 @@
- mlx.core.any — MLX 0.2.0 documentation
+ mlx.core.any — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.any
-mlx.core. any ( a : array , / , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. any ( a : array , / , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
An or reduction over the given axes.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.arange.html b/docs/build/html/python/_autosummary/mlx.core.arange.html
index 354393787..303a50f82 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arange.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arange.html
@@ -9,7 +9,7 @@
- mlx.core.arange — MLX 0.2.0 documentation
+ mlx.core.arange — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arange
-mlx.core. arange ( start , stop , step , dtype : Optional [ Dtype ] = None , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arange ( start , stop , step , dtype : Optional [ Dtype ] = None , * , stream : Union [ None , Stream , Device ] = None ) → array
Generates ranges of numbers.
Generate numbers in the half-open interval [start, stop) in increments of step.
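A short sketch of the half-open interval semantics described above, assuming import mlx.core as mx:

import mlx.core as mx

mx.arange(0, 10, 2)        # [0, 2, 4, 6, 8]; the stop value 10 is excluded
mx.arange(0.0, 1.0, 0.25)  # [0.0, 0.25, 0.5, 0.75]; 1.0 is never included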
diff --git a/docs/build/html/python/_autosummary/mlx.core.arccos.html b/docs/build/html/python/_autosummary/mlx.core.arccos.html
index 68f53cd96..f2228f316 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arccos.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arccos.html
@@ -9,7 +9,7 @@
- mlx.core.arccos — MLX 0.2.0 documentation
+ mlx.core.arccos — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arccos
-mlx.core. arccos ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arccos ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise inverse cosine.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.arccosh.html b/docs/build/html/python/_autosummary/mlx.core.arccosh.html
index 1dfdfe8f1..82819d070 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arccosh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arccosh.html
@@ -9,7 +9,7 @@
- mlx.core.arccosh — MLX 0.2.0 documentation
+ mlx.core.arccosh — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arccosh
-mlx.core. arccosh ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arccosh ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise inverse hyperbolic cosine.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsin.html b/docs/build/html/python/_autosummary/mlx.core.arcsin.html
index 0060b83b8..7ac16d2f8 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arcsin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arcsin.html
@@ -9,7 +9,7 @@
- mlx.core.arcsin — MLX 0.2.0 documentation
+ mlx.core.arcsin — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arcsin
-mlx.core. arcsin ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arcsin ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise inverse sine.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
index ebac630d2..8ae5bb763 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arcsinh.html
@@ -9,7 +9,7 @@
- mlx.core.arcsinh — MLX 0.2.0 documentation
+ mlx.core.arcsinh — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arcsinh
-mlx.core. arcsinh ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arcsinh ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise inverse hyperbolic sine.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.arctan.html b/docs/build/html/python/_autosummary/mlx.core.arctan.html
index 8ce6a8e35..c1f632425 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arctan.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arctan.html
@@ -9,7 +9,7 @@
- mlx.core.arctan — MLX 0.2.0 documentation
+ mlx.core.arctan — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arctan
-mlx.core. arctan ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arctan ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise inverse tangent.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.arctanh.html b/docs/build/html/python/_autosummary/mlx.core.arctanh.html
index b2abe480a..0a5c41eee 100644
--- a/docs/build/html/python/_autosummary/mlx.core.arctanh.html
+++ b/docs/build/html/python/_autosummary/mlx.core.arctanh.html
@@ -9,7 +9,7 @@
- mlx.core.arctanh — MLX 0.2.0 documentation
+ mlx.core.arctanh — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.arctanh
-mlx.core. arctanh ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. arctanh ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise inverse hyperbolic tangent.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.argmax.html b/docs/build/html/python/_autosummary/mlx.core.argmax.html
index 4fcda6728..0ae3fd951 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argmax.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argmax.html
@@ -9,7 +9,7 @@
- mlx.core.argmax — MLX 0.2.0 documentation
+ mlx.core.argmax — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.argmax
-mlx.core. argmax ( a : array , / , axis : Union [ None , int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. argmax ( a : array , / , axis : Union [ None , int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
Indices of the maximum values along the axis.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.argmin.html b/docs/build/html/python/_autosummary/mlx.core.argmin.html
index 665687543..35b06229d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argmin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argmin.html
@@ -9,7 +9,7 @@
- mlx.core.argmin — MLX 0.2.0 documentation
+ mlx.core.argmin — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.argmin
-mlx.core. argmin ( a : array , / , axis : Union [ None , int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. argmin ( a : array , / , axis : Union [ None , int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
Indices of the minimum values along the axis.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.argpartition.html b/docs/build/html/python/_autosummary/mlx.core.argpartition.html
index fa3698063..fc9fc3d94 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argpartition.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argpartition.html
@@ -9,7 +9,7 @@
- mlx.core.argpartition — MLX 0.2.0 documentation
+ mlx.core.argpartition — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.argpartition
-mlx.core. argpartition ( a : array , / , kth : int , axis : Union [ None , int ] = - 1 , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. argpartition ( a : array , / , kth : int , axis : Union [ None , int ] = - 1 , * , stream : Union [ None , Stream , Device ] = None ) → array
Returns the indices that partition the array.
The ordering of the elements within a partition given by the indices is undefined.
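A small sketch of the partial-ordering guarantee described above, assuming import mlx.core as mx; the use of mx.take here is only for illustration.

import mlx.core as mx

x = mx.array([7, 1, 5, 3, 9, 2])
idx = mx.argpartition(x, kth=2)
part = mx.take(x, idx)
# part[2] is the value that would land at index 2 in a full sort;
# elements before it are no larger and elements after it are no smaller,
# but the order within each side is unspecified.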
diff --git a/docs/build/html/python/_autosummary/mlx.core.argsort.html b/docs/build/html/python/_autosummary/mlx.core.argsort.html
index ac2a7022f..9267872eb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.argsort.html
+++ b/docs/build/html/python/_autosummary/mlx.core.argsort.html
@@ -9,7 +9,7 @@
- mlx.core.argsort — MLX 0.2.0 documentation
+ mlx.core.argsort — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.argsort
-mlx.core. argsort ( a : array , / , axis : Union [ None , int ] = - 1 , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. argsort ( a : array , / , axis : Union [ None , int ] = - 1 , * , stream : Union [ None , Stream , Device ] = None ) → array
Returns the indices that sort the array.
Parameters:
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.T.html b/docs/build/html/python/_autosummary/mlx.core.array.T.html
index 9992d96f9..b03ecc0ec 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.T.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.T.html
@@ -9,7 +9,7 @@
- mlx.core.array.T — MLX 0.2.0 documentation
+ mlx.core.array.T — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.abs.html b/docs/build/html/python/_autosummary/mlx.core.array.abs.html
index 4ce7ef032..cabc95346 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.abs.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.abs.html
@@ -9,7 +9,7 @@
- mlx.core.array.abs — MLX 0.2.0 documentation
+ mlx.core.array.abs — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.abs
-array. abs ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. abs ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See abs()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.all.html b/docs/build/html/python/_autosummary/mlx.core.array.all.html
index 2389f82f9..d54b61fca 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.all.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.all.html
@@ -9,7 +9,7 @@
- mlx.core.array.all — MLX 0.2.0 documentation
+ mlx.core.array.all — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.all
-array. all ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. all ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See all()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.any.html b/docs/build/html/python/_autosummary/mlx.core.array.any.html
index 6c800392f..e0d01f868 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.any.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.any.html
@@ -9,7 +9,7 @@
- mlx.core.array.any — MLX 0.2.0 documentation
+ mlx.core.array.any — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.any
-array. any ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. any ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See any()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.argmax.html b/docs/build/html/python/_autosummary/mlx.core.array.argmax.html
index 50794bf36..0df48637a 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.argmax.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.argmax.html
@@ -9,7 +9,7 @@
- mlx.core.array.argmax — MLX 0.2.0 documentation
+ mlx.core.array.argmax — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.argmax
-array. argmax ( self : array , axis : Optional [ int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. argmax ( self : array , axis : Optional [ int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See argmax()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.argmin.html b/docs/build/html/python/_autosummary/mlx.core.array.argmin.html
index e70fa73c2..d1de6bf6b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.argmin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.argmin.html
@@ -9,7 +9,7 @@
- mlx.core.array.argmin — MLX 0.2.0 documentation
+ mlx.core.array.argmin — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.argmin
-array. argmin ( self : array , axis : Optional [ int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. argmin ( self : array , axis : Optional [ int ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See argmin()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.astype.html b/docs/build/html/python/_autosummary/mlx.core.array.astype.html
index e31a40933..7b76aa42a 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.astype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.astype.html
@@ -9,7 +9,7 @@
- mlx.core.array.astype — MLX 0.2.0 documentation
+ mlx.core.array.astype — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,13 +692,13 @@ document.write(`
mlx.core.array.astype
-array. astype ( self : array , dtype : Dtype , stream : Union [ None , Stream , Device ] = None ) → array
+array. astype ( self : array , dtype : Dtype , stream : Union [ None , Stream , Device ] = None ) → array
Cast the array to a specified type.
Parameters:
dtype (Dtype ) – Type to which the array is cast.
-stream (Stream ) – Stream (or device) for the operation.
+stream (Stream ) – Stream (or device) for the operation.
Returns:
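A one-line sketch of the cast described above, assuming import mlx.core as mx:

import mlx.core as mx

x = mx.array([1.7, 2.3, 3.9])
y = x.astype(mx.int32)   # now an int32 array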
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.cos.html b/docs/build/html/python/_autosummary/mlx.core.array.cos.html
index fea79d466..af15a6cc4 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.cos.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.cos.html
@@ -9,7 +9,7 @@
- mlx.core.array.cos — MLX 0.2.0 documentation
+ mlx.core.array.cos — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.cos
-array. cos ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. cos ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See cos()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.dtype.html b/docs/build/html/python/_autosummary/mlx.core.array.dtype.html
index de356e920..8d921c243 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.dtype.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.dtype.html
@@ -9,7 +9,7 @@
- mlx.core.array.dtype — MLX 0.2.0 documentation
+ mlx.core.array.dtype — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.exp.html b/docs/build/html/python/_autosummary/mlx.core.array.exp.html
index 0ce211cc6..3d0b21ebb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.exp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.exp.html
@@ -9,7 +9,7 @@
- mlx.core.array.exp — MLX 0.2.0 documentation
+ mlx.core.array.exp — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.exp
-array. exp ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. exp ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See exp()
.
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.html b/docs/build/html/python/_autosummary/mlx.core.array.html
index 670597595..a25731251 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.html
@@ -9,7 +9,7 @@
- mlx.core.array — MLX 0.2.0 documentation
+ mlx.core.array — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.item.html b/docs/build/html/python/_autosummary/mlx.core.array.item.html
index f724ba20b..81b4093d6 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.item.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.item.html
@@ -9,7 +9,7 @@
- mlx.core.array.item — MLX 0.2.0 documentation
+ mlx.core.array.item — MLX 0.3.0 documentation
@@ -134,8 +134,8 @@
-
-
+
+
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.log.html b/docs/build/html/python/_autosummary/mlx.core.array.log.html
index 475be716a..828651f03 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.log.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.log.html
@@ -9,7 +9,7 @@
- mlx.core.array.log — MLX 0.2.0 documentation
+ mlx.core.array.log — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.log
-array. log ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. log ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See log().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.log1p.html b/docs/build/html/python/_autosummary/mlx.core.array.log1p.html
index 70cf7d89f..4bd42c970 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.log1p.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.log1p.html
@@ -9,7 +9,7 @@
- mlx.core.array.log1p — MLX 0.2.0 documentation
+ mlx.core.array.log1p — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.log1p
-array. log1p ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. log1p ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See log1p().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html b/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html
index 5a46453dc..7aa1b39e6 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.logsumexp.html
@@ -9,7 +9,7 @@
- mlx.core.array.logsumexp — MLX 0.2.0 documentation
+ mlx.core.array.logsumexp — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.logsumexp
-array. logsumexp ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. logsumexp ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See logsumexp().
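For context only (this is not part of the diff), a minimal sketch of the reduction this signature describes, assuming the usual import alias mlx.core as mx and made-up input values:

import mlx.core as mx

x = mx.array([0.0, 0.0])
# log(exp(0) + exp(0)) = log(2), about 0.693, computed in a numerically stable way
print(x.logsumexp())
# axis and keepdims behave like the other reductions: reduce axis 1, keep it as size 1
print(mx.zeros((2, 3)).logsumexp(axis=1, keepdims=True).shape)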
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.max.html b/docs/build/html/python/_autosummary/mlx.core.array.max.html
index dc9642039..6aeb95a34 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.max.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.max.html
@@ -9,7 +9,7 @@
- mlx.core.array.max — MLX 0.2.0 documentation
+ mlx.core.array.max — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.max
-array. max ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. max ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See max().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.mean.html b/docs/build/html/python/_autosummary/mlx.core.array.mean.html
index ce1cd6b72..293d503a0 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.mean.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.mean.html
@@ -9,7 +9,7 @@
- mlx.core.array.mean — MLX 0.2.0 documentation
+ mlx.core.array.mean — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.mean
-array. mean ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. mean ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See mean().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.min.html b/docs/build/html/python/_autosummary/mlx.core.array.min.html
index 778f9e915..dff71c243 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.min.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.min.html
@@ -9,7 +9,7 @@
- mlx.core.array.min — MLX 0.2.0 documentation
+ mlx.core.array.min — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.min
-array. min ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. min ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See min().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.ndim.html b/docs/build/html/python/_autosummary/mlx.core.array.ndim.html
index cfffb6b2d..4bd6f259d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.ndim.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.ndim.html
@@ -9,7 +9,7 @@
- mlx.core.array.ndim — MLX 0.2.0 documentation
+ mlx.core.array.ndim — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.prod.html b/docs/build/html/python/_autosummary/mlx.core.array.prod.html
index 7a7d1cac3..503bc76b5 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.prod.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.prod.html
@@ -9,7 +9,7 @@
- mlx.core.array.prod — MLX 0.2.0 documentation
+ mlx.core.array.prod — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.prod
-array. prod ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. prod ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See prod().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html b/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html
index 708d9ea9f..9c57a5d5a 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.reciprocal.html
@@ -9,7 +9,7 @@
- mlx.core.array.reciprocal — MLX 0.2.0 documentation
+ mlx.core.array.reciprocal — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.reciprocal
-array. reciprocal ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. reciprocal ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See reciprocal().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.reshape.html b/docs/build/html/python/_autosummary/mlx.core.array.reshape.html
index 12f29bea2..995d3b527 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.reshape.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.reshape.html
@@ -9,7 +9,7 @@
- mlx.core.array.reshape — MLX 0.2.0 documentation
+ mlx.core.array.reshape — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.reshape
-array. reshape ( self : array , * args , stream : Union [ None , Stream , Device ] = None ) → array
+array. reshape ( self : array , * args , stream : Union [ None , Stream , Device ] = None ) → array
Equivalent to reshape() but the shape can be passed either as a tuple or as separate arguments. See reshape() for full documentation.
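As a quick, illustrative sketch of the calling convention described above (shape as a tuple or as separate arguments), assuming the usual import alias mlx.core as mx:

import mlx.core as mx

x = mx.arange(6)
a = x.reshape(2, 3)    # shape passed as separate arguments
b = x.reshape((2, 3))  # shape passed as a tuple
print(mx.array_equal(a, b))  # True: both spellings give the same 2x3 array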
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.round.html b/docs/build/html/python/_autosummary/mlx.core.array.round.html
index bb211866d..a720e8f3b 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.round.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.round.html
@@ -9,7 +9,7 @@
- mlx.core.array.round — MLX 0.2.0 documentation
+ mlx.core.array.round — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.round
-array. round ( self : array , / , decimals : int = 0 , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. round ( self : array , / , decimals : int = 0 , * , stream : Union [ None , Stream , Device ] = None ) → array
See round().
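A small, hypothetical usage example for the decimals parameter shown in this signature, assuming import mlx.core as mx:

import mlx.core as mx

x = mx.array([1.2345, 2.5678])
print(x.round())            # decimals defaults to 0: [1.0, 3.0]
print(x.round(decimals=2))  # keep two decimal places: roughly [1.23, 2.57]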
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html b/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html
index 77bd2baef..d2f4c1b55 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.rsqrt.html
@@ -9,7 +9,7 @@
- mlx.core.array.rsqrt — MLX 0.2.0 documentation
+ mlx.core.array.rsqrt — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.rsqrt
-array. rsqrt ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. rsqrt ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See rsqrt().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.shape.html b/docs/build/html/python/_autosummary/mlx.core.array.shape.html
index 0a7f4714b..e90b189ac 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.shape.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.shape.html
@@ -9,7 +9,7 @@
- mlx.core.array.shape — MLX 0.2.0 documentation
+ mlx.core.array.shape — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.sin.html b/docs/build/html/python/_autosummary/mlx.core.array.sin.html
index c454129d7..eb6a601f9 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.sin.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.sin.html
@@ -9,7 +9,7 @@
- mlx.core.array.sin — MLX 0.2.0 documentation
+ mlx.core.array.sin — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.sin
-array. sin ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. sin ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See sin().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.size.html b/docs/build/html/python/_autosummary/mlx.core.array.size.html
index 86d8e62e9..01aaaa111 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.size.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.size.html
@@ -9,7 +9,7 @@
- mlx.core.array.size — MLX 0.2.0 documentation
+ mlx.core.array.size — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.split.html b/docs/build/html/python/_autosummary/mlx.core.array.split.html
index b30b04af3..ca70f4a93 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.split.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.split.html
@@ -9,7 +9,7 @@
- mlx.core.array.split — MLX 0.2.0 documentation
+ mlx.core.array.split — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.split
-array. split ( self : array , indices_or_sections : Union [ int , List [ int ] ] , axis : int = 0 , * , stream : Union [ None , Stream , Device ] = None ) → List [ array ]
+array. split ( self : array , indices_or_sections : Union [ int , List [ int ] ] , axis : int = 0 , * , stream : Union [ None , Stream , Device ] = None ) → List [ array ]
See split().
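To make the indices_or_sections parameter concrete, a short sketch (not part of the diff), assuming import mlx.core as mx:

import mlx.core as mx

x = mx.arange(6)
parts = x.split(3)        # an int asks for 3 equal sections: [0 1], [2 3], [4 5]
chunks = x.split([2, 4])  # a list gives explicit split points, producing the same 3 pieces here
print(len(parts), len(chunks))  # 3 3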
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html b/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html
index 903564882..3272c44a9 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.sqrt.html
@@ -9,7 +9,7 @@
- mlx.core.array.sqrt — MLX 0.2.0 documentation
+ mlx.core.array.sqrt — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.sqrt
-array. sqrt ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. sqrt ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See sqrt().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.square.html b/docs/build/html/python/_autosummary/mlx.core.array.square.html
index 82056ee04..f3bf7a84c 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.square.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.square.html
@@ -9,7 +9,7 @@
- mlx.core.array.square — MLX 0.2.0 documentation
+ mlx.core.array.square — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.square
-array. square ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. square ( self : array , * , stream : Union [ None , Stream , Device ] = None ) → array
See square().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.sum.html b/docs/build/html/python/_autosummary/mlx.core.array.sum.html
index d4739dd8c..8dabf375d 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.sum.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.sum.html
@@ -9,7 +9,7 @@
- mlx.core.array.sum — MLX 0.2.0 documentation
+ mlx.core.array.sum — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.sum
-array. sum ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. sum ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , * , stream : Union [ None , Stream , Device ] = None ) → array
See sum().
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.tolist.html b/docs/build/html/python/_autosummary/mlx.core.array.tolist.html
index dad6a41fe..89789abee 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.tolist.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.tolist.html
@@ -9,7 +9,7 @@
- mlx.core.array.tolist — MLX 0.2.0 documentation
+ mlx.core.array.tolist — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.transpose.html b/docs/build/html/python/_autosummary/mlx.core.array.transpose.html
index f701c0b8e..c3a1172eb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.transpose.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.transpose.html
@@ -9,7 +9,7 @@
- mlx.core.array.transpose — MLX 0.2.0 documentation
+ mlx.core.array.transpose — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.transpose
-array. transpose ( self : array , * args , stream : Union [ None , Stream , Device ] = None ) → array
+array. transpose ( self : array , * args , stream : Union [ None , Stream , Device ] = None ) → array
Equivalent to transpose() but the axes can be passed either as a tuple or as separate arguments. See transpose() for full documentation.
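An illustrative sketch of the two equivalent ways of passing axes mentioned above, assuming import mlx.core as mx:

import mlx.core as mx

x = mx.arange(6).reshape(2, 3)
a = x.transpose(1, 0)    # axes as separate arguments
b = x.transpose((1, 0))  # axes as a tuple
print(mx.array_equal(a, b))  # True: both give the 3x2 transpose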
diff --git a/docs/build/html/python/_autosummary/mlx.core.array.var.html b/docs/build/html/python/_autosummary/mlx.core.array.var.html
index 025eac6ae..fbfae3d39 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array.var.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array.var.html
@@ -9,7 +9,7 @@
- mlx.core.array.var — MLX 0.2.0 documentation
+ mlx.core.array.var — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array.var
-array. var ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , ddof : int = 0 , * , stream : Union [ None , Stream , Device ] = None ) → array
+array. var ( self : array , axis : Union [ None , int , List [ int ] ] = None , keepdims : bool = False , ddof : int = 0 , * , stream : Union [ None , Stream , Device ] = None ) → array
See var().
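A brief example of the ddof (delta degrees of freedom) argument in this signature, with made-up values and the usual import mlx.core as mx:

import mlx.core as mx

x = mx.array([1.0, 2.0, 3.0, 4.0])
print(x.var())        # population variance, ddof=0: 1.25
print(x.var(ddof=1))  # sample variance, divides by n - 1: about 1.667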
diff --git a/docs/build/html/python/_autosummary/mlx.core.array_equal.html b/docs/build/html/python/_autosummary/mlx.core.array_equal.html
index 56c3b2794..4a543eae8 100644
--- a/docs/build/html/python/_autosummary/mlx.core.array_equal.html
+++ b/docs/build/html/python/_autosummary/mlx.core.array_equal.html
@@ -9,7 +9,7 @@
- mlx.core.array_equal — MLX 0.2.0 documentation
+ mlx.core.array_equal — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.array_equal
-mlx.core. array_equal ( a : Union [ scalar , array ] , b : Union [ scalar , array ] , equal_nan : bool = False , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. array_equal ( a : Union [ scalar , array ] , b : Union [ scalar , array ] , equal_nan : bool = False , stream : Union [ None , Stream , Device ] = None ) → array
Array equality check.
Compare two arrays for equality. Returns True if and only if the arrays have the same shape and their values are equal. The arrays need not have the same type to be considered equal.
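To illustrate the behaviour described here (same shape and values required, dtype may differ), a minimal sketch assuming import mlx.core as mx:

import mlx.core as mx

a = mx.array([1, 2, 3])
b = mx.array([1.0, 2.0, 3.0])
print(mx.array_equal(a, b))                 # True: same shape and values, different dtypes
print(mx.array_equal(a, mx.array([1, 2])))  # False: shapes differ
nan = mx.array([float("nan")])
print(mx.array_equal(nan, nan, equal_nan=True))  # True only because equal_nan=True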
diff --git a/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html b/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html
index c0edb9d9e..7cb111e25 100644
--- a/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html
+++ b/docs/build/html/python/_autosummary/mlx.core.broadcast_to.html
@@ -9,7 +9,7 @@
- mlx.core.broadcast_to — MLX 0.2.0 documentation
+ mlx.core.broadcast_to — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.broadcast_to
-mlx.core. broadcast_to ( a : Union [ scalar , array ] , / , shape : List [ int ] , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. broadcast_to ( a : Union [ scalar , array ] , / , shape : List [ int ] , * , stream : Union [ None , Stream , Device ] = None ) → array
Broadcast an array to the given shape.
The broadcasting semantics are the same as Numpy.
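A quick illustration of the NumPy-style broadcasting mentioned above (example values are made up), assuming import mlx.core as mx:

import mlx.core as mx

row = mx.array([1, 2, 3])             # shape (3,)
tiled = mx.broadcast_to(row, [2, 3])  # the row is broadcast up to shape (2, 3)
print(tiled)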
diff --git a/docs/build/html/python/_autosummary/mlx.core.ceil.html b/docs/build/html/python/_autosummary/mlx.core.ceil.html
index ae3355d0e..c54e380bb 100644
--- a/docs/build/html/python/_autosummary/mlx.core.ceil.html
+++ b/docs/build/html/python/_autosummary/mlx.core.ceil.html
@@ -9,7 +9,7 @@
- mlx.core.ceil — MLX 0.2.0 documentation
+ mlx.core.ceil — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.ceil
-mlx.core. ceil ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. ceil ( a : array , / , * , stream : Union [ None , Stream , Device ] = None ) → array
Element-wise ceil.
Parameters:
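For completeness, a one-line usage sketch of element-wise ceil (illustrative only, assuming import mlx.core as mx):

import mlx.core as mx

print(mx.ceil(mx.array([0.2, 1.5, -1.3])))  # [1.0, 2.0, -1.0]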
diff --git a/docs/build/html/python/_autosummary/mlx.core.clip.html b/docs/build/html/python/_autosummary/mlx.core.clip.html
index 896a401f9..ba61439d9 100644
--- a/docs/build/html/python/_autosummary/mlx.core.clip.html
+++ b/docs/build/html/python/_autosummary/mlx.core.clip.html
@@ -9,7 +9,7 @@
- mlx.core.clip — MLX 0.2.0 documentation
+ mlx.core.clip — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers
-Optimizer
-mlx.optimizers.Optimizer.state
-mlx.optimizers.Optimizer.apply_gradients
-mlx.optimizers.Optimizer.init
-mlx.optimizers.Optimizer.update
+Optimizer
-mlx.optimizers.SGD
-mlx.optimizers.RMSprop
-mlx.optimizers.Adagrad
-mlx.optimizers.Adafactor
-mlx.optimizers.AdaDelta
-mlx.optimizers.Adam
-mlx.optimizers.AdamW
-mlx.optimizers.Adamax
-mlx.optimizers.Lion
+Common Optimizers
-Tree Utils
+
+Tree Utils
mlx.utils.tree_flatten
mlx.utils.tree_unflatten
mlx.utils.tree_map
@@ -678,7 +692,7 @@ document.write(`
mlx.core.clip
-mlx.core. clip ( a : array , / , a_min : Union [ scalar , array , None ] , a_max : Union [ scalar , array , None ] , * , stream : Union [ None , Stream , Device ] = None ) → array
+mlx.core. clip ( a : array , / , a_min : Union [ scalar , array , None ] , a_max : Union [ scalar , array , None ] , * , stream : Union [ None , Stream , Device ] = None ) → array
Clip the values of the array between the given minimum and maximum.
If either a_min or a_max is None, then the corresponding edge is ignored. At least one of a_min and a_max cannot be None.
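A hedged example of the a_min / a_max behaviour described above, with made-up values and assuming import mlx.core as mx:

import mlx.core as mx

x = mx.array([-2, 0, 3, 10])
print(mx.clip(x, 0, 5))     # both edges applied: [0, 0, 3, 5]
print(mx.clip(x, None, 5))  # a_min=None ignores the lower edge: [-2, 0, 3, 5]
# mx.clip(x, None, None) is invalid: at least one of a_min and a_max must be given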
diff --git a/docs/build/html/python/_autosummary/mlx.core.compile.html b/docs/build/html/python/_autosummary/mlx.core.compile.html
index 13a02bb33..5422e81cf 100644
--- a/docs/build/html/python/_autosummary/mlx.core.compile.html
+++ b/docs/build/html/python/_autosummary/mlx.core.compile.html
@@ -9,7 +9,7 @@
- mlx.core.compile — MLX 0.2.0 documentation
+ mlx.core.compile — MLX 0.3.0 documentation
@@ -206,12 +206,13 @@
Devices and Streams
Operations
Layers
Optimizers