py-flash-attn: add v2.5.6 -> main (#44894)

* Add latest releases of py-flash-attn

* Add main branch for flash attention

* Add additional requirements
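
For reference (not part of the original commit message): once this recipe update is in a local Spack checkout, the newly added versions and the development branch can be requested directly on the spec, e.g.

    spack install py-flash-attn@2.5.6
    spack install py-flash-attn@main

The exact concretization depends on the local Spack version, compiler, and CUDA toolchain.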
Author: Auriane R, 2024-07-05 14:19:01 +02:00 (committed by GitHub)
parent 1d8bdcfc04
commit 5ab814505e

var/spack/repos/builtin/packages/py-flash-attn/package.py

@@ -11,17 +11,30 @@ class PyFlashAttn(PythonPackage):
    This package provides the official implementation of FlashAttention.
    """
    pypi = "flash-attn/flash_attn-2.5.4.tar.gz"
    homepage = "https://github.com/Dao-AILab/flash-attention.git"
    pypi = "flash-attn/flash_attn-0.0.0.tar.gz"
    git = "https://github.com/Dao-AILab/flash-attention.git"
    maintainers("aurianer")
    license("BSD")
    version("main", branch="main")
    version(
        "2.5.9.post1", sha256="a92db1683a5b141a0f4371d251ae9f73e9aef629b3a58a50d0ef430266c68782"
    )
    version("2.5.8", sha256="2e5b2bcff6d5cff40d494af91ecd1eb3c5b4520a6ce7a0a8b1f9c1ed129fb402")
    version("2.5.7", sha256="7c079aef4e77c4e9a71a3cd88662362e0fe82f658db0b2dbff6f279de2a387a8")
    version("2.5.6", sha256="d25801aa060877cad997939bd7130faf620fdbeda947c3ffde5865906d430c36")
    version("2.5.5", sha256="751cee17711d006fe7341cdd78584af86a6239afcfe43b9ed11c84db93126267")
    version("2.5.4", sha256="d83bb427b517b07e9db655f6e5166eb2607dccf4d6ca3229e3a3528c206b0175")
    version("2.4.2", sha256="eb822a8c4219b610e9d734cbc8cd9ee4547f27433815a2b90dc1462766feefc1")
    depends_on("py-setuptools", type="build")
    with default_args(type="build"):
        depends_on("py-ninja")
        depends_on("py-packaging")
        depends_on("py-psutil")
        depends_on("py-setuptools")
    with default_args(type=("build", "run")):
        depends_on("py-torch+cuda")
@@ -32,6 +45,4 @@ class PyFlashAttn(PythonPackage):
    with default_args(type=("build", "link", "run")):
        depends_on("py-pybind11")
    depends_on("py-psutil", type="build")
    depends_on("python@3.7:", type=("build", "run"))