py-flash-attn: add missing triton dependency (#48645)

This commit is contained in:
Thomas Bouvier 2025-01-28 10:43:27 +00:00 committed by GitHub
parent 87e65e5377
commit a50c45f00c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 5 additions and 4 deletions

View File

@@ -14,7 +14,7 @@ class PyFlashAttn(PythonPackage):
pypi = "flash-attn/flash_attn-0.0.0.tar.gz"
git = "https://github.com/Dao-AILab/flash-attention.git"
maintainers("aurianer")
maintainers("aurianer", "thomas-bouvier")
license("BSD")
@@ -41,6 +41,7 @@ class PyFlashAttn(PythonPackage):
with default_args(type=("build", "run")):
depends_on("py-torch+cuda")
depends_on("py-einops")
depends_on("py-triton")
with default_args(type=("build", "link", "run")):
depends_on("py-pybind11")

View File

@@ -8,9 +8,9 @@
class PyTriton(PythonPackage):
"""A language and compiler for custom Deep Learning operations."""
homepage = "https://github.com/openai/triton"
url = "https://github.com/openai/triton/archive/refs/tags/v2.1.0.tar.gz"
git = "https://github.com/openai/triton.git"
homepage = "https://github.com/triton-lang/triton"
url = "https://github.com/triton-lang/triton/archive/refs/tags/v2.1.0.tar.gz"
git = "https://github.com/triton-lang/triton.git"
license("MIT")