Fixing linux installation of flash-attn
haixuanTao committed Feb 4, 2025 · 1 parent f801931 · commit 1f6d109
Showing 1 changed file with 8 additions and 4 deletions.
12 changes: 8 additions & 4 deletions node-hub/dora-qwen2-5-vl/pyproject.toml
@@ -1,5 +1,5 @@
 [project]
-name = "dora-qwenvl"
+name = "dora-qwen2-5-vl"
 version = "0.3.8"
 authors = [
 { name = "Haixuan Xavier Tao", email = "[email protected]" },
@@ -22,14 +22,18 @@ dependencies = [
 "peft == 0.13.2",
 "accelerate>=1.3.0",
 "transformers",
-"flash-attn; sys_platform != 'darwin'",
+"setuptools>=65.0.0",
+"flash-attn>=2.7.1; sys_platform != 'darwin'",
 ]

 [[tool.uv.dependency-metadata]]
 name = "flash-attn"
-version = ">=2.7.1"
+version = "2.7.1"
 requires-dist = ["torch"]

+[tool.uv]
+no-build-isolation-package = ['flash-attn']
+

 [dependency-groups]
 dev = ["pytest >=8.1.1", "ruff >=0.9.1"]
@@ -41,5 +45,5 @@ transformers = { git = "https://github.com/huggingface/transformers" }
 dora-qwenvl = "dora_qwenvl.main:main"

 [build-system]
-requires = ["setuptools", "setuptools-scm"]
+requires = ["setuptools", "setuptools-scm", "torch"]
 build-backend = "setuptools.build_meta"

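Why this change fixes the Linux install: flash-attn's source build imports torch from its setup script, so building it in an isolated build environment (which contains only the declared build requirements) fails before compilation even starts. The diff addresses this on two fronts: `[[tool.uv.dependency-metadata]]` hands uv the package's metadata up front, so dependency resolution never needs to build flash-attn, and `[tool.uv] no-build-isolation-package` makes uv compile flash-attn inside the project environment, where torch is already present. Below is a minimal consolidated sketch of the relevant configuration after this commit, assembled from the hunks above with explanatory comments; it is an illustrative excerpt, not the verbatim file.

```toml
# Sketch: the flash-attn-related pieces of pyproject.toml after this commit
# (assembled from the diff hunks above; comments state the presumed intent).

[project]
dependencies = [
    "setuptools>=65.0.0",
    # Require a recent release; skip on macOS, which flash-attn does not support.
    "flash-attn>=2.7.1; sys_platform != 'darwin'",
]

# Give uv flash-attn's metadata directly, so dependency resolution does not
# have to build the sdist (whose setup.py needs torch to be importable).
[[tool.uv.dependency-metadata]]
name = "flash-attn"
version = "2.7.1"
requires-dist = ["torch"]

# Build flash-attn without isolation, i.e. in the project environment where
# torch is already installed, instead of a bare isolated build environment.
[tool.uv]
no-build-isolation-package = ['flash-attn']

[build-system]
# torch added here, presumably so it is importable when this package's own
# sdist is built (an assumption; the commit message does not elaborate).
requires = ["setuptools", "setuptools-scm", "torch"]
build-backend = "setuptools.build_meta"
```

With this in place, `uv sync` (or `uv pip install -e .`) should resolve flash-attn from the declared metadata and build it against the torch already in the environment.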