
Commit

fix flash attn install
haixuanTao committed Feb 4, 2025
1 parent a247999 commit ebcda89
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions node-hub/dora-qwen2-5-vl/pyproject.toml
@@ -22,13 +22,13 @@ dependencies = [
"peft == 0.13.2",
"accelerate>=1.3.0",
"transformers",
"flash-attn>=v2.7.1; sys_platform != 'darwin'",
"flash-attn; sys_platform != 'darwin'",
]

[tool.uv]
no-build-isolation-package = ['flash-attn']

# flash_attn = "^2.6.1" # Install using: pip install -U flash-attn --no-build-isolation
[[tool.uv.dependency-metadata]]
name = "flash-attn"
version = ">=2.7.1"
requires-dist = ["torch"]


[dependency-groups]
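For reference, the two uv settings in this diff work together: `no-build-isolation-package` makes uv build flash-attn against the packages already installed in the environment (flash-attn's build imports torch, which an isolated build environment would not have), while `[[tool.uv.dependency-metadata]]` hands the resolver flash-attn's version and requirements up front so it never has to build the sdist just to read its metadata. Below is a minimal sketch of the same pattern in a standalone `pyproject.toml`; the project name, Python requirement, and explicit torch dependency are placeholders for illustration, not taken from this repository.

```toml
[project]
name = "flash-attn-example"        # placeholder project name, not from the repo
version = "0.1.0"
requires-python = ">=3.10"         # placeholder
dependencies = [
    "torch",                                      # flash-attn needs torch at build time
    "flash-attn; sys_platform != 'darwin'",       # unpinned here; the pin lives in dependency-metadata
]

[tool.uv]
# Build flash-attn in the project environment rather than an isolated
# build environment that would not contain torch.
no-build-isolation-package = ["flash-attn"]

# Static metadata for flash-attn so uv can resolve it without building the sdist.
[[tool.uv.dependency-metadata]]
name = "flash-attn"
version = ">=2.7.1"
requires-dist = ["torch"]
```

With this in place, `uv sync` (or `uv pip install -e .`) should install torch before building flash-attn, since torch appears in the declared `requires-dist`; on macOS the `sys_platform != 'darwin'` marker skips flash-attn entirely.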
