torch==2.6.0
transformers==4.46.3
tokenizers==0.20.3
einops
addict
easydict
gradio>=4.0.0
spaces>=0.20.0
Pillow>=10.0.0
safetensors>=0.4.0
accelerate>=0.24.0
sentencepiece>=0.1.99
protobuf>=3.20.0
torchvision
flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
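# Note: the flash-attn wheel above is platform-specific — per its filename it targets
# CUDA 12, torch 2.6 (matching the torch==2.6.0 pin), CPython 3.10, Linux x86_64,
# cxx11abi=FALSE. On other Python versions, CUDA versions, or operating systems,
# pick a matching wheel from the flash-attention releases page or build from source.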