transformers==4.41.2
torch==2.3.0
accelerate==0.34.2
fastapi==0.115.0
uvicorn[standard]==0.30.0
llama-cpp-python==0.2.77
huggingface_hub==0.24.0