vllm/requirements.txt

ninja # For faster builds.
psutil
ray >= 2.5.1 # Required for distributed inference.
sentencepiece # Required for LLaMA tokenizer.
numpy
torch >= 2.0.0
transformers >= 4.33.1 # Required for Code Llama.
xformers >= 0.0.21 # Required for memory-efficient attention kernels.
fastapi # For the API server.
uvicorn # ASGI server for the API server.
pydantic < 2 # Required for OpenAI server.
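
For reference, entries such as "torch >= 2.0.0" and "pydantic < 2" are standard PEP 508 requirement strings, so an environment can be checked against them programmatically. The sketch below is not part of the repository; it assumes the packaging library is available (it ships with pip) and inlines the list above purely for illustration.

# Minimal sketch: verify the current environment against the specifiers above.
# Not part of vLLM; the inlined list simply mirrors requirements.txt.
from importlib.metadata import PackageNotFoundError, version

from packaging.requirements import Requirement

REQUIREMENTS = [
    "ninja",
    "psutil",
    "ray >= 2.5.1",
    "sentencepiece",
    "numpy",
    "torch >= 2.0.0",
    "transformers >= 4.33.1",
    "xformers >= 0.0.21",
    "fastapi",
    "uvicorn",
    "pydantic < 2",
]

for line in REQUIREMENTS:
    req = Requirement(line)
    try:
        installed = version(req.name)
    except PackageNotFoundError:
        print(f"{req.name}: not installed")
        continue
    if req.specifier.contains(installed, prereleases=True):
        print(f"{req.name} {installed}: OK")
    else:
        print(f"{req.name} {installed}: violates '{req.specifier}'")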