# vllm/requirements.txt
ninja # For faster builds.
psutil
ray # Required for distributed serving (tensor parallelism).
sentencepiece # Required for LLaMA tokenizer.
numpy
torch >= 2.0.0
transformers >= 4.28.0 # Required for LLaMA.
xformers >= 0.0.19
fastapi # Required for the API server.
uvicorn # ASGI server that runs the FastAPI app.
pydantic < 2 # Required for the OpenAI server.
fschat # Required for the OpenAI ChatCompletion endpoint.
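
As an illustrative aside, the version-constrained entries above can be checked programmatically; the sketch below is hypothetical (the script name and the PINS mapping are chosen here for illustration) and assumes the packaging library, which ships alongside pip.

# check_requirements.py -- illustrative sketch for validating the pins above.
from importlib.metadata import PackageNotFoundError, version

from packaging.specifiers import SpecifierSet

# Version-constrained entries copied from the requirements list; the remaining
# packages only need to be importable, so they are not listed here.
PINS = {
    "torch": ">=2.0.0",
    "transformers": ">=4.28.0",
    "xformers": ">=0.0.19",
    "pydantic": "<2",
}

for name, spec in PINS.items():
    try:
        installed = version(name)          # e.g. "2.0.1"
    except PackageNotFoundError:
        print(f"{name}: not installed")
        continue
    ok = installed in SpecifierSet(spec)   # PEP 440 version comparison
    print(f"{name} {installed} {'satisfies' if ok else 'violates'} {spec}")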