[Bugfix] Fix encoding_format in examples/openai_embedding_client.py (#6755)

Chang Su authored 2024-07-24 22:48:07 -07:00, committed by GitHub
parent 0310029a2f
commit 316a41ac1d
2 changed files with 8 additions and 6 deletions

examples/openai_embedding_client.py

@@ -13,11 +13,14 @@ client = OpenAI(
 models = client.models.list()
 model = models.data[0].id
 
-responses = client.embeddings.create(input=[
-    "Hello my name is",
-    "The best thing about vLLM is that it supports many different models"
-],
-                                      model=model)
+responses = client.embeddings.create(
+    input=[
+        "Hello my name is",
+        "The best thing about vLLM is that it supports many different models"
+    ],
+    model=model,
+    encoding_format="float",
+)
 
 for data in responses.data:
     print(data.embedding)  # list of float of len 4096
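For context, a minimal sketch of the whole example client after this change, assuming a vLLM OpenAI-compatible server is running locally; the API key and base URL below are assumptions, since they live in the unchanged lines 1-12 of the example and are not shown in this hunk. Passing encoding_format="float" explicitly asks the server to return each embedding as a plain list of floats; "base64" is the other value the OpenAI embeddings API accepts.

from openai import OpenAI

# Assumed connection details for a locally running vLLM OpenAI-compatible
# server; adjust to match how the server was started.
openai_api_key = "EMPTY"
openai_api_base = "http://localhost:8000/v1"

client = OpenAI(
    api_key=openai_api_key,
    base_url=openai_api_base,
)

# Use whatever model the server is currently serving.
models = client.models.list()
model = models.data[0].id

# encoding_format="float" returns embeddings as lists of floats rather than
# base64-encoded buffers.
responses = client.embeddings.create(
    input=[
        "Hello my name is",
        "The best thing about vLLM is that it supports many different models"
    ],
    model=model,
    encoding_format="float",
)

for data in responses.data:
    print(data.embedding)  # one list of floats per input string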


@@ -18,7 +18,6 @@ def embedding_server():
         "--enforce-eager",
         "--max-model-len",
         "8192",
-        "--enforce-eager",
     ]
 
     with RemoteOpenAIServer(EMBEDDING_MODEL_NAME, args) as remote_server:
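The removed line was simply a duplicate of the "--enforce-eager" flag already passed two entries earlier. As a rough sketch of the surrounding pytest fixture after the cleanup (the import path, model name, and earlier args entries are assumptions based on the diff context, not shown in this view):

import pytest

# Assumed helpers: RemoteOpenAIServer is vLLM's test utility that launches the
# OpenAI-compatible server in a subprocess; the import path is an assumption.
from tests.utils import RemoteOpenAIServer

# Assumed embedding model used by this test module.
EMBEDDING_MODEL_NAME = "intfloat/e5-mistral-7b-instruct"


@pytest.fixture(scope="module")
def embedding_server():
    args = [
        # ... earlier CLI args from the real fixture omitted ...
        "--enforce-eager",   # skip CUDA graph capture for faster startup in CI
        "--max-model-len",
        "8192",
    ]
    # Launch the server once per test module and hand it to the tests.
    with RemoteOpenAIServer(EMBEDDING_MODEL_NAME, args) as remote_server:
        yield remote_server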