diff --git a/.buildkite/run-cpu-test.sh b/.buildkite/run-cpu-test.sh
index 968b212b3fe54..c23b37db5c199 100644
--- a/.buildkite/run-cpu-test.sh
+++ b/.buildkite/run-cpu-test.sh
@@ -22,7 +22,7 @@ docker exec cpu-test-avx2 bash -c "python3 examples/offline_inference.py"
 
 # Run basic model test
 docker exec cpu-test bash -c "
-  pip install pytest Pillow protobuf
+  pip install pytest
   pytest -v -s tests/models -m \"not vlm\" --ignore=tests/models/test_embedding.py --ignore=tests/models/test_oot_registration.py --ignore=tests/models/test_registry.py --ignore=tests/models/test_jamba.py --ignore=tests/models/test_danube3_4b.py" # Mamba and Danube3-4B on CPU are not supported
 
 # online inference
diff --git a/Dockerfile.cpu b/Dockerfile.cpu
index 35ce5dde99d2a..9a570f988f3db 100644
--- a/Dockerfile.cpu
+++ b/Dockerfile.cpu
@@ -13,7 +13,7 @@ RUN --mount=type=cache,target=/var/cache/apt \
 RUN --mount=type=cache,target=/root/.cache/pip \
     pip install intel-openmp
 
-ENV LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libtcmalloc_minimal.so.4:/usr/local/lib/libiomp5.so:$LD_PRELOAD"
+ENV LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libtcmalloc_minimal.so.4:/usr/local/lib/libiomp5.so"
 
 RUN echo 'ulimit -c 0' >> ~/.bashrc
 
diff --git a/requirements-common.txt b/requirements-common.txt
index 6ace082ad97d6..e17ff06308011 100644
--- a/requirements-common.txt
+++ b/requirements-common.txt
@@ -6,6 +6,7 @@ tqdm
 py-cpuinfo
 transformers >= 4.43.2 # Required for Chameleon and Llama 3.1 hotfix.
 tokenizers >= 0.19.1 # Required for Llama 3.
+protobuf # Required by LlamaTokenizer.
 fastapi
 aiohttp
 openai
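
A minimal sketch (not part of the patch) of how the change could be smoke-tested locally. The vllm-cpu-env image tag is an assumption, not taken from the patch:

  # Build the CPU image and inspect the runtime environment inside it.
  docker build -f Dockerfile.cpu -t vllm-cpu-env .
  docker run --rm --entrypoint bash vllm-cpu-env -c '
    # LD_PRELOAD should now end at libiomp5.so, with no dangling ":$LD_PRELOAD" suffix.
    echo "LD_PRELOAD=$LD_PRELOAD"
    # protobuf is now pulled in via requirements-common.txt rather than an ad-hoc pip install.
    python3 -c "import google.protobuf; print(google.protobuf.__version__)"
  '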