diff --git a/.buildkite/scripts/generate-nightly-index.py b/.buildkite/scripts/generate-nightly-index.py
index a61f081076475..90286ad4c6e14 100644
--- a/.buildkite/scripts/generate-nightly-index.py
+++ b/.buildkite/scripts/generate-nightly-index.py
@@ -112,11 +112,12 @@ def generate_package_index_and_metadata(
relative_path = (
wheel_base_dir.relative_to(index_base_dir, walk_up=True) / file.filename
)
- href_tags.append(
- f'    <a href="{relative_path.as_posix()}">{file.filename}</a><br/>'
- )
+ # handle '+' in the URL, and avoid double-encoding '/' and already-encoded '%2B'
+ # NOTE: this is AWS S3-specific behavior!
+ file_path_quoted = quote(relative_path.as_posix(), safe=":%/")
+ href_tags.append(f'    <a href="{file_path_quoted}">{file.filename}</a><br/>')
file_meta = asdict(file)
- file_meta["path"] = relative_path.as_posix()
+ file_meta["path"] = file_path_quoted
metadata.append(file_meta)
index_str = INDEX_HTML_TEMPLATE.format(items="\n".join(href_tags))
metadata_str = json.dumps(metadata, indent=2)
@@ -185,7 +186,7 @@ def generate_index_and_metadata(
"platform_tag": "manylinux2014_aarch64",
"variant": "cu129",
"filename": "vllm-0.10.2rc2+cu129-cp38-abi3-manylinux2014_aarch64.whl",
- "path": "../vllm-0.10.2rc2+cu129-cp38-abi3-manylinux2014_aarch64.whl" # to be concatenated with the directory URL
+ "path": "../vllm-0.10.2rc2%2Bcu129-cp38-abi3-manylinux2014_aarch64.whl" # to be concatenated with the directory URL and URL-encoded
},
...
]
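For reference, a minimal standard-library sketch (not part of the patch) of the quoting behavior the new comment describes; the example path mirrors the one in the docstring above and is illustrative only:

# Sketch of quote(..., safe=":%/"): '+' is percent-encoded, while '/' and
# existing '%' escapes are left untouched.
from urllib.parse import quote

raw = "../vllm-0.10.2rc2+cu129-cp38-abi3-manylinux2014_aarch64.whl"
encoded = quote(raw, safe=":%/")
print(encoded)
# ../vllm-0.10.2rc2%2Bcu129-cp38-abi3-manylinux2014_aarch64.whl

# Quoting an already-quoted path is a no-op, so '%2B' is never
# double-encoded to '%252B'.
print(quote(encoded, safe=":%/") == encoded)
# True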
diff --git a/setup.py b/setup.py
index 67226b4447c73..67fbebb1d37b5 100644
--- a/setup.py
+++ b/setup.py
@@ -319,14 +319,17 @@ class precompiled_wheel_utils:
"""Extracts libraries and other files from an existing wheel."""
@staticmethod
- def extract_precompiled_and_patch_package(wheel_url_or_path: str) -> dict:
+ def extract_precompiled_and_patch_package(
+ wheel_url_or_path: str, download_filename: str | None
+ ) -> dict:
import tempfile
import zipfile
temp_dir = None
try:
if not os.path.isfile(wheel_url_or_path):
- wheel_filename = wheel_url_or_path.split("/")[-1]
+ # prefer the explicitly provided filename; otherwise derive it from the URL
+ wheel_filename = download_filename or wheel_url_or_path.split("/")[-1]
temp_dir = tempfile.mkdtemp(prefix="vllm-wheels")
wheel_path = os.path.join(temp_dir, wheel_filename)
print(f"Downloading wheel from {wheel_url_or_path} to {wheel_path}")
@@ -673,7 +676,8 @@ if envs.VLLM_USE_PRECOMPILED:
wheel_location = os.getenv("VLLM_PRECOMPILED_WHEEL_LOCATION", None)
if wheel_location is not None:
wheel_url = wheel_location
- logger.info("Using user-specified precompiled wheel location: {}", wheel_url)
+ download_filename = None
+ logger.info("Using user-specified precompiled wheel location: %s", wheel_url)
else:
import platform
@@ -686,17 +690,17 @@ if envs.VLLM_USE_PRECOMPILED:
precompiled_wheel_utils.get_base_commit_in_main_branch(),
)
logger.info(
- "Using precompiled wheel commit {} with variant {}", commit, variant
+ "Using precompiled wheel commit %s with variant %s", commit, variant
)
try_default = False
- wheels, repo_url = None, None
+ wheels, repo_url, download_filename = None, None, None
try:
wheels, repo_url = _fetch_metadata_for_variant(commit, variant)
- except Exception as e:
+ except Exception:
logger.warning(
- "Failed to fetch precompiled wheel metadata for variant {}",
+ "Failed to fetch precompiled wheel metadata for variant %s",
variant,
- exc_info=e,
+ exc_info=True,
)
try_default = True # try outside handler to keep the stacktrace simple
if try_default:
@@ -717,26 +721,29 @@ if envs.VLLM_USE_PRECOMPILED:
"platform_tag": "manylinux1_x86_64",
"variant": null,
"filename": "vllm-0.11.2.dev278+gdbc3d9991-cp38-abi3-manylinux1_x86_64.whl",
-"path": "../vllm-0.11.2.dev278+gdbc3d9991-cp38-abi3-manylinux1_x86_64.whl"
+"path": "../vllm-0.11.2.dev278%2Bgdbc3d9991-cp38-abi3-manylinux1_x86_64.whl"
},
...]"""
for wheel in wheels:
+ # TODO: maybe check more compatibility later? (python_tag, abi_tag, etc)
if wheel.get("package_name") == "vllm" and arch in wheel.get(
"platform_tag", ""
):
- logger.info("Found precompiled wheel metadata: {}", wheel)
+ logger.info("Found precompiled wheel metadata: %s", wheel)
if "path" not in wheel:
raise ValueError(f"Wheel metadata missing path: {wheel}")
- # TODO: maybe check more compatibility later? (python_tag, abi_tag, etc)
wheel_url = repo_url + wheel["path"]
- logger.info("Using precompiled wheel URL: {}", wheel_url)
+ download_filename = wheel.get("filename")
+ logger.info("Using precompiled wheel URL: %s", wheel_url)
break
else:
raise ValueError(
f"No precompiled vllm wheel found for architecture {arch} "
f"from repo {repo_url}. All available wheels: {wheels}"
)
- patch = precompiled_wheel_utils.extract_precompiled_and_patch_package(wheel_url)
+ patch = precompiled_wheel_utils.extract_precompiled_and_patch_package(
+ wheel_url, download_filename
+ )
for pkg, files in patch.items():
package_data.setdefault(pkg, []).extend(files)