Use Transformers v5 RoPE standardisation and validation (#30046)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
Harry Mellor 2025-12-04 14:54:28 +00:00 committed by GitHub
parent dd97e047e0
commit 5c32a06a04


@@ -304,14 +304,19 @@ def set_default_rope_theta(config: PretrainedConfig, default_theta: float) -> None:
 def patch_rope_parameters(config: PretrainedConfig) -> None:
     """Provide backwards compatibility for RoPE."""
+    rope_theta = getattr(config, "rope_theta", None)
     if Version(version("transformers")) < Version("5.0.0.dev0"):
         # Transformers v4 installed, legacy config fields may be present
         if (rope_scaling := getattr(config, "rope_scaling", None)) is not None:
             config.rope_parameters = rope_scaling
-        if (rope_theta := getattr(config, "rope_theta", None)) is not None:
+        if rope_theta is not None:
             if not hasattr(config, "rope_parameters"):
                 config.rope_parameters = {"rope_type": "default"}
             config.rope_parameters["rope_theta"] = rope_theta
+    elif rope_theta is not None or hasattr(config, "rope_parameters"):
+        # Transformers v5 installed
+        config.standardize_rope_params()
+        config.validate_rope()

     # No RoPE parameters to patch
     if getattr(config, "rope_parameters", None) is None:
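
For illustration, below is a minimal, self-contained sketch of the patched logic. It assumes the `Version` and `version` names used in the diff come from `packaging.version` and `importlib.metadata`, and it uses a made-up `SimpleNamespace` config with invented `rope_theta`/`rope_scaling` values as a stand-in for a real `PretrainedConfig`. With Transformers v4 installed it shows the legacy fields being folded into `rope_parameters`; the v5 branch would require a real config object exposing `standardize_rope_params()` and `validate_rope()`.

# Sketch of the patched backwards-compatibility logic (assumptions noted above).
from importlib.metadata import version
from types import SimpleNamespace

from packaging.version import Version


def patch_rope_parameters_sketch(config) -> None:
    """Fold legacy RoPE config fields into config.rope_parameters."""
    rope_theta = getattr(config, "rope_theta", None)
    if Version(version("transformers")) < Version("5.0.0.dev0"):
        # Transformers v4: copy legacy rope_scaling/rope_theta into rope_parameters
        if (rope_scaling := getattr(config, "rope_scaling", None)) is not None:
            config.rope_parameters = rope_scaling
        if rope_theta is not None:
            if not hasattr(config, "rope_parameters"):
                config.rope_parameters = {"rope_type": "default"}
            config.rope_parameters["rope_theta"] = rope_theta
    elif rope_theta is not None or hasattr(config, "rope_parameters"):
        # Transformers v5: the config standardises and validates itself
        # (needs a real PretrainedConfig, not the stand-in used below)
        config.standardize_rope_params()
        config.validate_rope()


# Hypothetical legacy-style config; field values are made up for illustration.
legacy_config = SimpleNamespace(
    rope_theta=10000.0,
    rope_scaling={"rope_type": "linear", "factor": 2.0},
)

if Version(version("transformers")) < Version("5.0.0.dev0"):
    patch_rope_parameters_sketch(legacy_config)
    print(legacy_config.rope_parameters)
    # -> {'rope_type': 'linear', 'factor': 2.0, 'rope_theta': 10000.0}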