[Bugfix] fix rotary embedding test for _get_padded_tensor_shape (#18229)

Signed-off-by: Lucas Wilkinson <lwilkinson@neuralmagic.com>
commit 4e1c6a0264 (parent c7852a6d9b)
Author: Lucas Wilkinson <lwilkinson@neuralmagic.com>
Date: 2025-05-15 21:32:45 -04:00 (committed via GitHub)


@@ -152,6 +152,10 @@ def test_batched_rotary_embedding(
     query = torch.randn(query_shape, dtype=dtype)
     key = torch.randn_like(query) if use_key else None
 
+    # slice tensor if required, noop otherwise
+    query = query[..., :head_size]
+    key = key[..., :head_size] if use_key else None
+
     # NOTE(woosuk): The reference implementation should be executed first
     # because the custom kernel is in-place.
     ref_query, ref_key = rope.forward_native(positions, query, key)
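
The added slice keeps the test correct whether or not the embedding padded the head dimension: when the last dimension already equals head_size, the slice is a no-op view of the same data; when the tensor was allocated with a padded shape, the slice trims it back to the logical size before the reference and custom paths are compared. A minimal sketch of that behavior (not part of the commit; the shapes below are hypothetical):

import torch

head_size = 80          # logical head size under test (hypothetical)
padded_head_size = 96   # hypothetical size a padding helper might round up to

# Padded case: slicing trims back to the logical head size.
q_padded = torch.randn(2, 4, padded_head_size)
assert q_padded[..., :head_size].shape[-1] == head_size

# Unpadded case: slicing the full extent is a no-op view of the same data.
q_exact = torch.randn(2, 4, head_size)
assert torch.equal(q_exact[..., :head_size], q_exact)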