[Bugfix] Remove contiguous output req for context parallel MLA (#25414)

Signed-off-by: Michael Goin <mgoin64@gmail.com>
Michael Goin 2025-09-22 23:26:32 -04:00 committed by GitHub
parent eea1783989
commit 78237e43bf

@@ -134,6 +134,5 @@ def cp_lse_ag_out_rs(cp_attn_out: torch.Tensor,
     cp_attn_lse = cp_attn_lse.contiguous()
     lses = cp_group.all_gather(cp_attn_lse, dim=0).view_as(lses)
     out, _ = correct_attn_out(cp_attn_out, lses, cp_group.rank_in_group, ctx)
-    assert out.is_contiguous()
     out = cp_group.reduce_scatter(out, dim=1)
     return out
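
For context, this hunk merges partial MLA attention outputs across context-parallel ranks by all-gathering per-rank log-sum-exp (LSE) values, correcting the local partial output, and reduce-scattering the result; the dropped assertion had required that corrected output to be contiguous, which the title indicates is no longer needed. Below is a minimal, hedged sketch of the LSE-based merge idea only; the function name merge_partial_attn and the tensor shapes are illustrative assumptions, not vLLM's actual correct_attn_out API.

    # Sketch (illustrative only, not vLLM's implementation) of merging
    # partial attention outputs using per-rank log-sum-exp (LSE) values.
    import torch

    def merge_partial_attn(outs: torch.Tensor, lses: torch.Tensor) -> torch.Tensor:
        # outs: [cp_world_size, tokens, heads, head_dim] partial attention outputs
        # lses: [cp_world_size, tokens, heads] per-rank log-sum-exp of the scores
        # Each rank's partial output is weighted by exp(lse_rank - lse_global),
        # i.e. a softmax over the rank dimension; summing the weighted partial
        # outputs recovers the full-attention result.
        weights = torch.softmax(lses, dim=0).unsqueeze(-1)  # [cp, tokens, heads, 1]
        return (outs * weights).sum(dim=0)

In the actual code path, each rank rescales only its own partial output and the sum over ranks is performed by the reduce_scatter call shown in the hunk, rather than by an explicit sum as in this sketch.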