remove outdated comment

Signed-off-by: bk-201 <joy25810@foxmail.com>
This commit is contained in:
bk-201 2025-12-11 16:00:16 +00:00
parent 27448490f1
commit 65e403d335

View File

@@ -63,10 +63,6 @@ class RowParallelLinearWithLoRA(BaseLinearLayerWithLoRA):
input_parallel = splitted_input[self.tp_rank].contiguous()
# Matrix multiply.
# Only fuse bias add into GEMM for rank 0 (matches base
# RowParallelLinear behavior). This ensures bias will not get
# added more than once in TP>1 case and matches the numerical
# behavior of the unwrapped layer
bias_ = (
None
if (self.tp_rank > 0 or self.base_layer.skip_bias_add)