[mypy] Fix incorrect type hint for EAGLE3 support (#23617)
Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
commit 7d67a9d9f9
parent 959783fb99
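Context for the fix: in Python's typing system, tuple[int] is a fixed-length type meaning "a tuple of exactly one int", while tuple[int, ...] means "a tuple of ints of any length". The old annotations promised 1-tuples, but the code stores and returns 3-tuples such as (2, num_layers // 2, num_layers - 3), which mypy rejects. A minimal standalone sketch of the distinction (illustrative only, not vllm code; the function names are hypothetical):

def pick_layers_wrong(num_layers: int) -> tuple[int]:
    # tuple[int] promises exactly one element, so mypy flags this
    # 3-tuple return (a return-value error). It still runs, because
    # annotations are not enforced at runtime.
    return (2, num_layers // 2, num_layers - 3)

def pick_layers_right(num_layers: int) -> tuple[int, ...]:
    # tuple[int, ...] accepts an int tuple of any length.
    return (2, num_layers // 2, num_layers - 3)

print(pick_layers_right(32))  # (2, 16, 29)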
@@ -353,7 +353,7 @@ class LlamaModel(nn.Module):
         else:
             self.norm = PPMissingLayer()
 
-        self.aux_hidden_state_layers: tuple[int] = tuple()
+        self.aux_hidden_state_layers = tuple[int, ...]()
 
         self.make_empty_intermediate_tensors = (
             make_empty_intermediate_tensors_factory(
@@ -553,10 +553,10 @@ class LlamaForCausalLM(nn.Module, SupportsLoRA, SupportsPP, SupportsEagle3):
         self.make_empty_intermediate_tensors = (
             self.model.make_empty_intermediate_tensors)
 
-    def set_aux_hidden_state_layers(self, layers: tuple[int]) -> None:
+    def set_aux_hidden_state_layers(self, layers: tuple[int, ...]) -> None:
         self.model.aux_hidden_state_layers = layers
 
-    def get_eagle3_aux_hidden_state_layers(self) -> tuple[int]:
+    def get_eagle3_aux_hidden_state_layers(self) -> tuple[int, ...]:
         num_layers = len(self.model.layers)
         return (2, num_layers // 2, num_layers - 3)
 
@@ -333,7 +333,7 @@ class Qwen2Model(nn.Module):
         else:
             self.norm = PPMissingLayer()
 
-        self.aux_hidden_state_layers: tuple[int] = tuple()
+        self.aux_hidden_state_layers = tuple[int, ...]()
 
     def get_input_embeddings(self, input_ids: torch.Tensor) -> torch.Tensor:
         return self.embed_tokens(input_ids)
@@ -488,10 +488,10 @@ class Qwen2ForCausalLM(nn.Module, SupportsLoRA, SupportsPP, SupportsEagle3):
     def get_input_embeddings(self, input_ids: torch.Tensor) -> torch.Tensor:
         return self.model.get_input_embeddings(input_ids)
 
-    def set_aux_hidden_state_layers(self, layers: tuple[int]) -> None:
+    def set_aux_hidden_state_layers(self, layers: tuple[int, ...]) -> None:
         self.model.aux_hidden_state_layers = layers
 
-    def get_eagle3_aux_hidden_state_layers(self) -> tuple[int]:
+    def get_eagle3_aux_hidden_state_layers(self) -> tuple[int, ...]:
         num_layers = len(self.model.layers)
         return (2, num_layers // 2, num_layers - 3)
 
@@ -304,10 +304,10 @@ class Qwen3ForCausalLM(nn.Module, SupportsLoRA, SupportsPP, SupportsEagle3):
         self.make_empty_intermediate_tensors = (
             self.model.make_empty_intermediate_tensors)
 
-    def set_aux_hidden_state_layers(self, layers: tuple[int]) -> None:
+    def set_aux_hidden_state_layers(self, layers: tuple[int, ...]) -> None:
         self.model.aux_hidden_state_layers = layers
 
-    def get_eagle3_aux_hidden_state_layers(self) -> tuple[int]:
+    def get_eagle3_aux_hidden_state_layers(self) -> tuple[int, ...]:
         num_layers = len(self.model.layers)
         return (2, num_layers // 2, num_layers - 3)
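A note on the new initializer spelling: tuple[int, ...]() subscripts the builtin tuple (producing a types.GenericAlias) and then calls it, which constructs an ordinary empty tuple while letting mypy infer the attribute as tuple[int, ...] with no separate annotation to drift out of sync. A small sketch of the equivalent spellings (assumes plain CPython 3.9+, no vllm imports):

import types

# Calling a parameterized alias delegates to its origin type, so
# tuple[int, ...]() == tuple() == (); the subscript only informs mypy.
layers_a = tuple[int, ...]()            # inferred as tuple[int, ...]
layers_b: tuple[int, ...] = tuple()     # explicit annotation, same result
assert layers_a == layers_b == ()

alias = tuple[int, ...]
assert isinstance(alias, types.GenericAlias)  # subscripted builtin type
assert alias.__origin__ is tuple              # calling it builds a real tuple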