Mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-24 20:45:33 +08:00)
[Bugfix] Fix transform_config parsing in Compressed Tensors (#23945)
Signed-off-by: Kyle Sayers <kylesayrs@gmail.com>
parent c83c4ff815
commit 1c41310584
@@ -63,7 +63,7 @@ class CompressedTensorsConfig(QuantizationConfig):
         sparsity_ignore_list: list[str],
         kv_cache_scheme: Optional[dict[str, Any]] = None,
         config: Optional[dict[str, Any]] = None,
-        transform_config: Optional[TransformConfig] = None,
+        transform_config: Optional[dict[str, Any]] = None,
     ):
         super().__init__()
         self.ignore = ignore
@@ -75,7 +75,7 @@ class CompressedTensorsConfig(QuantizationConfig):
         self.sparsity_ignore_list = sparsity_ignore_list
         self.config = config

-        if transform_config is not None:
+        if transform_config:
             self.transform_config = TransformConfig.model_validate(
                 transform_config)
         else:
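For context, here is a minimal sketch (not vLLM code) of why the guard changed from `is not None` to a plain truthiness check: `transform_config` arrives as a raw dict, so an empty dict should be treated the same as a missing entry rather than handed to `TransformConfig.model_validate`. `DummyTransformConfig` and `parse_transform_config` below are hypothetical stand-ins for illustration only; the real `TransformConfig` comes from the compressed-tensors package.

    # Hypothetical stand-in for TransformConfig; one required field is enough
    # to show the failure mode an empty dict would trigger.
    from typing import Any, Optional

    from pydantic import BaseModel


    class DummyTransformConfig(BaseModel):
        config_groups: dict[str, Any]


    def parse_transform_config(
        transform_config: Optional[dict[str, Any]],
    ) -> Optional[DummyTransformConfig]:
        # Old guard (`if transform_config is not None:`) let {} through, and
        # model_validate({}) raises ValidationError since config_groups is missing.
        # The new truthiness guard maps both None and {} to "no transforms".
        if transform_config:
            return DummyTransformConfig.model_validate(transform_config)
        return None


    print(parse_transform_config(None))  # None
    print(parse_transform_config({}))    # None; the old guard would raise here
    print(parse_transform_config({"config_groups": {"group_0": {"type": "hadamard"}}}))

The annotation change to `Optional[dict[str, Any]]` appears to reflect the same point: the caller passes the unparsed dict from the model's quantization config, and `__init__` is now the place where it gets validated into a `TransformConfig`.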