Adjust flux mem usage factor a bit. (#9588)

comfyanonymous 2025-08-27 20:08:17 -07:00 committed by GitHub
parent 38f697d953
commit 4aa79dbf2c
GPG Key ID: B5690EEEBB952194


@@ -700,7 +700,7 @@ class Flux(supported_models_base.BASE):
     unet_extra_config = {}
     latent_format = latent_formats.Flux
-    memory_usage_factor = 2.8
+    memory_usage_factor = 3.1 # TODO: debug why flux mem usage is so weird on windows.
     supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32]
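
For context, memory_usage_factor is a per-model multiplier that ComfyUI folds into its VRAM estimate when deciding how much memory an inference pass will need, so raising Flux from 2.8 to 3.1 makes that estimate more conservative. The sketch below illustrates how such a factor typically enters an estimate of this kind; it is a minimal hypothetical, not ComfyUI's actual estimator — the function name, the latent shape handling, and the final scaling constant are all assumptions for illustration.

import math
import torch

def estimate_inference_memory(latent_shape, dtype, memory_usage_factor=3.1):
    # Hypothetical sketch of a factor-scaled VRAM estimate.
    # Bytes per element for the chosen inference dtype (2 for bfloat16).
    bytes_per_element = torch.tensor([], dtype=dtype).element_size()
    # Spatial area across the batch: batch * height * width of the latent.
    area = latent_shape[0] * math.prod(latent_shape[2:])
    # The per-model factor scales the raw activation estimate; the 1024
    # base-cost constant here is an assumption, not ComfyUI's real value.
    return area * bytes_per_element * memory_usage_factor * 1024

# Usage: a 1024x1024 image corresponds to a 128x128, 16-channel Flux latent.
needed = estimate_inference_memory((1, 16, 128, 128), torch.bfloat16)
print(f"estimated VRAM: {needed / 1024**2:.0f} MiB")

A larger factor makes the model manager reserve more headroom per sample, trading some caching and batching headroom for fewer out-of-memory failures; the TODO in the diff suggests observed usage on Windows was exceeding the old 2.8 estimate.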