From 28eca469594fabe9ddbae99c6557d40a04df1398 Mon Sep 17 00:00:00 2001
From: Won-Kyu Park
Date: Fri, 20 Sep 2024 00:00:34 +0900
Subject: [PATCH] fix flux to use float8 t5xxl

---
 modules/models/flux/flux.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/models/flux/flux.py b/modules/models/flux/flux.py
index 46fd568a0..42e9ea788 100644
--- a/modules/models/flux/flux.py
+++ b/modules/models/flux/flux.py
@@ -108,7 +108,7 @@ class FluxCond(torch.nn.Module):
                 self.clip_l.transformer.load_state_dict(SafetensorsMapping(file), strict=False)
 
         if self.t5xxl and 'text_encoders.t5xxl.transformer.encoder.block.0.layer.0.SelfAttention.k.weight' not in state_dict:
-            t5_file = modelloader.load_file_from_url(T5_URL, model_dir=clip_path, file_name="t5xxl_fp16.safetensors")
+            t5_file = modelloader.load_file_from_url(T5_URL, model_dir=clip_path, file_name="t5xxl_fp8_e4m3fn.safetensors")
             with safetensors.safe_open(t5_file, framework="pt") as file:
                 self.t5xxl.transformer.load_state_dict(SafetensorsMapping(file), strict=False)
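
Note: a minimal sketch of how one might spot-check that the downloaded checkpoint
really contains float8 weights. The file path below is an assumption (not taken
from this patch), and it assumes a safetensors/PyTorch version recent enough to
support torch.float8_e4m3fn:

    import safetensors

    # Assumed download location under the webui model directory; adjust to
    # wherever clip_path points on your install.
    t5_file = "models/CLIP/t5xxl_fp8_e4m3fn.safetensors"

    # safe_open reads tensors lazily, so this never loads the full ~5 GB file.
    with safetensors.safe_open(t5_file, framework="pt") as f:
        for name in list(f.keys())[:3]:               # spot-check a few tensors
            print(name, f.get_tensor(name).dtype)     # most should be torch.float8_e4m3fn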