From abb36d66b5bd9bb8d4b38d7f3c73e26983b5da86 Mon Sep 17 00:00:00 2001
From: Wagner Bruna
Date: Wed, 10 Sep 2025 10:38:21 -0300
Subject: [PATCH] chore: update flash attention warnings (#805)

---
 stable-diffusion.cpp | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/stable-diffusion.cpp b/stable-diffusion.cpp
index 7b5aaf6..eaa77f5 100644
--- a/stable-diffusion.cpp
+++ b/stable-diffusion.cpp
@@ -345,7 +345,7 @@ public:
         }
         if (sd_version_is_sd3(version)) {
             if (sd_ctx_params->diffusion_flash_attn) {
-                LOG_WARN("flash attention in this diffusion model is currently unsupported!");
+                LOG_WARN("flash attention in this diffusion model is currently not implemented!");
             }
             cond_stage_model = std::make_shared<SD3CLIPEmbedder>(clip_backend,
                                                                  offload_params_to_cpu,
@@ -362,6 +362,15 @@ public:
                 }
             }
             if (is_chroma) {
+                if (sd_ctx_params->diffusion_flash_attn && sd_ctx_params->chroma_use_dit_mask) {
+                    LOG_WARN(
+                        "!!!It looks like you are using Chroma with flash attention. "
+                        "This is currently unsupported. "
+                        "If you find that the generated images are broken, "
+                        "try either disabling flash attention or specifying "
+                        "--chroma-disable-dit-mask as a workaround.");
+                }
+
                 cond_stage_model = std::make_shared<T5CLIPEmbedder>(clip_backend,
                                                                     offload_params_to_cpu,
                                                                     model_loader.tensor_storages_types,