Commit abb36d6

chore: update flash attention warnings (#805)

1 parent: ff4fdbb
1 file changed: +10 −1

stable-diffusion.cpp

Lines changed: 10 additions & 1 deletion
@@ -345,7 +345,7 @@ class StableDiffusionGGML {
         }
         if (sd_version_is_sd3(version)) {
             if (sd_ctx_params->diffusion_flash_attn) {
-                LOG_WARN("flash attention in this diffusion model is currently unsupported!");
+                LOG_WARN("flash attention in this diffusion model is currently not implemented!");
             }
             cond_stage_model = std::make_shared<SD3CLIPEmbedder>(clip_backend,
                                                                  offload_params_to_cpu,
@@ -362,6 +362,15 @@ class StableDiffusionGGML {
                 }
             }
             if (is_chroma) {
+                if (sd_ctx_params->diffusion_flash_attn && sd_ctx_params->chroma_use_dit_mask) {
+                    LOG_WARN(
+                        "!!!It looks like you are using Chroma with flash attention. "
+                        "This is currently unsupported. "
+                        "If you find that the generated images are broken, "
+                        "try either disabling flash attention or specifying "
+                        "--chroma-disable-dit-mask as a workaround.");
+                }
+
                 cond_stage_model = std::make_shared<T5CLIPEmbedder>(clip_backend,
                                                                     offload_params_to_cpu,
                                                                     model_loader.tensor_storages_types,
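
For library consumers, the practical takeaway of the new Chroma warning is that diffusion_flash_attn and chroma_use_dit_mask should not both be enabled. A minimal caller-side sketch of the same guard is below; only the two field names are confirmed by this diff, while the sd_ctx_params_t type name and the header path are assumptions about the public API:

// Hypothetical caller-side guard mirroring the warning added here.
// Assumption: the context params struct is sd_ctx_params_t from
// "stable-diffusion.h"; only the diffusion_flash_attn and
// chroma_use_dit_mask fields are confirmed by this commit.
#include "stable-diffusion.h"

static void resolve_chroma_flash_attn_conflict(sd_ctx_params_t* params) {
    if (params->diffusion_flash_attn && params->chroma_use_dit_mask) {
        // Apply the workaround the warning suggests: keep flash
        // attention but drop the Chroma DiT mask, the equivalent of
        // passing --chroma-disable-dit-mask on the command line.
        params->chroma_use_dit_mask = false;
    }
}

On the CLI, the same fix is to either drop the flash attention option or pass --chroma-disable-dit-mask, the only flag confirmed by this change.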
