1 parent c1b47c0 commit cfd73d2
1 file changed
src/diffusers/models/attention_dispatch.py
@@ -75,10 +75,7 @@
 except (ImportError, OSError, RuntimeError) as e:
     # Handle ABI mismatch or other import failures gracefully.
     # This can happen when flash_attn was compiled against a different PyTorch version.
-    logger.warning(
-        f"flash_attn is installed but failed to import: {e}. "
-        f"Falling back to native PyTorch attention."
-    )
+    logger.warning(f"flash_attn is installed but failed to import: {e}. Falling back to native PyTorch attention.")
     _CAN_USE_FLASH_ATTN = False
     flash_attn_func = None
     flash_attn_varlen_func = None
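
For context, here is a minimal sketch of the guarded-import pattern this hunk sits inside: catch ImportError, OSError, and RuntimeError around the flash_attn import and fall back to native PyTorch attention. The try block and logger setup are not part of the hunk, so those lines are reconstructed assumptions; flash_attn_func and flash_attn_varlen_func are the real top-level flash_attn entry points, but diffusers' own logger helper is stood in for with stdlib logging here.

import logging

logger = logging.getLogger(__name__)  # stand-in for diffusers' own logger helper (assumption)

_CAN_USE_FLASH_ATTN = True
try:
    # These imports can raise more than ImportError: an ABI mismatch between
    # the flash_attn wheel and the installed PyTorch can surface as OSError or RuntimeError.
    from flash_attn import flash_attn_func, flash_attn_varlen_func
except (ImportError, OSError, RuntimeError) as e:
    # Handle ABI mismatch or other import failures gracefully.
    logger.warning(f"flash_attn is installed but failed to import: {e}. Falling back to native PyTorch attention.")
    _CAN_USE_FLASH_ATTN = False
    flash_attn_func = None
    flash_attn_varlen_func = None

Downstream dispatch code would then branch on _CAN_USE_FLASH_ATTN (or check the function objects for None) and route to PyTorch's built-in torch.nn.functional.scaled_dot_product_attention instead of flash_attn.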