Skip to content

Commit cfd73d2

Browse files
Apply style fixes
1 parent c1b47c0 commit cfd73d2

1 file changed

Lines changed: 1 addition & 4 deletions

File tree

src/diffusers/models/attention_dispatch.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -75,10 +75,7 @@
     except (ImportError, OSError, RuntimeError) as e:
         # Handle ABI mismatch or other import failures gracefully.
         # This can happen when flash_attn was compiled against a different PyTorch version.
-        logger.warning(
-            f"flash_attn is installed but failed to import: {e}. "
-            f"Falling back to native PyTorch attention."
-        )
+        logger.warning(f"flash_attn is installed but failed to import: {e}. Falling back to native PyTorch attention.")
         _CAN_USE_FLASH_ATTN = False
         flash_attn_func = None
         flash_attn_varlen_func = None

0 commit comments

Comments (0)