From 15c6fc3620b19ee47ca3e0f9839695d3d449049f Mon Sep 17 00:00:00 2001
From: Alexander Eichhorn
Date: Mon, 23 Mar 2026 05:43:11 +0100
Subject: [PATCH] fix: detect Z-Image LoRAs with transformer.layers prefix

OneTrainer exports Z-Image LoRAs with 'transformer.layers.' key prefix
instead of 'diffusion_model.layers.'. Add this prefix (and the PEFT-wrapped
'base_model.model.transformer.layers.' variant) to the Z-Image LoRA probe
so these models are correctly identified and loaded.
---
 invokeai/backend/model_manager/configs/lora.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/invokeai/backend/model_manager/configs/lora.py b/invokeai/backend/model_manager/configs/lora.py
index 1619c9d6f06..791ded2ed0a 100644
--- a/invokeai/backend/model_manager/configs/lora.py
+++ b/invokeai/backend/model_manager/configs/lora.py
@@ -711,6 +711,8 @@ def _validate_looks_like_lora(cls, mod: ModelOnDisk) -> None:
             state_dict,
             {
                 "diffusion_model.layers.",  # Z-Image S3-DiT layer pattern
+                "transformer.layers.",  # OneTrainer/diffusers prefix variant
+                "base_model.model.transformer.layers.",  # PEFT-wrapped variant
             },
         )
@@ -747,6 +749,8 @@ def _get_base_or_raise(cls, mod: ModelOnDisk) -> BaseModelType:
             state_dict,
             {
                 "diffusion_model.layers.",  # Z-Image S3-DiT layer pattern
+                "transformer.layers.",  # OneTrainer/diffusers prefix variant
+                "base_model.model.transformer.layers.",  # PEFT-wrapped variant
             },
         )