Skip to content

Commit 33a77c6

Browse files
sayakpaul and yiyixuxu
committed
[LoRA] make set_adapters() method more robust. (#9535)
* make set_adapters() method more robust.
* remove patch
* better and concise code.
* Update src/diffusers/loaders/lora_base.py

Co-authored-by: YiYi Xu <yixu310@gmail.com>

---------

Co-authored-by: YiYi Xu <yixu310@gmail.com>
1 parent f018acd commit 33a77c6

File tree

2 files changed

+54
-4
lines changed

2 files changed

+54
-4
lines changed

src/diffusers/loaders/lora_base.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -532,13 +532,19 @@ def set_adapters(
532532
)
533533

534534
list_adapters = self.get_list_adapters() # eg {"unet": ["adapter1", "adapter2"], "text_encoder": ["adapter2"]}
535-
all_adapters = {
536-
adapter for adapters in list_adapters.values() for adapter in adapters
537-
} # eg ["adapter1", "adapter2"]
535+
# eg ["adapter1", "adapter2"]
536+
all_adapters = {adapter for adapters in list_adapters.values() for adapter in adapters}
537+
missing_adapters = set(adapter_names) - all_adapters
538+
if len(missing_adapters) > 0:
539+
raise ValueError(
540+
f"Adapter name(s) {missing_adapters} not in the list of present adapters: {all_adapters}."
541+
)
542+
543+
# eg {"adapter1": ["unet"], "adapter2": ["unet", "text_encoder"]}
538544
invert_list_adapters = {
539545
adapter: [part for part, adapters in list_adapters.items() if adapter in adapters]
540546
for adapter in all_adapters
541-
} # eg {"adapter1": ["unet"], "adapter2": ["unet", "text_encoder"]}
547+
}
542548

543549
# Decompose weights into weights for denoiser and text encoders.
544550
_component_adapter_weights = {}

tests/lora/utils.py

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -929,12 +929,24 @@ def test_simple_inference_with_text_denoiser_multi_adapter(self):
929929

930930
pipe.set_adapters("adapter-1")
931931
output_adapter_1 = pipe(**inputs, generator=torch.manual_seed(0))[0]
932+
self.assertFalse(
933+
np.allclose(output_no_lora, output_adapter_1, atol=1e-3, rtol=1e-3),
934+
"Adapter outputs should be different.",
935+
)
932936

933937
pipe.set_adapters("adapter-2")
934938
output_adapter_2 = pipe(**inputs, generator=torch.manual_seed(0))[0]
939+
self.assertFalse(
940+
np.allclose(output_no_lora, output_adapter_2, atol=1e-3, rtol=1e-3),
941+
"Adapter outputs should be different.",
942+
)
935943

936944
pipe.set_adapters(["adapter-1", "adapter-2"])
937945
output_adapter_mixed = pipe(**inputs, generator=torch.manual_seed(0))[0]
946+
self.assertFalse(
947+
np.allclose(output_no_lora, output_adapter_mixed, atol=1e-3, rtol=1e-3),
948+
"Adapter outputs should be different.",
949+
)
938950

939951
# Fuse and unfuse should lead to the same results
940952
self.assertFalse(
@@ -960,6 +972,38 @@ def test_simple_inference_with_text_denoiser_multi_adapter(self):
960972
"output with no lora and output with lora disabled should give same results",
961973
)
962974

975+
def test_wrong_adapter_name_raises_error(self):
976+
scheduler_cls = self.scheduler_classes[0]
977+
components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
978+
pipe = self.pipeline_class(**components)
979+
pipe = pipe.to(torch_device)
980+
pipe.set_progress_bar_config(disable=None)
981+
_, _, inputs = self.get_dummy_inputs(with_generator=False)
982+
983+
if "text_encoder" in self.pipeline_class._lora_loadable_modules:
984+
pipe.text_encoder.add_adapter(text_lora_config, "adapter-1")
985+
self.assertTrue(check_if_lora_correctly_set(pipe.text_encoder), "Lora not correctly set in text encoder")
986+
987+
denoiser = pipe.transformer if self.unet_kwargs is None else pipe.unet
988+
denoiser.add_adapter(denoiser_lora_config, "adapter-1")
989+
self.assertTrue(check_if_lora_correctly_set(denoiser), "Lora not correctly set in denoiser.")
990+
991+
if self.has_two_text_encoders or self.has_three_text_encoders:
992+
if "text_encoder_2" in self.pipeline_class._lora_loadable_modules:
993+
pipe.text_encoder_2.add_adapter(text_lora_config, "adapter-1")
994+
self.assertTrue(
995+
check_if_lora_correctly_set(pipe.text_encoder_2), "Lora not correctly set in text encoder 2"
996+
)
997+
998+
with self.assertRaises(ValueError) as err_context:
999+
pipe.set_adapters("test")
1000+
1001+
self.assertTrue("not in the list of present adapters" in str(err_context.exception))
1002+
1003+
# test this works.
1004+
pipe.set_adapters("adapter-1")
1005+
_ = pipe(**inputs, generator=torch.manual_seed(0))[0]
1006+
9631007
def test_simple_inference_with_text_denoiser_block_scale(self):
9641008
"""
9651009
Tests a simple inference with lora attached to text encoder and unet, attaches

0 commit comments

Comments (0)