@@ -929,12 +929,24 @@ def test_simple_inference_with_text_denoiser_multi_adapter(self):
 
         pipe.set_adapters("adapter-1")
         output_adapter_1 = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        self.assertFalse(
+            np.allclose(output_no_lora, output_adapter_1, atol=1e-3, rtol=1e-3),
+            "Adapter outputs should be different.",
+        )
 
         pipe.set_adapters("adapter-2")
         output_adapter_2 = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        self.assertFalse(
+            np.allclose(output_no_lora, output_adapter_2, atol=1e-3, rtol=1e-3),
+            "Adapter outputs should be different.",
+        )
 
         pipe.set_adapters(["adapter-1", "adapter-2"])
         output_adapter_mixed = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        self.assertFalse(
+            np.allclose(output_no_lora, output_adapter_mixed, atol=1e-3, rtol=1e-3),
+            "Adapter outputs should be different.",
+        )
 
         # Fuse and unfuse should lead to the same results
         self.assertFalse(
@@ -960,6 +972,38 @@ def test_simple_inference_with_text_denoiser_multi_adapter(self):
             "output with no lora and output with lora disabled should give same results",
         )
 
+    def test_wrong_adapter_name_raises_error(self):
+        scheduler_cls = self.scheduler_classes[0]
+        components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
+        pipe = self.pipeline_class(**components)
+        pipe = pipe.to(torch_device)
+        pipe.set_progress_bar_config(disable=None)
+        _, _, inputs = self.get_dummy_inputs(with_generator=False)
+
+        if "text_encoder" in self.pipeline_class._lora_loadable_modules:
+            pipe.text_encoder.add_adapter(text_lora_config, "adapter-1")
+            self.assertTrue(check_if_lora_correctly_set(pipe.text_encoder), "Lora not correctly set in text encoder")
+
+        denoiser = pipe.transformer if self.unet_kwargs is None else pipe.unet
+        denoiser.add_adapter(denoiser_lora_config, "adapter-1")
+        self.assertTrue(check_if_lora_correctly_set(denoiser), "Lora not correctly set in denoiser.")
+
+        if self.has_two_text_encoders or self.has_three_text_encoders:
+            if "text_encoder_2" in self.pipeline_class._lora_loadable_modules:
+                pipe.text_encoder_2.add_adapter(text_lora_config, "adapter-1")
+                self.assertTrue(
+                    check_if_lora_correctly_set(pipe.text_encoder_2), "Lora not correctly set in text encoder 2"
+                )
+
+        with self.assertRaises(ValueError) as err_context:
+            pipe.set_adapters("test")
+
+        self.assertTrue("not in the list of present adapters" in str(err_context.exception))
+
+        # test that setting a valid adapter name still works.
+        pipe.set_adapters("adapter-1")
+        _ = pipe(**inputs, generator=torch.manual_seed(0))[0]
+
     def test_simple_inference_with_text_denoiser_block_scale(self):
         """
         Tests a simple inference with lora attached to text encoder and unet, attaches