Closed wkpark closed 1 year ago
Calling sd_models.load_model() with already_loaded_state_dict=state_dict leaves the current sd_model in the "meta" state, because of these lines: https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/v1.6.0/modules/sd_models.py#L563-L577
sd_models.load_model()
already_loaded_state_dict=state_dict
"meta"
... if model_data.sd_model: send_model_to_trash(model_data.sd_model) # this line model_data.sd_model = None devices.torch_gc() ...
To work around this issue, add the following before calling sd_models.load_model():
diff --git a/scripts/model_mixer.py b/scripts/model_mixer.py index 1981557..ef9a17d 100644 --- a/scripts/model_mixer.py +++ b/scripts/model_mixer.py @@ -2122,6 +2122,12 @@ class ModelMixerScript(scripts.Script): print("WARN: lowvram/medvram load_model() with minor workaround") sd_models.unload_model_weights() #sd_models.model_data.__init__() + + if sd_models.model_data.sd_model: + sd_models.send_model_to_cpu(sd_models.model_data.sd_model) # this line. + sd_models.model_data.sd_model = None + devices.torch_gc() + sd_models.load_model(checkpoint_info=checkpoint_info, already_loaded_state_dict=state_dict) del state_dict devices.torch_gc()
sd_models.load_model()
call with already_loaded_state_dict=state_dict
makes the current sd_model state "meta"
because of these lines: https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/v1.6.0/modules/sd_models.py#L563-L577 — to work around this issue, before calling
sd_models.load_model()
add the following