fix: reset weight adapter for models if no loras in request (#1307)

This commit is contained in:
Korsar13 2026-03-03 21:34:07 +05:00 committed by GitHub
parent 5792c66879
commit 810ef0cf76
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -1098,6 +1098,18 @@ public:
cond_stage_lora_models.clear();
diffusion_lora_models.clear();
first_stage_lora_models.clear();
if (cond_stage_model) {
cond_stage_model->set_weight_adapter(nullptr);
}
if (diffusion_model) {
diffusion_model->set_weight_adapter(nullptr);
}
if (high_noise_diffusion_model) {
high_noise_diffusion_model->set_weight_adapter(nullptr);
}
if (first_stage_model) {
first_stage_model->set_weight_adapter(nullptr);
}
if (lora_state.empty()) {
return;
}