context: ignore zero scale LoRAs when checking sameness (#20166)

This commit is contained in:
Tim Neumann 2026-03-06 14:05:52 +01:00 committed by GitHub
parent f5ddcd1696
commit 388baabc06
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 11 additions and 3 deletions

View File

@@ -1039,11 +1039,15 @@ void llama_context::set_adapters_lora(llama_adapter_lora ** adapters, size_t n_a
bool llama_context::adapters_lora_are_same(llama_adapter_lora ** adapters, size_t n_adapters, float * scales) {
LLAMA_LOG_DEBUG("%s: adapters = %p\n", __func__, (void *) adapters);
if (n_adapters != loras->size()) {
return false;
}
// Adapters with a zero scale are never added to `loras`, so also ignore them for the comparison.
size_t n_non_zero = 0;
for (size_t i = 0; i < n_adapters; i ++) {
if (scales[i] == 0.0f) {
continue;
}
n_non_zero++;
auto it = loras->find(adapters[i]);
if (it == loras->end() || it->second != scales[i]) {
@@ -1051,6 +1055,10 @@ bool llama_context::adapters_lora_are_same(llama_adapter_lora ** adapters, size_
}
}
if (n_non_zero != loras->size()) {
return false;
}
return true;
}