context: ignore zero scale LoRAs when checking sameness (#20166)
This commit is contained in:
parent
f5ddcd1696
commit
388baabc06
|
|
@@ -1039,11 +1039,15 @@ void llama_context::set_adapters_lora(llama_adapter_lora ** adapters, size_t n_a
|
|||
bool llama_context::adapters_lora_are_same(llama_adapter_lora ** adapters, size_t n_adapters, float * scales) {
|
||||
LLAMA_LOG_DEBUG("%s: adapters = %p\n", __func__, (void *) adapters);
|
||||
|
||||
if (n_adapters != loras->size()) {
|
||||
return false;
|
||||
}
|
||||
// Adapters with a zero scale are never added to `loras`, so also ignore them for the comparison.
|
||||
size_t n_non_zero = 0;
|
||||
|
||||
for (size_t i = 0; i < n_adapters; i ++) {
|
||||
if (scales[i] == 0.0f) {
|
||||
continue;
|
||||
}
|
||||
n_non_zero++;
|
||||
|
||||
auto it = loras->find(adapters[i]);
|
||||
|
||||
if (it == loras->end() || it->second != scales[i]) {
|
||||
|
|
@@ -1051,6 +1055,10 @@ bool llama_context::adapters_lora_are_same(llama_adapter_lora ** adapters, size_
|
|||
}
|
||||
}
|
||||
|
||||
if (n_non_zero != loras->size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue