Minor: do not add attention_size_swa input for non-SWA models

This commit is contained in:
Yu, Zijun 2025-09-17 16:50:54 +08:00 committed by Mustafa Cavus
parent 1a38339cea
commit 67e178a2f6
1 changed file with 3 additions and 1 deletion

View File

@ -360,7 +360,9 @@ void GgmlOvDecoder::add_extra_inputs() {
};
create_attention_size_input("attention_size", attention_size);
create_attention_size_input("attention_size_swa", attention_size_swa);
if (attention_size_swa != -1) {
create_attention_size_input("attention_size_swa", attention_size_swa);
}
}
const ggml_tensor* GgmlOvDecoder::get_tensor_used_op(const ggml_tensor* tensor) const {