From f32778568ab5a250632192b25c8e2a26810b81d6 Mon Sep 17 00:00:00 2001 From: ylwango613 <128395302+ylwango613@users.noreply.github.com> Date: Sun, 4 Jan 2026 19:16:35 +0800 Subject: [PATCH] Update tools/mtmd/mtmd.cpp Co-authored-by: Aaron Teo --- tools/mtmd/mtmd.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tools/mtmd/mtmd.cpp b/tools/mtmd/mtmd.cpp index 3dce7958f8..2fb87987ce 100644 --- a/tools/mtmd/mtmd.cpp +++ b/tools/mtmd/mtmd.cpp @@ -774,8 +774,7 @@ struct mtmd_tokenizer { int n_tokens = text.length() + 2 * add_special; std::vector<llama_token> result(n_tokens); n_tokens = llama_tokenize(vocab, text.data(), text.length(), result.data(), result.size(), add_special, parse_special); - // -2147483648 is std::numeric_limits<int32_t>::min() - if (n_tokens == -2147483648) { + if (n_tokens == std::numeric_limits<int32_t>::min()) { throw std::runtime_error("Tokenization failed: input text too large, tokenization result exceeds int32_t limit"); } if (n_tokens < 0) {