diff --git a/tools/mtmd/mtmd.cpp b/tools/mtmd/mtmd.cpp
index 3dce7958f8..2fb87987ce 100644
--- a/tools/mtmd/mtmd.cpp
+++ b/tools/mtmd/mtmd.cpp
@@ -774,8 +774,7 @@ struct mtmd_tokenizer {
         int n_tokens = text.length() + 2 * add_special;
         std::vector<llama_token> result(n_tokens);
         n_tokens = llama_tokenize(vocab, text.data(), text.length(), result.data(), result.size(), add_special, parse_special);
-        // -2147483648 is std::numeric_limits<int32_t>::min()
-        if (n_tokens == -2147483648) {
+        if (n_tokens == std::numeric_limits<int32_t>::min()) {
             throw std::runtime_error("Tokenization failed: input text too large, tokenization result exceeds int32_t limit");
         }
         if (n_tokens < 0) {