mtmd: replace magic constant -2147483648 with std::numeric_limits<int32_t>::min() in tokenizer overflow check (tools/mtmd/mtmd.cpp)
Co-authored-by: Aaron Teo <taronaeo@gmail.com>
This commit is contained in:
parent
4e20fcdc95
commit
f32778568a
|
|
@ -774,8 +774,7 @@ struct mtmd_tokenizer {
|
|||
int n_tokens = text.length() + 2 * add_special;
|
||||
std::vector<llama_token> result(n_tokens);
|
||||
n_tokens = llama_tokenize(vocab, text.data(), text.length(), result.data(), result.size(), add_special, parse_special);
|
||||
// -2147483648 is std::numeric_limits<int32_t>::min()
|
||||
if (n_tokens == -2147483648) {
|
||||
if (n_tokens == std::numeric_limits<int32_t>::min()) {
|
||||
throw std::runtime_error("Tokenization failed: input text too large, tokenization result exceeds int32_t limit");
|
||||
}
|
||||
if (n_tokens < 0) {
|
||||
|
|
|
|||
Loading…
Reference in New Issue