llama-mmap: fix direct-io loading fallback EOF exception (#18801)

Ruben Ortlam 2026-01-13 15:57:07 +01:00 committed by GitHub
parent 20ca2e12c4
commit 960e5e3b46
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 5 additions and 2 deletions

@@ -244,11 +244,14 @@ struct llama_file::impl {
         }
         errno = 0;
         if (fd == -1) {
-            std::size_t ret = std::fread(ptr, len, 1, fp);
+            const size_t curr_off = tell();
+            const size_t to_read = std::min(len, size - curr_off);
+
+            std::size_t ret = std::fread(ptr, to_read, 1, fp);
             if (ferror(fp)) {
                 throw std::runtime_error(format("read error: %s", strerror(errno)));
             }
-            if (ret != 1) {
+            if (to_read > 0 && ret != 1) {
                 throw std::runtime_error("unexpectedly reached end of file");
             }
         } else {
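
The effect of the change, as read from the diff: the buffered fallback path now clamps the requested length to the bytes remaining in the file and skips the EOF check when nothing was left to read. Previously, a read issued at end of file made fread come up short, so the unconditional ret != 1 check threw "unexpectedly reached end of file". The to_read > 0 guard also matters for the new code itself, since std::fread returns 0 rather than 1 when asked for a zero-byte element. Below is a minimal standalone sketch of that logic, not the llama.cpp implementation: the helper name read_clamped and its std::ftell-based offset handling are illustrative stand-ins for llama_file::impl's tell(), size and fp members.

    // Minimal standalone sketch (not the llama.cpp code) of the clamped
    // fallback read introduced by this patch.
    #include <algorithm>
    #include <cerrno>
    #include <cstddef>
    #include <cstdio>
    #include <cstring>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Read up to len bytes, clamped to what is left in the file. A short read
    // only throws when bytes were actually expected, so calling this exactly
    // at end of file (to_read == 0) is not treated as an error.
    static void read_clamped(std::FILE * fp, std::size_t file_size, void * ptr, std::size_t len) {
        errno = 0;
        const long pos = std::ftell(fp);
        if (pos < 0) {
            throw std::runtime_error("ftell failed");
        }
        const std::size_t curr_off = static_cast<std::size_t>(pos);
        const std::size_t to_read  = std::min(len, file_size - curr_off);
        const std::size_t ret      = std::fread(ptr, to_read, 1, fp);
        if (std::ferror(fp)) {
            throw std::runtime_error(std::string("read error: ") + std::strerror(errno));
        }
        if (to_read > 0 && ret != 1) {
            throw std::runtime_error("unexpectedly reached end of file");
        }
    }

    int main() {
        // Write five bytes to a temporary file, then read with a buffer larger
        // than the file to exercise the clamping and the at-EOF path.
        std::FILE * fp = std::tmpfile();
        std::fwrite("hello", 1, 5, fp);
        std::rewind(fp);

        std::vector<char> buf(16);
        read_clamped(fp, 5, buf.data(), buf.size()); // clamps to the 5 available bytes
        read_clamped(fp, 5, buf.data(), buf.size()); // at EOF: to_read == 0, no throw
        std::fclose(fp);
        return 0;
    }

Compiled with any C++11 compiler, this runs cleanly: the second read_clamped call hits the at-EOF path without throwing, which is the behavior the patch restores for the direct-I/O loading fallback.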