From 2d2d9c206224040778222b221f2c621630226974 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrien=20Gallou=C3=ABt?=
Date: Tue, 24 Mar 2026 09:24:39 +0100
Subject: [PATCH] common : add a WARNING for HF cache migration (#20935)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Adrien Gallouët
---
 README.md           |  1 +
 common/hf-cache.cpp | 15 +++++++++++++++
 2 files changed, 16 insertions(+)

diff --git a/README.md b/README.md
index 8339105100..7d3ae6b7c2 100644
--- a/README.md
+++ b/README.md
@@ -17,6 +17,7 @@ LLM inference in C/C++

 ## Hot topics

+- **HuggingFace cache migration: models downloaded with `-hf` are now stored in the standard HuggingFace cache directory, enabling sharing with other HF tools.**
 - **[guide : using the new WebUI of llama.cpp](https://github.com/ggml-org/llama.cpp/discussions/16938)**
 - [guide : running gpt-oss with llama.cpp](https://github.com/ggml-org/llama.cpp/discussions/15396)
 - [[FEEDBACK] Better packaging for llama.cpp to support downstream consumers 🤗](https://github.com/ggml-org/llama.cpp/discussions/15313)
diff --git a/common/hf-cache.cpp b/common/hf-cache.cpp
index ad68c55674..ce66f64679 100644
--- a/common/hf-cache.cpp
+++ b/common/hf-cache.cpp
@@ -590,6 +590,8 @@ void migrate_old_cache_to_hf_cache(const std::string & token, bool offline) {
         return; // -hf is not going to work
     }

+    bool warned = false;
+
     for (const auto & entry : fs::directory_iterator(old_cache)) {
         if (!entry.is_regular_file()) {
             continue;
@@ -601,6 +603,19 @@ void migrate_old_cache_to_hf_cache(const std::string & token, bool offline) {
             continue;
         }

+        if (!warned) {
+            warned = true;
+            LOG_WRN("================================================================================\n"
+                    "WARNING: Migrating cache to HuggingFace cache directory\n"
+                    "         Old cache: %s\n"
+                    "         New cache: %s\n"
+                    "This one-time migration moves models previously downloaded with -hf\n"
+                    "from the legacy llama.cpp cache to the standard HuggingFace cache.\n"
+                    "Models downloaded with --model-url are not affected.\n"
+                    "================================================================================\n",
+                    old_cache.string().c_str(), get_cache_directory().string().c_str());
+        }
+
         auto repo_id = owner + "/" + repo;
         auto files = get_repo_files(repo_id, token);