From 3228e7728789e0456d0458ce38d20d0b1d60a9aa Mon Sep 17 00:00:00 2001
From: Alex Trotta <44127594+Ahajha@users.noreply.github.com>
Date: Fri, 6 Feb 2026 15:05:19 -0500
Subject: [PATCH] gguf-py : bump sentencepiece version (#19319)

* gguf-py: Bump sentencepiece version

There's a new version that's been out for a while that addresses the
issues mentioned in https://github.com/ggml-org/llama.cpp/pull/14200.

There's a long chain of reasons I would like this change, but the short
version is that it allows people who use both `sentencepiece` and `gguf`
to take advantage of these fixes. On conda-forge, currently, it locks
the version (since there is no notion of optional dependencies).

Regardless, I don't think this should be too controversial.

* review feedback
---
 gguf-py/pyproject.toml                             | 2 +-
 pyproject.toml                                     | 2 +-
 requirements/requirements-convert_legacy_llama.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml
index f6c4cd14e7..48693ae3e3 100644
--- a/gguf-py/pyproject.toml
+++ b/gguf-py/pyproject.toml
@@ -23,7 +23,7 @@ numpy = ">=1.17"
 tqdm = ">=4.27"
 pyyaml = ">=5.1"
 requests = ">=2.25"
-sentencepiece = { version = ">=0.1.98,<=0.2.0", optional = true }
+sentencepiece = { version = ">=0.1.98,<0.3.0", optional = true }
 PySide6 = { version = "^6.9", python = ">=3.9,<3.14", optional = true }
 
 [tool.poetry.dev-dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index 3d71b055a8..422f53c7c7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,7 @@ classifiers = [
 [tool.poetry.dependencies]
 python = ">=3.9"
 numpy = "^1.25.0"
-sentencepiece = ">=0.1.98,<=0.2.0"
+sentencepiece = ">=0.1.98,<0.3.0"
 transformers = ">=4.35.2,<5.0.0"
 protobuf = ">=4.21.0,<5.0.0"
 gguf = { path = "./gguf-py" }
diff --git a/requirements/requirements-convert_legacy_llama.txt b/requirements/requirements-convert_legacy_llama.txt
index dbab3b9508..4898bf7ee2 100644
--- a/requirements/requirements-convert_legacy_llama.txt
+++ b/requirements/requirements-convert_legacy_llama.txt
@@ -1,5 +1,5 @@
 numpy~=1.26.4
-sentencepiece~=0.2.0
+sentencepiece>=0.1.98,<0.3.0
 transformers>=4.57.1,<5.0.0
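
The substance of the change is relaxing the upper bound on the `sentencepiece` version specifier from `<=0.2.0` to `<0.3.0`. A minimal sketch of what that buys, assuming the third-party `packaging` library (the same specifier machinery used by pip/Poetry) and using `0.2.1` only as an example of a release newer than `0.2.0`; this is illustrative and not part of the patch:

```python
# Illustrative only: compare the old and new version specifiers from this patch.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

old = SpecifierSet(">=0.1.98,<=0.2.0")  # previous constraint
new = SpecifierSet(">=0.1.98,<0.3.0")   # constraint after this patch

for release in ["0.1.99", "0.2.0", "0.2.1"]:
    v = Version(release)
    print(f"{release}: old={v in old} new={v in new}")
# A release above 0.2.0 (e.g. 0.2.1) is rejected by the old constraint but
# accepted by the new one, which is what lets projects that depend on both
# `sentencepiece` and `gguf` resolve to a newer sentencepiece release.
```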