ci : bump ty to 0.0.26 (#21156)

* fix incorrect type ignore comments

* bump ty to 0.0.26
This commit is contained in:
Sigbjørn Skjæret 2026-03-30 09:29:15 +02:00 committed by GitHub
parent abf9a62161
commit e2eb39e81c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 16 additions and 16 deletions

View File

@@ -31,7 +31,7 @@ jobs:
uses: actions/setup-python@v6 uses: actions/setup-python@v6
with: with:
python-version: "3.11" python-version: "3.11"
pip-install: -r requirements/requirements-all.txt ty==0.0.24 pip-install: -r requirements/requirements-all.txt ty==0.0.26
# - name: Type-check with Pyright # - name: Type-check with Pyright
# uses: jakebailey/pyright-action@v2 # uses: jakebailey/pyright-action@v2
# with: # with:

View File

@@ -31,10 +31,10 @@ import gguf
from gguf.vocab import MistralTokenizerType, MistralVocab from gguf.vocab import MistralTokenizerType, MistralVocab
try: try:
from mistral_common.tokens.tokenizers.base import TokenizerVersion # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.base import TokenizerVersion # type: ignore[import-not-found, ty:unresolved-import]
from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN as _MISTRAL_COMMON_DATASET_MEAN, DATASET_STD as _MISTRAL_COMMON_DATASET_STD # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN as _MISTRAL_COMMON_DATASET_MEAN, DATASET_STD as _MISTRAL_COMMON_DATASET_STD # type: ignore[import-not-found, ty:unresolved-import]
from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found, ty:unresolved-import]
from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found, ty:unresolved-import]
SentencePieceTokenizer, SentencePieceTokenizer,
) )

View File

@@ -7,7 +7,7 @@ import os
# Add utils directory to path for direct script execution # Add utils directory to path for direct script execution
sys.path.insert(0, str(Path(__file__).parent.parent / "utils")) sys.path.insert(0, str(Path(__file__).parent.parent / "utils"))
from common import get_model_name_from_env_path, compare_tokens, exit_with_warning # type: ignore[import-not-found] from common import get_model_name_from_env_path, compare_tokens, exit_with_warning # type: ignore[import-not-found, ty:unresolved-import]
def quick_logits_check(pytorch_file, llamacpp_file): def quick_logits_check(pytorch_file, llamacpp_file):
"""Lightweight sanity check before NMSE""" """Lightweight sanity check before NMSE"""

View File

@@ -5,7 +5,7 @@ import sys
import os import os
import argparse import argparse
from pathlib import Path from pathlib import Path
from common import get_model_name_from_env_path # type: ignore[import-not-found] from common import get_model_name_from_env_path # type: ignore[import-not-found, ty:unresolved-import]
def calculate_nmse(reference, test): def calculate_nmse(reference, test):
mse = np.mean((test - reference) ** 2) mse = np.mean((test - reference) ** 2)

View File

@@ -2,7 +2,7 @@
import argparse import argparse
import sys import sys
from common import compare_tokens # type: ignore[import-not-found] from common import compare_tokens # type: ignore[import-not-found, ty:unresolved-import]
def parse_arguments(): def parse_arguments():

View File

@@ -7,7 +7,7 @@ import importlib
from pathlib import Path from pathlib import Path
from transformers import AutoTokenizer, AutoConfig, AutoModelForCausalLM, AutoModel from transformers import AutoTokenizer, AutoConfig, AutoModelForCausalLM, AutoModel
from common import compare_tokens, exit_with_warning # type: ignore[import-not-found] from common import compare_tokens, exit_with_warning # type: ignore[import-not-found, ty:unresolved-import]
unreleased_model_name = os.getenv('UNRELEASED_MODEL_NAME') unreleased_model_name = os.getenv('UNRELEASED_MODEL_NAME')

View File

@@ -14,12 +14,12 @@ except ImportError:
SentencePieceProcessor: Any = None SentencePieceProcessor: Any = None
try: try:
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.mistral import MistralTokenizer # type: ignore[import-not-found, ty:unresolved-import]
from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found, ty:unresolved-import]
from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found, ty:unresolved-import]
_filter_valid_tokenizer_files, _filter_valid_tokenizer_files,
) )
from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found, ty:unresolved-import]
SentencePieceTokenizer, SentencePieceTokenizer,
) )
except ImportError: except ImportError:
@@ -32,7 +32,7 @@ else:
_mistral_common_installed = True _mistral_common_installed = True
try: try:
from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found] from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found, ty:unresolved-import]
get_one_valid_tokenizer_file, get_one_valid_tokenizer_file,
) )
except ImportError: except ImportError:

View File

@@ -147,7 +147,7 @@ ranges_nfd: list[tuple[int, int, int]] = [(0, 0, 0)]  # start, last, nfd
for codepoint, norm in table_nfd: for codepoint, norm in table_nfd:
start = ranges_nfd[-1][0] start = ranges_nfd[-1][0]
if ranges_nfd[-1] != (start, codepoint - 1, norm): if ranges_nfd[-1] != (start, codepoint - 1, norm):
ranges_nfd.append(None) # type: ignore[arg-type] # dummy, will be replaced below ranges_nfd.append((0, 0, 0)) # dummy, will be replaced below
start = codepoint start = codepoint
ranges_nfd[-1] = (start, codepoint, norm) ranges_nfd[-1] = (start, codepoint, norm)

View File

@@ -116,7 +116,7 @@ class ServerProcess:
self.server_port = int(os.environ["PORT"]) self.server_port = int(os.environ["PORT"])
self.external_server = "DEBUG_EXTERNAL" in os.environ self.external_server = "DEBUG_EXTERNAL" in os.environ
def start(self, timeout_seconds: int | None = DEFAULT_HTTP_TIMEOUT) -> None: def start(self, timeout_seconds: int = DEFAULT_HTTP_TIMEOUT) -> None:
if self.external_server: if self.external_server:
print(f"[external_server]: Assuming external server running on {self.server_host}:{self.server_port}") print(f"[external_server]: Assuming external server running on {self.server_host}:{self.server_port}")
return return