fix tokenizer scope

austinvhuang 2024-03-10 13:23:16 -04:00
parent 0fc80fad05
commit 5d323c00fe
1 changed file with 2 additions and 2 deletions

@@ -797,10 +797,10 @@ void GemmaImpl<ConfigGemma7B>::Generate(
 Gemma::Gemma(const Path& tokenizer_path, const Path& compressed_weights_path,
              const Path& weights_path, Model model_type,
              hwy::ThreadPool& pool) {
+  std::unique_ptr<sentencepiece::SentencePieceProcessor> tokenizer;
   {
     PROFILER_ZONE("Startup.tokenizer");
-    std::unique_ptr<sentencepiece::SentencePieceProcessor> tokenizer =
-        std::make_unique<sentencepiece::SentencePieceProcessor>();
+    tokenizer = std::make_unique<sentencepiece::SentencePieceProcessor>();
     if (!tokenizer->Load(tokenizer_path.path).ok()) {
       HWY_ABORT("Failed to load the tokenizer file.");
     }
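The change fixes a C++ lifetime/scope issue: the tokenizer unique_ptr was declared inside the braced block that delimits the PROFILER_ZONE("Startup.tokenizer") region, so the name did not exist (and the object did not live) past the closing brace. The fix hoists the declaration above the block and only assigns inside it, so the pointer remains usable by the rest of the constructor. A minimal standalone sketch of the same pattern follows; Processor and the "tokenizer.model" path are hypothetical stand-ins for illustration, not the sentencepiece or gemma.cpp API:

#include <memory>
#include <string>

// Hypothetical stand-in for sentencepiece::SentencePieceProcessor,
// just to keep the sketch self-contained.
struct Processor {
  bool Load(const std::string& path) { return !path.empty(); }
};

int main() {
  // Before the fix, the equivalent of this declaration sat inside the
  // braced block below, so `tokenizer` vanished at the closing brace.
  std::unique_ptr<Processor> tokenizer;
  {
    // PROFILER_ZONE("Startup.tokenizer") limits the profiled region to
    // this block in the original code; the braces are the whole point.
    tokenizer = std::make_unique<Processor>();
    if (!tokenizer->Load("tokenizer.model")) {  // hypothetical path
      return 1;  // the original aborts via HWY_ABORT here
    }
  }
  // After the fix, `tokenizer` is still alive and usable out here.
  return tokenizer ? 0 : 1;
}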