mirror of https://github.com/google/gemma.cpp.git
fix tokenizer scope
parent 0fc80fad05
commit 5d323c00fe
gemma.cc | 4
@@ -797,10 +797,10 @@ void GemmaImpl<ConfigGemma7B>::Generate(
 Gemma::Gemma(const Path& tokenizer_path, const Path& compressed_weights_path,
              const Path& weights_path, Model model_type,
              hwy::ThreadPool& pool) {
+  std::unique_ptr<sentencepiece::SentencePieceProcessor> tokenizer;
   {
     PROFILER_ZONE("Startup.tokenizer");
-    std::unique_ptr<sentencepiece::SentencePieceProcessor> tokenizer =
-        std::make_unique<sentencepiece::SentencePieceProcessor>();
+    tokenizer = std::make_unique<sentencepiece::SentencePieceProcessor>();
     if (!tokenizer->Load(tokenizer_path.path).ok()) {
       HWY_ABORT("Failed to load the tokenizer file.");
     }
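For context, a minimal sketch of the C++ lifetime issue this commit fixes. It is illustrative only: a stand-in Tokenizer type and plain control flow replace sentencepiece::SentencePieceProcessor, PROFILER_ZONE, and HWY_ABORT from gemma.cc.

// Illustrative sketch (not the actual gemma.cc code) of the scope fix above.
#include <memory>
#include <string>

struct Tokenizer {  // stand-in for sentencepiece::SentencePieceProcessor
  bool Load(const std::string& path) { return !path.empty(); }
};

std::unique_ptr<Tokenizer> LoadTokenizer(const std::string& path) {
  // Declared outside the braced block, as in the new side of the diff,
  // so the loaded tokenizer outlives the profiling scope.
  std::unique_ptr<Tokenizer> tokenizer;
  {
    // In gemma.cc this block exists to time startup via PROFILER_ZONE.
    tokenizer = std::make_unique<Tokenizer>();
    if (!tokenizer->Load(path)) {
      return nullptr;  // gemma.cc aborts here instead (HWY_ABORT).
    }
  }
  // Before the fix, `tokenizer` was declared inside the block above and
  // was destroyed at its closing brace, so it could not be used afterwards.
  return tokenizer;
}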