From abc19635a394b5dc17d0360af9a484cb9ce3e8fa Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 4 Dec 2025 17:42:09 +0200
Subject: [PATCH] cont : keep backend sampling disabled for now

---
 common/common.cpp            | 7 +++++--
 examples/batched/batched.cpp | 1 +
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/common/common.cpp b/common/common.cpp
index f52c41af76..b439bd3786 100644
--- a/common/common.cpp
+++ b/common/common.cpp
@@ -1101,8 +1101,11 @@ common_init_result::common_init_result(common_params & params) :
         pimpl->samplers_seq_config[i] = { i, common_sampler_get(pimpl->samplers[i].get()) };
     }
 
-    cparams.samplers = pimpl->samplers_seq_config.data();
-    cparams.n_samplers = pimpl->samplers_seq_config.size();
+    // TODO: temporarily gated behind a flag
+    if (params.sampling.backend_sampling) {
+        cparams.samplers = pimpl->samplers_seq_config.data();
+        cparams.n_samplers = pimpl->samplers_seq_config.size();
+    }
 
     llama_context * lctx = llama_init_from_model(model, cparams);
     if (lctx == NULL) {
diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp
index 0eb76316cb..6b134b4f6f 100644
--- a/examples/batched/batched.cpp
+++ b/examples/batched/batched.cpp
@@ -81,6 +81,7 @@ int main(int argc, char ** argv) {
         sampler_configs.push_back({ i, smpl });
     }
 
+    // TODO: temporarily gated behind a flag
     if (params.sampling.backend_sampling) {
         ctx_params.samplers = sampler_configs.data();
         ctx_params.n_samplers = sampler_configs.size();