Fix tokenize when router mode is on

This commit is contained in:
Leszek Hanusz 2026-02-04 00:21:56 +01:00
parent 03077cf297
commit 9cf4742adb
5 changed files with 23 additions and 25 deletions

Binary file not shown.

View File

@ -11,8 +11,8 @@
KeyboardShortcutInfo,
ModelsSelector
} from '$lib/components/app';
import { useModelChangeValidation } from '$lib/hooks/use-model-change-validation.svelte';
import { modelsStore, modelOptions, selectedModelId } from '$lib/stores/models.svelte';
import { modelOptions, selectedModelId } from '$lib/stores/models.svelte';
import { isRouterMode } from '$lib/stores/server.svelte';
import * as Tooltip from '$lib/components/ui/tooltip';
@ -46,7 +46,9 @@
// Sync local input with store content
$effect(() => {
inputContent = notebookStore.content;
notebookStore.updateTokenCount();
if (activeModelId || !isRouter) {
notebookStore.updateTokenCount(activeModelId);
}
});
function handleInput(e: Event) {
@ -68,12 +70,8 @@
scrollToBottom();
}
if (notebookModel == null) {
notebookModel = activeModelId;
}
if (notebookModel) {
await notebookStore.generate(notebookModel);
if (activeModelId) {
await notebookStore.generate(activeModelId);
}
}
@ -135,15 +133,6 @@
let canGenerate = $derived(inputContent.length > 0 && hasModelSelected && isSelectedModelInCache);
let isDisabled = $derived(!canGenerate);
let notebookModel = $state<string | null>(null);
const { handleModelChange } = useModelChangeValidation({
getRequiredModalities: () => ({ vision: false, audio: false }), // Notebook doesn't require modalities
onSuccess: async (modelName) => {
notebookModel = modelName;
}
});
function handleScroll() {
if (disableAutoScroll || !scrollContainer) return;
@ -293,7 +282,7 @@
</Tooltip.Content>
</Tooltip.Root>
{#snippet generateButton(props = {})}
{#snippet generateButton()}
<Button
disabled={isDisabled}
onclick={notebookStore.isGenerating ? handleStop : handleGenerate}
@ -328,8 +317,6 @@
</Tooltip.Root>
<ModelsSelector
currentModel={notebookModel}
onModelChange={handleModelChange}
forceForegroundText={true}
useGlobalSelection={true}
disabled={notebookStore.isGenerating}

View File

@ -1132,15 +1132,21 @@ export class ChatService {
* Tokenizes the provided text using the server's tokenizer.
*
* @param content - The text content to tokenize
* @param model - Optional model name to use for tokenization (required in router mode)
* @param signal - Optional AbortSignal
* @returns {Promise<number[]>} Promise that resolves to an array of token IDs
*/
static async tokenize(content: string, signal?: AbortSignal): Promise<number[]> {
static async tokenize(content: string, model?: string, signal?: AbortSignal): Promise<number[]> {
try {
const body: { content: string; model?: string } = { content };
if (model) {
body.model = model;
}
const response = await fetch('./tokenize', {
method: 'POST',
headers: getJsonHeaders(),
body: JSON.stringify({ content }),
body: JSON.stringify(body),
signal
});

View File

@ -134,13 +134,17 @@ export class NotebookStore {
this.isGenerating = false;
}
updateTokenCount() {
updateTokenCount(model?: string) {
if (this.tokenizeTimeout) {
clearTimeout(this.tokenizeTimeout);
}
this.tokenizeTimeout = setTimeout(async () => {
const tokens = await ChatService.tokenize(this.content);
if (this.content.length === 0) {
this.totalTokens = 0;
return;
}
const tokens = await ChatService.tokenize(this.content, model);
this.totalTokens = tokens.length;
}, 500);
}

View File

@ -302,6 +302,7 @@ export interface ApiCompletionStreamChunk {
predicted_ms?: number;
cache_n?: number;
};
prompt_progress?: ChatMessagePromptProgress;
}
export interface ApiCompletionResponse {