Use a separate callbacks argument for sendCompletion

This commit is contained in:
Leszek Hanusz 2026-02-10 01:20:14 +01:00
parent 8f79f1fccb
commit a35e4c4d81
4 changed files with 66 additions and 75 deletions

Binary file not shown.

View File

@ -2,8 +2,8 @@ import { getJsonHeaders } from '$lib/utils';
import { ChatService } from '$lib/services/chat'; import { ChatService } from '$lib/services/chat';
import type { ApiCompletionRequest, ApiCompletionStreamChunk } from '$lib/types/api'; import type { ApiCompletionRequest, ApiCompletionStreamChunk } from '$lib/types/api';
import type { ChatMessageTimings, ChatMessagePromptProgress } from '$lib/types/chat'; import type { ChatMessageTimings } from '$lib/types/chat';
import type { SettingsChatServiceOptions } from '$lib/types/settings'; import type { CompletionServiceCallbacks, SettingsChatServiceOptions } from '$lib/types/settings';
/** /**
* CompletionService - Low-level API communication layer for raw text completions. * CompletionService - Low-level API communication layer for raw text completions.
@ -15,21 +15,18 @@ export class CompletionService {
* Supports only streaming responses. * Supports only streaming responses.
* *
* @param prompt - The text prompt to complete * @param prompt - The text prompt to complete
* @param callbacks - Callbacks methods (onChunk, onComplete, ...)
* @param options - Configuration options for the completion request * @param options - Configuration options for the completion request
* @returns {Promise<void>} that resolves to void * @returns {Promise<void>} that resolves to void
* @throws {Error} if the request fails or is aborted * @throws {Error} if the request fails or is aborted
*/ */
static async sendCompletion( static async sendCompletion(
prompt: string, prompt: string,
callbacks: CompletionServiceCallbacks,
options: SettingsChatServiceOptions = {}, options: SettingsChatServiceOptions = {},
signal?: AbortSignal signal?: AbortSignal
): Promise<string | void> { ): Promise<string | void> {
const { const {
onChunk,
onComplete,
onError,
onModel,
onTimings,
// Generation parameters // Generation parameters
temperature, temperature,
max_tokens, max_tokens,
@ -126,21 +123,13 @@ export class CompletionService {
if (!response.ok) { if (!response.ok) {
const error = await ChatService.parseErrorResponse(response); const error = await ChatService.parseErrorResponse(response);
if (onError) { if (callbacks.onError) {
onError(error); callbacks.onError(error);
} }
throw error; throw error;
} }
await CompletionService.handleCompletionStreamResponse( await CompletionService.handleCompletionStreamResponse(response, callbacks, signal);
response,
onChunk,
onComplete,
onError,
onModel,
onTimings,
signal
);
return; return;
} catch (error) { } catch (error) {
if (error instanceof Error && error.name === 'AbortError') { if (error instanceof Error && error.name === 'AbortError') {
@ -170,8 +159,8 @@ export class CompletionService {
} }
console.error('Error in sendCompletion:', error); console.error('Error in sendCompletion:', error);
if (onError) { if (callbacks.onError) {
onError(userFriendlyError); callbacks.onError(userFriendlyError);
} }
throw userFriendlyError; throw userFriendlyError;
} }
@ -182,16 +171,7 @@ export class CompletionService {
*/ */
private static async handleCompletionStreamResponse( private static async handleCompletionStreamResponse(
response: Response, response: Response,
onChunk?: (chunk: string) => void, callbacks: CompletionServiceCallbacks,
onComplete?: (
response: string,
reasoningContent?: string,
timings?: ChatMessageTimings,
toolCalls?: string
) => void,
onError?: (error: Error) => void,
onModel?: (model: string) => void,
onTimings?: (timings?: ChatMessageTimings, promptProgress?: ChatMessagePromptProgress) => void,
abortSignal?: AbortSignal abortSignal?: AbortSignal
): Promise<void> { ): Promise<void> {
const reader = response.body?.getReader(); const reader = response.body?.getReader();
@ -247,22 +227,22 @@ export class CompletionService {
if (model && !modelEmitted) { if (model && !modelEmitted) {
modelEmitted = true; modelEmitted = true;
onModel?.(model); callbacks.onModel?.(model);
} }
if (promptProgress) { if (promptProgress) {
ChatService.notifyTimings(undefined, promptProgress, onTimings); ChatService.notifyTimings(undefined, promptProgress, callbacks.onTimings);
} }
if (timings) { if (timings) {
ChatService.notifyTimings(timings, promptProgress, onTimings); ChatService.notifyTimings(timings, promptProgress, callbacks.onTimings);
lastTimings = timings; lastTimings = timings;
} }
if (content) { if (content) {
aggregatedContent += content; aggregatedContent += content;
if (!abortSignal?.aborted) { if (!abortSignal?.aborted) {
onChunk?.(content); callbacks.onChunk?.(content);
} }
} }
} catch (e) { } catch (e) {
@ -281,11 +261,11 @@ export class CompletionService {
} }
if (streamFinished) { if (streamFinished) {
onComplete?.(aggregatedContent, undefined, lastTimings, undefined); callbacks.onComplete?.(aggregatedContent, lastTimings);
} }
} catch (error) { } catch (error) {
const err = error instanceof Error ? error : new Error('Stream error'); const err = error instanceof Error ? error : new Error('Stream error');
onError?.(err); callbacks.onError?.(err);
throw err; throw err;
} finally { } finally {
reader.releaseLock(); reader.releaseLock();

View File

@ -48,50 +48,52 @@ export class NotebookStore {
try { try {
const currentConfig = config(); const currentConfig = config();
const callbacks = {
onChunk: (chunk: string) => {
this.content += chunk;
},
onTimings: (timings: ChatMessageTimings, promptProgress: ChatMessagePromptProgress) => {
if (timings) {
if (timings.cache_n) this.cacheTokens = timings.cache_n;
if (timings.prompt_n) this.promptTokens = timings.prompt_n;
if (timings.prompt_ms) this.promptMs = timings.prompt_ms;
if (timings.predicted_n) this.predictedTokens = timings.predicted_n;
if (timings.predicted_ms) this.predictedMs = timings.predicted_ms;
}
if (promptProgress) {
// Update prompt stats from progress
const { processed, time_ms } = promptProgress;
if (processed > 0) this.promptTokens = processed;
if (time_ms > 0) this.promptMs = time_ms;
}
// Update totalTokens live
this.totalTokens = this.cacheTokens + this.promptTokens + this.predictedTokens;
},
onComplete: () => {
this.isGenerating = false;
},
onError: (error: unknown) => {
if (error instanceof Error && error.name === 'AbortError') {
// aborted by user
} else {
console.error('Notebook generation error:', error);
this.error = {
message: error instanceof Error ? error.message : String(error),
type: 'server'
};
}
this.isGenerating = false;
}
};
await CompletionService.sendCompletion( await CompletionService.sendCompletion(
this.content, this.content,
callbacks,
{ {
...currentConfig, ...currentConfig,
model, model,
stream: true, timings_per_token: true
timings_per_token: true,
onChunk: (chunk: string) => {
this.content += chunk;
},
onTimings: (timings: ChatMessageTimings, promptProgress: ChatMessagePromptProgress) => {
if (timings) {
if (timings.cache_n) this.cacheTokens = timings.cache_n;
if (timings.prompt_n) this.promptTokens = timings.prompt_n;
if (timings.prompt_ms) this.promptMs = timings.prompt_ms;
if (timings.predicted_n) this.predictedTokens = timings.predicted_n;
if (timings.predicted_ms) this.predictedMs = timings.predicted_ms;
}
if (promptProgress) {
// Update prompt stats from progress
const { processed, time_ms } = promptProgress;
if (processed > 0) this.promptTokens = processed;
if (time_ms > 0) this.promptMs = time_ms;
}
// Update totalTokens live
this.totalTokens = this.cacheTokens + this.promptTokens + this.predictedTokens;
},
onComplete: () => {
this.isGenerating = false;
},
onError: (error: unknown) => {
if (error instanceof Error && error.name === 'AbortError') {
// aborted by user
} else {
console.error('Notebook generation error:', error);
this.error = {
message: error instanceof Error ? error.message : String(error),
type: 'server'
};
}
this.isGenerating = false;
}
}, },
this.abortController.signal this.abortController.signal
); );

View File

@ -12,6 +12,15 @@ export interface SettingsFieldConfig {
options?: Array<{ value: string; label: string; icon?: typeof import('@lucide/svelte').Icon }>; options?: Array<{ value: string; label: string; icon?: typeof import('@lucide/svelte').Icon }>;
} }
/**
 * Optional callbacks invoked by CompletionService.sendCompletion while a
 * streamed completion is in flight.
 */
export interface CompletionServiceCallbacks {
	/** Invoked for each streamed text fragment as it arrives (skipped once the request is aborted). */
	onChunk?: (chunk: string) => void;
	/** Invoked at most once, the first time the model name appears in the stream. */
	onModel?: (model: string) => void;
	/** Invoked with per-token timing stats and/or prompt-processing progress updates. */
	onTimings?: (timings?: ChatMessageTimings, promptProgress?: ChatMessagePromptProgress) => void;
	/** Invoked when the stream finishes normally, with the aggregated response text and the last timings seen. */
	onComplete?: (response: string, timings?: ChatMessageTimings) => void;
	/** Invoked when the request fails or the stream errors (the error is also re-thrown to the caller). */
	onError?: (error: Error) => void;
}
export interface SettingsChatServiceOptions { export interface SettingsChatServiceOptions {
stream?: boolean; stream?: boolean;
// Model (required in ROUTER mode, optional in MODEL mode) // Model (required in ROUTER mode, optional in MODEL mode)