refactor: Remove unused code
This commit is contained in:
parent
22d9e645aa
commit
f7b5f62586
|
|
@ -1,193 +0,0 @@
|
|||
import type {
|
||||
ApiChatCompletionToolCall,
|
||||
ApiChatCompletionToolCallDelta,
|
||||
ApiChatCompletionStreamChunk
|
||||
} from '$lib/types/api';
|
||||
import type { ChatMessagePromptProgress, ChatMessageTimings } from '$lib/types/chat';
|
||||
import { mergeToolCallDeltas, extractModelName } from '$lib/utils';
|
||||
import type { AgenticChatCompletionRequest } from '$lib/types/agentic';
|
||||
|
||||
/**
 * Hooks invoked by OpenAISseClient as streamed deltas arrive.
 * Every callback is optional; each fires only when the corresponding
 * data is present in the stream.
 */
export type OpenAISseCallbacks = {
	/** Incremental assistant content text. */
	onChunk?: (chunk: string) => void;
	/** Incremental reasoning ("thinking") text. */
	onReasoningChunk?: (chunk: string) => void;
	/** JSON-serialized snapshot of all tool calls aggregated so far. */
	onToolCallChunk?: (serializedToolCalls: string) => void;
	/** Model name; emitted at most once, when first detected in the stream. */
	onModel?: (model: string) => void;
	/** Fired at most once, on the first well-formed 'chat.completion.chunk'. */
	onFirstValidChunk?: () => void;
	/** Server timing and/or prompt-progress updates from a chunk. */
	onProcessingUpdate?: (timings?: ChatMessageTimings, progress?: ChatMessagePromptProgress) => void;
};
|
||||
|
||||
/**
 * Aggregated result of one fully consumed streaming turn.
 */
export type OpenAISseTurnResult = {
	/** Concatenation of all streamed content deltas. */
	content: string;
	/** Concatenated reasoning text, or undefined when none was streamed. */
	reasoningContent?: string;
	/** Tool calls assembled from the streamed tool-call deltas. */
	toolCalls: ApiChatCompletionToolCall[];
	/** Finish reason from the last choice that reported a non-null one. */
	finishReason?: string | null;
	/** Timings from the last chunk that carried them. */
	timings?: ChatMessageTimings;
};
|
||||
|
||||
/**
 * Configuration for OpenAISseClient.
 */
export type OpenAISseClientOptions = {
	/** Chat-completions endpoint URL that requests are POSTed to. */
	url: string;
	/** Optional factory for extra request headers (e.g. authorization); called per request. */
	buildHeaders?: () => Record<string, string>;
};
|
||||
|
||||
/**
 * Client for OpenAI-compatible chat-completion endpoints that respond with
 * Server-Sent Events (SSE). Aggregates streamed deltas (content, reasoning,
 * tool calls) into a single turn result while forwarding incremental updates
 * through the supplied callbacks.
 */
export class OpenAISseClient {
	constructor(private readonly options: OpenAISseClientOptions) {}

	/**
	 * POST `request` to the configured URL and consume the SSE response.
	 *
	 * @param request - Chat-completion payload, serialized as JSON.
	 * @param callbacks - Optional hooks invoked as deltas arrive.
	 * @param abortSignal - Aborts both the fetch and stream consumption.
	 * @returns Aggregated content, reasoning, tool calls and timings for the turn.
	 * @throws Error carrying the response body text (or HTTP status) when the
	 *   request fails, or when the response has no readable body stream.
	 */
	async stream(
		request: AgenticChatCompletionRequest,
		callbacks: OpenAISseCallbacks = {},
		abortSignal?: AbortSignal
	): Promise<OpenAISseTurnResult> {
		const response = await fetch(this.options.url, {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				// Caller-supplied headers (e.g. auth) are merged in on every request.
				...(this.options.buildHeaders?.() ?? {})
			},
			body: JSON.stringify(request),
			signal: abortSignal
		});

		if (!response.ok) {
			// Prefer the server's error text; fall back to the HTTP status code.
			const errorText = await response.text();
			throw new Error(errorText || `LLM request failed (${response.status})`);
		}

		const reader = response.body?.getReader();
		if (!reader) {
			throw new Error('LLM response stream is not available');
		}

		return this.consumeStream(reader, callbacks, abortSignal);
	}

	/**
	 * Read the SSE byte stream to completion, parsing each `data:` line as a
	 * chat-completion chunk and aggregating its deltas.
	 */
	private async consumeStream(
		reader: ReadableStreamDefaultReader<Uint8Array>,
		callbacks: OpenAISseCallbacks,
		abortSignal?: AbortSignal
	): Promise<OpenAISseTurnResult> {
		const decoder = new TextDecoder();
		let buffer = ''; // holds a trailing partial line between reads
		let aggregatedContent = '';
		let aggregatedReasoning = '';
		let aggregatedToolCalls: ApiChatCompletionToolCall[] = [];
		// Tool-call deltas within one "batch" use indices starting at 0. When a
		// batch is closed (content/reasoning arrives after it), the offset is
		// bumped so a later batch appends rather than merging into earlier calls.
		let hasOpenToolCallBatch = false;
		let toolCallIndexOffset = 0;
		let finishReason: string | null | undefined;
		let lastTimings: ChatMessageTimings | undefined;
		let modelEmitted = false; // onModel fires at most once
		let firstValidChunkEmitted = false; // onFirstValidChunk fires at most once

		// Close the current tool-call batch: subsequent deltas (whose indices
		// restart at 0) will land after the calls collected so far.
		const finalizeToolCallBatch = () => {
			if (!hasOpenToolCallBatch) return;
			toolCallIndexOffset = aggregatedToolCalls.length;
			hasOpenToolCallBatch = false;
		};

		// Merge incoming tool-call deltas into the aggregate and open a batch.
		const processToolCalls = (toolCalls?: ApiChatCompletionToolCallDelta[]) => {
			if (!toolCalls || toolCalls.length === 0) {
				return;
			}
			aggregatedToolCalls = mergeToolCallDeltas(
				aggregatedToolCalls,
				toolCalls,
				toolCallIndexOffset
			);
			if (aggregatedToolCalls.length === 0) {
				return;
			}
			hasOpenToolCallBatch = true;
		};

		try {
			while (true) {
				if (abortSignal?.aborted) {
					throw new DOMException('Aborted', 'AbortError');
				}

				const { done, value } = await reader.read();
				if (done) break;

				// stream:true keeps multi-byte UTF-8 sequences intact across reads.
				buffer += decoder.decode(value, { stream: true });
				const lines = buffer.split('\n');
				buffer = lines.pop() ?? ''; // keep the (possibly partial) last line

				for (const line of lines) {
					if (!line.startsWith('data: ')) {
						continue;
					}

					const payload = line.slice(6);
					// '[DONE]' is the end-of-stream sentinel; blank payloads are noise.
					if (payload === '[DONE]' || payload.trim().length === 0) {
						continue;
					}

					let chunk: ApiChatCompletionStreamChunk;
					try {
						chunk = JSON.parse(payload) as ApiChatCompletionStreamChunk;
					} catch (error) {
						// Skip malformed chunks rather than aborting the whole turn.
						console.error('[Agentic][SSE] Failed to parse chunk:', error);
						continue;
					}

					if (!firstValidChunkEmitted && chunk.object === 'chat.completion.chunk') {
						firstValidChunkEmitted = true;
						callbacks.onFirstValidChunk?.();
					}

					const choice = chunk.choices?.[0];
					const delta = choice?.delta;
					// Remember the most recent non-null finish reason.
					finishReason = choice?.finish_reason ?? finishReason;

					if (!modelEmitted) {
						const chunkModel = extractModelName(chunk);
						if (chunkModel) {
							modelEmitted = true;
							callbacks.onModel?.(chunkModel);
						}
					}

					if (chunk.timings || chunk.prompt_progress) {
						callbacks.onProcessingUpdate?.(chunk.timings, chunk.prompt_progress);
						if (chunk.timings) {
							lastTimings = chunk.timings;
						}
					}

					if (delta?.content) {
						// Content arriving after tool calls marks the batch complete.
						finalizeToolCallBatch();
						aggregatedContent += delta.content;
						callbacks.onChunk?.(delta.content);
					}

					if (delta?.reasoning_content) {
						finalizeToolCallBatch();
						aggregatedReasoning += delta.reasoning_content;
						callbacks.onReasoningChunk?.(delta.reasoning_content);
					}

					processToolCalls(delta?.tool_calls);
					if (aggregatedToolCalls.length > 0) {
						// Re-serialize the full aggregate on every tool-call update.
						callbacks.onToolCallChunk?.(JSON.stringify(aggregatedToolCalls));
					}
				}
			}

			finalizeToolCallBatch();
		} catch (error) {
			if ((error as Error).name === 'AbortError') {
				// Propagate aborts unchanged so callers can distinguish them.
				throw error;
			}
			throw error instanceof Error ? error : new Error('LLM stream error');
		} finally {
			// Always release the lock so the underlying stream can be reused/cancelled.
			reader.releaseLock();
		}

		return {
			content: aggregatedContent,
			reasoningContent: aggregatedReasoning || undefined,
			toolCalls: aggregatedToolCalls,
			finishReason,
			timings: lastTimings
		};
	}
}
|
||||
|
|
@ -1,29 +0,0 @@
|
|||
/**
|
||||
* Decode base64 to UTF-8 string using modern APIs.
|
||||
* Falls back to legacy method if TextDecoder is unavailable.
|
||||
*
|
||||
* @param base64 - Base64 encoded string (padding is optional)
|
||||
* @returns Decoded UTF-8 string, or empty string if decoding fails
|
||||
*/
|
||||
export function decodeBase64(base64: string): string {
|
||||
if (!base64) return '';
|
||||
|
||||
const padded = base64 + '=='.slice(0, (4 - (base64.length % 4)) % 4);
|
||||
|
||||
try {
|
||||
const binaryString = atob(padded);
|
||||
const bytes = new Uint8Array(binaryString.length);
|
||||
for (let i = 0; i < binaryString.length; i++) {
|
||||
bytes[i] = binaryString.charCodeAt(i);
|
||||
}
|
||||
return new TextDecoder('utf-8').decode(bytes);
|
||||
} catch {
|
||||
// Fallback to legacy method
|
||||
try {
|
||||
return decodeURIComponent(escape(atob(padded)));
|
||||
} catch {
|
||||
// Return empty string if all decoding fails
|
||||
return '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
import type {
|
||||
ApiChatCompletionResponse,
|
||||
ApiChatCompletionStreamChunk,
|
||||
ApiChatCompletionToolCall,
|
||||
ApiChatCompletionToolCallDelta
|
||||
} from '$lib/types/api';
|
||||
|
||||
export function mergeToolCallDeltas(
|
||||
existing: ApiChatCompletionToolCall[],
|
||||
deltas: ApiChatCompletionToolCallDelta[],
|
||||
indexOffset = 0
|
||||
): ApiChatCompletionToolCall[] {
|
||||
const result = existing.map((call) => ({
|
||||
...call,
|
||||
function: call.function ? { ...call.function } : undefined
|
||||
}));
|
||||
|
||||
for (const delta of deltas) {
|
||||
const index =
|
||||
typeof delta.index === 'number' && delta.index >= 0
|
||||
? delta.index + indexOffset
|
||||
: result.length;
|
||||
|
||||
while (result.length <= index) {
|
||||
result.push({ function: undefined });
|
||||
}
|
||||
|
||||
const target = result[index]!;
|
||||
|
||||
if (delta.id) {
|
||||
target.id = delta.id;
|
||||
}
|
||||
|
||||
if (delta.type) {
|
||||
target.type = delta.type;
|
||||
}
|
||||
|
||||
if (delta.function) {
|
||||
const fn = target.function ? { ...target.function } : {};
|
||||
|
||||
if (delta.function.name) {
|
||||
fn.name = delta.function.name;
|
||||
}
|
||||
|
||||
if (delta.function.arguments) {
|
||||
fn.arguments = (fn.arguments ?? '') + delta.function.arguments;
|
||||
}
|
||||
|
||||
target.function = fn;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function extractModelName(
|
||||
data: ApiChatCompletionStreamChunk | ApiChatCompletionResponse | unknown
|
||||
): string | undefined {
|
||||
const asRecord = (value: unknown): Record<string, unknown> | undefined => {
|
||||
return typeof value === 'object' && value !== null
|
||||
? (value as Record<string, unknown>)
|
||||
: undefined;
|
||||
};
|
||||
|
||||
const getTrimmedString = (value: unknown): string | undefined => {
|
||||
return typeof value === 'string' && value.trim() ? value.trim() : undefined;
|
||||
};
|
||||
|
||||
const root = asRecord(data);
|
||||
if (!root) return undefined;
|
||||
|
||||
const rootModel = getTrimmedString(root.model);
|
||||
if (rootModel) return rootModel;
|
||||
|
||||
const firstChoice = Array.isArray(root.choices) ? asRecord(root.choices[0]) : undefined;
|
||||
if (!firstChoice) return undefined;
|
||||
|
||||
const deltaModel = getTrimmedString(asRecord(firstChoice.delta)?.model);
|
||||
if (deltaModel) return deltaModel;
|
||||
|
||||
const messageModel = getTrimmedString(asRecord(firstChoice.message)?.model);
|
||||
if (messageModel) return messageModel;
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
|
@ -104,12 +104,6 @@ export { isTextFileByName, readFileAsText, isLikelyTextFile } from './text-files
|
|||
// Agentic utilities
|
||||
export { toAgenticMessages, getAgenticConfig } from './agentic';
|
||||
|
||||
// Base64 utilities
|
||||
export { decodeBase64 } from './base64';
|
||||
|
||||
// Chat stream utilities
|
||||
export { mergeToolCallDeltas, extractModelName } from './chat-stream';
|
||||
|
||||
// Debounce utilities
|
||||
export { debounce } from './debounce';
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue