diff --git a/tools/server/webui/src/lib/clients/chat.client.ts b/tools/server/webui/src/lib/clients/chat.client.ts
index 566ff9fa89..ca27c1fb38 100644
--- a/tools/server/webui/src/lib/clients/chat.client.ts
+++ b/tools/server/webui/src/lib/clients/chat.client.ts
@@ -17,6 +17,7 @@ import {
 import { DEFAULT_CONTEXT } from '$lib/constants/default-context';
 import { getAgenticConfig } from '$lib/utils/agentic';
 import { SYSTEM_MESSAGE_PLACEHOLDER } from '$lib/constants/ui';
+import { REASONING_TAGS } from '$lib/constants/agentic';
 import type { ChatMessageTimings, ChatMessagePromptProgress } from '$lib/types/chat';
 import type { DatabaseMessage, DatabaseMessageExtra } from '$lib/types/database';
@@ -87,6 +88,17 @@ interface ChatStoreStateCallbacks {
 	getCurrentResponse: () => string;
 }

+const countOccurrences = (source: string, token: string): number =>
+	source ? source.split(token).length - 1 : 0;
+
+const hasUnclosedReasoningTag = (content: string): boolean =>
+	countOccurrences(content, REASONING_TAGS.START) > countOccurrences(content, REASONING_TAGS.END);
+
+const wrapReasoningContent = (content: string, reasoningContent?: string): string => {
+	if (!reasoningContent) return content;
+	return `${REASONING_TAGS.START}${reasoningContent}${REASONING_TAGS.END}${content}`;
+};
+
 /**
  * ChatClient - Business Logic Facade for Chat Operations
  *
@@ -205,7 +217,6 @@ export class ChatClient {
 			content,
 			type,
 			timestamp: Date.now(),
-			thinking: '',
 			toolCalls: '',
 			children: [],
 			extra: extras
@@ -377,7 +388,6 @@ export class ChatClient {
 			role: 'assistant',
 			content: '',
 			timestamp: Date.now(),
-			thinking: '',
 			toolCalls: '',
 			children: [],
 			model: null
@@ -476,8 +486,9 @@ export class ChatClient {
 		}

 		let streamedContent = '';
-		let streamedReasoningContent = '';
 		let streamedToolCallContent = '';
+		let isReasoningOpen = false;
+		let hasStreamedChunks = false;
 		let resolvedModel: string | null = null;
 		let modelPersisted = false;
 		let streamedExtras: DatabaseMessageExtra[] = assistantMessage.extra
@@ -500,6 +511,39 @@
 			}
 		};

+		const updateStreamingContent = () => {
+			this.store.setChatStreaming(assistantMessage.convId, streamedContent, assistantMessage.id);
+			const idx = conversationsStore.findMessageIndex(assistantMessage.id);
+			conversationsStore.updateMessageAtIndex(idx, { content: streamedContent });
+		};
+
+		const appendContentChunk = (chunk: string) => {
+			if (isReasoningOpen) {
+				streamedContent += REASONING_TAGS.END;
+				isReasoningOpen = false;
+			}
+			streamedContent += chunk;
+			hasStreamedChunks = true;
+			updateStreamingContent();
+		};
+
+		const appendReasoningChunk = (chunk: string) => {
+			if (!isReasoningOpen) {
+				streamedContent += REASONING_TAGS.START;
+				isReasoningOpen = true;
+			}
+			streamedContent += chunk;
+			hasStreamedChunks = true;
+			updateStreamingContent();
+		};
+
+		const finalizeReasoning = () => {
+			if (isReasoningOpen) {
+				streamedContent += REASONING_TAGS.END;
+				isReasoningOpen = false;
+			}
+		};
+
 		this.store.setStreamingActive(true);
 		this.store.setActiveProcessingConversation(assistantMessage.convId);
@@ -507,15 +551,10 @@
 		const streamCallbacks: ChatStreamCallbacks = {
 			onChunk: (chunk: string) => {
-				streamedContent += chunk;
-				this.store.setChatStreaming(assistantMessage.convId, streamedContent, assistantMessage.id);
-				const idx = conversationsStore.findMessageIndex(assistantMessage.id);
-				conversationsStore.updateMessageAtIndex(idx, { content: streamedContent });
+				appendContentChunk(chunk);
 			},
 			onReasoningChunk: (reasoningChunk: string) => {
-				streamedReasoningContent += reasoningChunk;
-				const idx = conversationsStore.findMessageIndex(assistantMessage.id);
-				conversationsStore.updateMessageAtIndex(idx, { thinking: streamedReasoningContent });
+				appendReasoningChunk(reasoningChunk);
 			},
 			onToolCallChunk: (toolCallChunk: string) => {
 				const chunk = toolCallChunk.trim();
@@ -558,10 +597,14 @@
 				toolCallContent?: string
 			) => {
 				this.store.setStreamingActive(false);
+				finalizeReasoning();
+
+				const combinedContent = hasStreamedChunks
+					? streamedContent
+					: wrapReasoningContent(finalContent || '', reasoningContent);

 				const updateData: Record = {
-					content: finalContent || streamedContent,
-					thinking: reasoningContent || streamedReasoningContent,
+					content: combinedContent,
 					toolCalls: toolCallContent || streamedToolCallContent,
 					timings
 				};
@@ -575,7 +618,7 @@
 				const idx = conversationsStore.findMessageIndex(assistantMessage.id);

 				const uiUpdate: Partial = {
-					content: updateData.content as string,
+					content: combinedContent,
 					toolCalls: updateData.toolCalls as string
 				};
 				if (streamedExtras.length > 0) {
@@ -587,7 +630,7 @@
 				conversationsStore.updateMessageAtIndex(idx, uiUpdate);
 				await conversationsStore.updateCurrentNode(assistantMessage.id);

-				if (onComplete) await onComplete(streamedContent);
+				if (onComplete) await onComplete(combinedContent);
 				this.store.setChatLoading(assistantMessage.convId, false);
 				this.store.clearChatStreaming(assistantMessage.convId);
 				this.store.setProcessingState(assistantMessage.convId, null);
@@ -714,10 +757,9 @@ export class ChatClient {
 		if (lastMessage?.role === 'assistant') {
 			try {
-				const updateData: { content: string; thinking?: string; timings?: ChatMessageTimings } = {
+				const updateData: { content: string; timings?: ChatMessageTimings } = {
 					content: streamingState.response
 				};
-				if (lastMessage.thinking?.trim()) updateData.thinking = lastMessage.thinking;
 				const lastKnownState = this.store.getProcessingState(conversationId);
 				if (lastKnownState) {
 					updateData.timings = {
@@ -736,7 +778,6 @@
 				lastMessage.content = this.store.getCurrentResponse();

-				if (updateData.thinking) lastMessage.thinking = updateData.thinking;
 				if (updateData.timings) lastMessage.timings = updateData.timings;
 			} catch (error) {
 				lastMessage.content = this.store.getCurrentResponse();
@@ -891,7 +932,6 @@ export class ChatClient {
 				timestamp: Date.now(),
 				role: msg.role,
 				content: '',
-				thinking: '',
 				toolCalls: '',
 				children: [],
 				model: null
@@ -1040,7 +1080,6 @@ export class ChatClient {
 		}

 		const originalContent = dbMessage.content;
-		const originalThinking = dbMessage.thinking || '';
 		const conversationContext = conversationsStore.activeMessages.slice(0, idx);
 		const contextWithContinue = [
@@ -1048,9 +1087,41 @@
 			{ role: 'assistant' as const, content: originalContent }
 		];

-		let appendedContent = '',
-			appendedThinking = '',
-			hasReceivedContent = false;
+		let appendedContent = '';
+		let hasReceivedContent = false;
+		let isReasoningOpen = hasUnclosedReasoningTag(originalContent);
+
+		const updateStreamingContent = (fullContent: string) => {
+			this.store.setChatStreaming(msg.convId, fullContent, msg.id);
+			conversationsStore.updateMessageAtIndex(idx, { content: fullContent });
+		};
+
+		const appendContentChunk = (chunk: string) => {
+			if (isReasoningOpen) {
+				appendedContent += REASONING_TAGS.END;
+				isReasoningOpen = false;
+			}
+			appendedContent += chunk;
+			hasReceivedContent = true;
+			updateStreamingContent(originalContent + appendedContent);
+		};
+
+		const appendReasoningChunk = (chunk: string) => {
+			if (!isReasoningOpen) {
+				appendedContent += REASONING_TAGS.START;
+				isReasoningOpen = true;
+			}
+			appendedContent += chunk;
+			hasReceivedContent = true;
+			updateStreamingContent(originalContent + appendedContent);
+		};
+
+		const finalizeReasoning = () => {
+			if (isReasoningOpen) {
+				appendedContent += REASONING_TAGS.END;
+				isReasoningOpen = false;
+			}
+		};

 		const abortController = this.store.getAbortController(msg.convId);
@@ -1060,19 +1131,11 @@
 			...this.getApiOptions(),

 			onChunk: (chunk: string) => {
-				hasReceivedContent = true;
-				appendedContent += chunk;
-				const fullContent = originalContent + appendedContent;
-				this.store.setChatStreaming(msg.convId, fullContent, msg.id);
-				conversationsStore.updateMessageAtIndex(idx, { content: fullContent });
+				appendContentChunk(chunk);
 			},
 			onReasoningChunk: (reasoningChunk: string) => {
-				hasReceivedContent = true;
-				appendedThinking += reasoningChunk;
-				conversationsStore.updateMessageAtIndex(idx, {
-					thinking: originalThinking + appendedThinking
-				});
+				appendReasoningChunk(reasoningChunk);
 			},
 			onTimings: (timings?: ChatMessageTimings, promptProgress?: ChatMessagePromptProgress) => {
@@ -1098,17 +1161,18 @@
 				reasoningContent?: string,
 				timings?: ChatMessageTimings
 			) => {
-				const fullContent = originalContent + (finalContent || appendedContent);
-				const fullThinking = originalThinking + (reasoningContent || appendedThinking);
+				finalizeReasoning();
+				const appendedFromCompletion = hasReceivedContent
+					? appendedContent
+					: wrapReasoningContent(finalContent || '', reasoningContent);
+				const fullContent = originalContent + appendedFromCompletion;
 				await DatabaseService.updateMessage(msg.id, {
 					content: fullContent,
-					thinking: fullThinking,
 					timestamp: Date.now(),
 					timings
 				});
 				conversationsStore.updateMessageAtIndex(idx, {
 					content: fullContent,
-					thinking: fullThinking,
 					timestamp: Date.now(),
 					timings
 				});
@@ -1123,12 +1187,10 @@
 				if (hasReceivedContent && appendedContent) {
 					await DatabaseService.updateMessage(msg.id, {
 						content: originalContent + appendedContent,
-						thinking: originalThinking + appendedThinking,
 						timestamp: Date.now()
 					});
 					conversationsStore.updateMessageAtIndex(idx, {
 						content: originalContent + appendedContent,
-						thinking: originalThinking + appendedThinking,
 						timestamp: Date.now()
 					});
 				}
@@ -1139,12 +1201,10 @@
 			}
 			console.error('Continue generation error:', error);
 			conversationsStore.updateMessageAtIndex(idx, {
-				content: originalContent,
-				thinking: originalThinking
+				content: originalContent
 			});
 			await DatabaseService.updateMessage(msg.id, {
-				content: originalContent,
-				thinking: originalThinking
+				content: originalContent
 			});
 			this.store.setChatLoading(msg.convId, false);
 			this.store.clearChatStreaming(msg.convId);
@@ -1192,7 +1252,6 @@ export class ChatClient {
 			timestamp: Date.now(),
 			role: msg.role,
 			content: newContent,
-			thinking: msg.thinking || '',
 			toolCalls: msg.toolCalls || '',
 			children: [],
 			model: msg.model
@@ -1307,7 +1366,6 @@ export class ChatClient {
 			timestamp: Date.now(),
 			role: msg.role,
 			content: newContent,
-			thinking: msg.thinking || '',
 			toolCalls: msg.toolCalls || '',
 			children: [],
 			extra: extrasToUse,
@@ -1357,7 +1415,6 @@ export class ChatClient {
 			timestamp: Date.now(),
 			role: 'assistant',
 			content: '',
-			thinking: '',
 			toolCalls: '',
 			children: [],
 			model: null
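Note (illustrative sketch, not part of the patch): with the separate thinking column removed, the new streaming helpers in chat.client.ts fold reasoning deltas and answer deltas into a single tagged string. The standalone snippet below restates that interleaving; the tag literals are placeholders standing in for the real REASONING_TAGS values defined in $lib/constants/agentic.

// Illustrative sketch only; placeholder tag literals, not the real REASONING_TAGS values.
const TAGS = { START: '[REASONING-START]', END: '[REASONING-END]' } as const;

let streamedContent = '';
let isReasoningOpen = false;

// Mirrors appendReasoningChunk: open the block on the first reasoning delta, then keep appending.
const appendReasoningChunk = (chunk: string): void => {
	if (!isReasoningOpen) {
		streamedContent += TAGS.START;
		isReasoningOpen = true;
	}
	streamedContent += chunk;
};

// Mirrors appendContentChunk: close any open reasoning block before normal content resumes.
const appendContentChunk = (chunk: string): void => {
	if (isReasoningOpen) {
		streamedContent += TAGS.END;
		isReasoningOpen = false;
	}
	streamedContent += chunk;
};

appendReasoningChunk('Let me add the numbers. ');
appendReasoningChunk('2 + 2 = 4.');
appendContentChunk('The answer is 4.');
console.log(streamedContent);
// [REASONING-START]Let me add the numbers. 2 + 2 = 4.[REASONING-END]The answer is 4.

The same open/close discipline is what finalizeReasoning enforces when a stream ends while a reasoning block is still open.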
diff --git a/tools/server/webui/src/lib/components/app/chat/ChatMessages/AgenticContent.svelte b/tools/server/webui/src/lib/components/app/chat/ChatMessages/AgenticContent.svelte
index 41b9d0fc95..f11efa8e0d 100644
--- a/tools/server/webui/src/lib/components/app/chat/ChatMessages/AgenticContent.svelte
+++ b/tools/server/webui/src/lib/components/app/chat/ChatMessages/AgenticContent.svelte
@@ -2,9 +2,8 @@
 	/**
 	 * AgenticContent - Chronological display of agentic flow output
 	 *
-	 * Parses content with tool call markers and displays them inline
-	 * with text content. Each tool call is shown as a collapsible box
-	 * similar to the reasoning/thinking block UI.
+	 * Parses content with tool call and reasoning markers and displays them inline
+	 * with text content. Tool calls and reasoning are shown as collapsible blocks.
 	 */

 	import {
@@ -13,9 +12,9 @@
 		SyntaxHighlightedCode
 	} from '$lib/components/app';
 	import { config } from '$lib/stores/settings.svelte';
-	import { Wrench, Loader2, AlertTriangle } from '@lucide/svelte';
+	import { Wrench, Loader2, AlertTriangle, Brain } from '@lucide/svelte';
 	import { AgenticSectionType } from '$lib/enums';
-	import { AGENTIC_TAGS, AGENTIC_REGEX } from '$lib/constants/agentic';
+	import { AGENTIC_TAGS, AGENTIC_REGEX, REASONING_TAGS } from '$lib/constants/agentic';
 	import { formatJsonPretty } from '$lib/utils/formatters';
 	import type { DatabaseMessage } from '$lib/types/database';
@@ -40,24 +39,90 @@
 	let expandedStates: Record = $state({});

 	const showToolCallInProgress = $derived(config().showToolCallInProgress as boolean);
+	const showThoughtInProgress = $derived(config().showThoughtInProgress as boolean);

-	function getDefaultExpanded(isPending: boolean): boolean {
-		return showToolCallInProgress && isPending;
+	function getDefaultExpanded(section: AgenticSection): boolean {
+		if (
+			section.type === AgenticSectionType.TOOL_CALL_PENDING ||
+			section.type === AgenticSectionType.TOOL_CALL_STREAMING
+		) {
+			return showToolCallInProgress;
+		}
+		if (section.type === AgenticSectionType.REASONING_PENDING) {
+			return showThoughtInProgress;
+		}
+		return false;
 	}

-	function isExpanded(index: number, isPending: boolean): boolean {
+	function isExpanded(index: number, section: AgenticSection): boolean {
 		if (expandedStates[index] !== undefined) {
 			return expandedStates[index];
 		}
-		return getDefaultExpanded(isPending);
+		return getDefaultExpanded(section);
 	}

-	function toggleExpanded(index: number, isPending: boolean) {
-		const currentState = isExpanded(index, isPending);
+	function toggleExpanded(index: number, section: AgenticSection) {
+		const currentState = isExpanded(index, section);
 		expandedStates[index] = !currentState;
 	}

-	function parseAgenticContent(rawContent: string): AgenticSection[] {
+	type ReasoningSegment = {
+		type: 'text' | 'reasoning' | 'reasoning_pending';
+		content: string;
+	};
+
+	function stripPartialMarker(text: string): string {
+		const partialMarkerMatch = text.match(AGENTIC_REGEX.PARTIAL_MARKER);
+		if (partialMarkerMatch) {
+			return text.slice(0, partialMarkerMatch.index).trim();
+		}
+		return text;
+	}
+
+	function splitReasoningSegments(rawContent: string): ReasoningSegment[] {
+		if (!rawContent) return [];
+
+		const segments: ReasoningSegment[] = [];
+		let cursor = 0;
+
+		while (cursor < rawContent.length) {
+			const startIndex = rawContent.indexOf(REASONING_TAGS.START, cursor);
+			if (startIndex === -1) {
+				const remainingText = rawContent.slice(cursor);
+				if (remainingText) {
+					segments.push({ type: 'text', content: remainingText });
+				}
+				break;
+			}
+
+			if (startIndex > cursor) {
+				const textBefore = rawContent.slice(cursor, startIndex);
+				if (textBefore) {
+					segments.push({ type: 'text', content: textBefore });
+				}
+			}
+
+			const contentStart = startIndex + REASONING_TAGS.START.length;
+			const endIndex = rawContent.indexOf(REASONING_TAGS.END, contentStart);
+
+			if (endIndex === -1) {
+				const pendingContent = rawContent.slice(contentStart);
+				segments.push({
+					type: 'reasoning_pending',
+					content: stripPartialMarker(pendingContent)
+				});
+				break;
+			}
+
+			const reasoningContent = rawContent.slice(contentStart, endIndex);
+			segments.push({ type: 'reasoning', content: reasoningContent });
+			cursor = endIndex + REASONING_TAGS.END.length;
+		}
+
+		return segments;
+	}
+
+	function parseToolCallContent(rawContent: string): AgenticSection[] {
 		if (!rawContent) return [];

 		const sections: AgenticSection[] = [];
@@ -180,6 +245,34 @@
 		return sections;
 	}
+
+	function parseAgenticContent(rawContent: string): AgenticSection[] {
+		if (!rawContent) return [];
+
+		const segments = splitReasoningSegments(rawContent);
+		const sections: AgenticSection[] = [];
+
+		for (const segment of segments) {
+			if (segment.type === 'text') {
+				sections.push(...parseToolCallContent(segment.content));
+				continue;
+			}
+
+			if (segment.type === 'reasoning') {
+				if (segment.content.trim()) {
+					sections.push({ type: AgenticSectionType.REASONING, content: segment.content });
+				}
+				continue;
+			}
+
+			sections.push({
+				type: AgenticSectionType.REASONING_PENDING,
+				content: segment.content
+			});
+		}
+
+		return sections;
+	}
@@ -193,13 +286,13 @@
 			{@const streamingIconClass = isStreaming ? 'h-4 w-4 animate-spin' : 'h-4 w-4 text-yellow-500'}
 			{@const streamingSubtitle = isStreaming ? 'streaming...' : 'incomplete'}
-				toggleExpanded(index, true)}
+				onToggle={() => toggleExpanded(index, section)}
 			>
@@ -233,13 +326,13 @@
 			{@const toolIcon = isPending ? Loader2 : Wrench}
 			{@const toolIconClass = isPending ? 'h-4 w-4 animate-spin' : 'h-4 w-4'}
-				toggleExpanded(index, isPending)}
+				onToggle={() => toggleExpanded(index, section)}
 			>
 				{#if section.toolArgs && section.toolArgs !== '{}'}
@@ -272,6 +365,37 @@
 				{/if}
+		{:else if section.type === AgenticSectionType.REASONING}
+				toggleExpanded(index, section)}
+			>
+
+					{section.content}
+
+
+		{:else if section.type === AgenticSectionType.REASONING_PENDING}
+			{@const reasoningTitle = isStreaming ? 'Reasoning...' : 'Reasoning'}
+			{@const reasoningSubtitle = isStreaming ? 'streaming...' : 'incomplete'}
+				toggleExpanded(index, section)}
+			>
+
+					{section.content}
+
+
 		{/if}
 	{/each}
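Note (illustrative sketch, not part of the patch): the segmentation rule implemented by splitReasoningSegments above can be exercised in isolation. The helper below is a simplified restatement with placeholder tag literals (the component reads the real ones from REASONING_TAGS): balanced tags yield 'reasoning' segments, while an unclosed start tag turns the remainder into a single 'reasoning_pending' segment.

// Simplified restatement of the splitting rule, with placeholder tag literals.
type Segment = { type: 'text' | 'reasoning' | 'reasoning_pending'; content: string };

const START = '[REASONING-START]';
const END = '[REASONING-END]';

function splitSegments(raw: string): Segment[] {
	const segments: Segment[] = [];
	let cursor = 0;

	while (cursor < raw.length) {
		const start = raw.indexOf(START, cursor);
		if (start === -1) {
			segments.push({ type: 'text', content: raw.slice(cursor) });
			break;
		}
		if (start > cursor) {
			segments.push({ type: 'text', content: raw.slice(cursor, start) });
		}

		const contentStart = start + START.length;
		const end = raw.indexOf(END, contentStart);
		if (end === -1) {
			// Unclosed tag: everything after START is still-streaming reasoning.
			segments.push({ type: 'reasoning_pending', content: raw.slice(contentStart) });
			break;
		}

		segments.push({ type: 'reasoning', content: raw.slice(contentStart, end) });
		cursor = end + END.length;
	}

	return segments;
}

console.log(splitSegments(`${START}plan the reply${END}Here it is.`));
// [ { type: 'reasoning', content: 'plan the reply' }, { type: 'text', content: 'Here it is.' } ]
console.log(splitSegments(`Intro text ${START}still thinking`));
// [ { type: 'text', content: 'Intro text ' }, { type: 'reasoning_pending', content: 'still thinking' } ]

The component additionally trims a trailing partial marker from pending segments via stripPartialMarker before rendering them.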
diff --git a/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessage.svelte b/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessage.svelte
index fe0933fdfe..fda89570f9 100644
--- a/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessage.svelte
+++ b/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessage.svelte
@@ -66,15 +66,6 @@
 	let shouldBranchAfterEdit = $state(false);
 	let textareaElement: HTMLTextAreaElement | undefined = $state();

-	let thinkingContent = $derived.by(() => {
-		if (message.role === MessageRole.ASSISTANT) {
-			const trimmedThinking = message.thinking?.trim();
-
-			return trimmedThinking ? trimmedThinking : null;
-		}
-		return null;
-	});
-
 	// Auto-start edit mode if this message is the pending edit target
 	$effect(() => {
 		const pendingId = pendingEditMessageId();
@@ -309,6 +300,5 @@
 		onShouldBranchAfterEditChange={(value) => (shouldBranchAfterEdit = value)}
 		{showDeleteDialog}
 		{siblingInfo}
-		{thinkingContent}
 	/>
 {/if}
diff --git a/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte b/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte
index ff7782624c..6695b99333 100644
--- a/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte
+++ b/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte
@@ -4,7 +4,6 @@
 		ModelBadge,
 		ChatMessageActions,
 		ChatMessageStatistics,
-		ChatMessageThinkingBlock,
 		MarkdownContent,
 		ModelsSelector
 	} from '$lib/components/app';
@@ -23,6 +22,7 @@
 	import { config } from '$lib/stores/settings.svelte';
 	import { conversationsStore } from '$lib/stores/conversations.svelte';
 	import { isRouterMode } from '$lib/stores/server.svelte';
+	import { AGENTIC_TAGS, REASONING_TAGS } from '$lib/constants/agentic';

 	interface Props {
 		class?: string;
@@ -53,7 +53,6 @@
 		shouldBranchAfterEdit?: boolean;
 		siblingInfo?: ChatMessageSiblingInfo | null;
 		textareaElement?: HTMLTextAreaElement;
-		thinkingContent: string | null;
 	}

 	let {
@@ -79,15 +78,17 @@
 		showDeleteDialog,
 		shouldBranchAfterEdit = false,
 		siblingInfo = null,
-		textareaElement = $bindable(),
-		thinkingContent
+		textareaElement = $bindable()
 	}: Props = $props();

 	const hasAgenticMarkers = $derived(
-		messageContent?.includes('<<>>') ?? false
+		messageContent?.includes(AGENTIC_TAGS.TOOL_CALL_START) ?? false
 	);
 	const hasStreamingToolCall = $derived(isChatStreaming() && agenticStreamingToolCall() !== null);
-	const isAgenticContent = $derived(hasAgenticMarkers || hasStreamingToolCall);
+	const hasReasoningMarkers = $derived(messageContent?.includes(REASONING_TAGS.START) ?? false);
+	const isStructuredContent = $derived(
+		hasAgenticMarkers || hasReasoningMarkers || hasStreamingToolCall
+	);

 	const processingState = useProcessingState();
 	let currentConfig = $derived(config());
@@ -123,14 +124,6 @@
 	role="group"
 	aria-label="Assistant message with actions"
 >
-	{#if thinkingContent}
-
-	{/if}
-
 	{#if message?.role === 'assistant' && isLoading() && !message?.content?.trim()}
@@ -182,7 +175,7 @@
 		{:else if message.role === 'assistant'}
 			{#if showRawOutput}
					{messageContent || ''}
-			{:else if isAgenticContent}
+			{:else if isStructuredContent}
 			{:else}
@@ -248,7 +241,7 @@
 				{onCopy}
 				{onEdit}
 				{onRegenerate}
-				onContinue={currentConfig.enableContinueGeneration && !thinkingContent
+				onContinue={currentConfig.enableContinueGeneration && !hasReasoningMarkers
 					? onContinue
 					: undefined}
 				{onDelete}
diff --git a/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageThinkingBlock.svelte b/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageThinkingBlock.svelte
deleted file mode 100644
index 7e53726d3a..0000000000
--- a/tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageThinkingBlock.svelte
+++ /dev/null
@@ -1,42 +0,0 @@
-
-
-
-
-	{reasoningContent ?? ''}
-
-
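Note (illustrative sketch, not part of the patch): the Continue action above is now withheld for messages that already carry reasoning markers, and the continue path in chat.client.ts decides whether it is resuming inside an unfinished reasoning block by comparing tag counts. The snippet below restates that hasUnclosedReasoningTag check with placeholder literals standing in for the real REASONING_TAGS values.

// Placeholder literals; the real values come from REASONING_TAGS in $lib/constants/agentic.
const START = '[REASONING-START]';
const END = '[REASONING-END]';

const countOccurrences = (source: string, token: string): number =>
	source ? source.split(token).length - 1 : 0;

// More START than END markers means the message stopped mid-reasoning.
const hasUnclosedReasoningTag = (content: string): boolean =>
	countOccurrences(content, START) > countOccurrences(content, END);

console.log(hasUnclosedReasoningTag(`${START}half a thought`)); // true
console.log(hasUnclosedReasoningTag(`${START}done${END}final answer`)); // false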
diff --git a/tools/server/webui/src/lib/components/app/index.ts b/tools/server/webui/src/lib/components/app/index.ts
index 617daf17b0..2f80665113 100644
--- a/tools/server/webui/src/lib/components/app/index.ts
+++ b/tools/server/webui/src/lib/components/app/index.ts
@@ -21,7 +21,6 @@ export { default as ChatMessageActions } from './chat/ChatMessages/ChatMessageAc
 export { default as ChatMessageBranchingControls } from './chat/ChatMessages/ChatMessageBranchingControls.svelte';
 export { default as ChatMessageStatistics } from './chat/ChatMessages/ChatMessageStatistics.svelte';
 export { default as ChatMessageSystem } from './chat/ChatMessages/ChatMessageSystem.svelte';
-export { default as ChatMessageThinkingBlock } from './chat/ChatMessages/ChatMessageThinkingBlock.svelte';
 export { default as ChatMessages } from './chat/ChatMessages/ChatMessages.svelte';
 export { default as CollapsibleContentBlock } from './chat/ChatMessages/CollapsibleContentBlock.svelte';
 export { default as MessageBranchingControls } from './chat/ChatMessages/ChatMessageBranchingControls.svelte';
diff --git a/tools/server/webui/src/lib/constants/agentic.ts b/tools/server/webui/src/lib/constants/agentic.ts
index 6c6cc665ca..140fee41ad 100644
--- a/tools/server/webui/src/lib/constants/agentic.ts
+++ b/tools/server/webui/src/lib/constants/agentic.ts
@@ -17,6 +17,11 @@ export const AGENTIC_TAGS = {
 	TAG_SUFFIX: '>>>'
 } as const;

+export const REASONING_TAGS = {
+	START: '<<>>',
+	END: '<<>>'
+} as const;
+
 // Regex patterns for parsing agentic content
 export const AGENTIC_REGEX = {
 	// Matches completed tool calls (with END marker)
@@ -31,7 +36,7 @@ export const AGENTIC_REGEX = {
 	// Matches early tool call (just START marker)
 	EARLY_MATCH: /<<>>([\s\S]*)$/,
 	// Matches partial marker at end of content
-	PARTIAL_MARKER: /<<<[A-Z_]*$/,
+	PARTIAL_MARKER: /<<<[A-Za-z_]*$/,
 	// Matches tool name inside content
 	TOOL_NAME_EXTRACT: /<<]+)>>>/
 } as const;
diff --git a/tools/server/webui/src/lib/enums/agentic.ts b/tools/server/webui/src/lib/enums/agentic.ts
index aad7a69e83..5c61880b7b 100644
--- a/tools/server/webui/src/lib/enums/agentic.ts
+++ b/tools/server/webui/src/lib/enums/agentic.ts
@@ -5,5 +5,7 @@ export enum AgenticSectionType {
 	TEXT = 'text',
 	TOOL_CALL = 'tool_call',
 	TOOL_CALL_PENDING = 'tool_call_pending',
-	TOOL_CALL_STREAMING = 'tool_call_streaming'
+	TOOL_CALL_STREAMING = 'tool_call_streaming',
+	REASONING = 'reasoning',
+	REASONING_PENDING = 'reasoning_pending'
 }
diff --git a/tools/server/webui/src/lib/services/chat.service.ts b/tools/server/webui/src/lib/services/chat.service.ts
index cf435d47b0..861363e5e1 100644
--- a/tools/server/webui/src/lib/services/chat.service.ts
+++ b/tools/server/webui/src/lib/services/chat.service.ts
@@ -489,10 +489,6 @@ export class ChatService {
 		const reasoningContent = data.choices[0]?.message?.reasoning_content;
 		const toolCalls = data.choices[0]?.message?.tool_calls;

-		if (reasoningContent) {
-			console.log('Full reasoning content:', reasoningContent);
-		}
-
 		let serializedToolCalls: string | undefined;

 		if (toolCalls && toolCalls.length > 0) {
diff --git a/tools/server/webui/src/lib/services/database.service.ts b/tools/server/webui/src/lib/services/database.service.ts
index b6fef9c0a2..02b62e2f1d 100644
--- a/tools/server/webui/src/lib/services/database.service.ts
+++ b/tools/server/webui/src/lib/services/database.service.ts
@@ -165,7 +165,6 @@ export class DatabaseService {
 			role: 'system',
 			content: '',
 			parent: null,
-			thinking: '',
 			toolCalls: '',
 			children: []
 		};
@@ -201,7 +200,6 @@ export class DatabaseService {
 			role: 'system',
 			content: trimmedPrompt,
 			parent: parentId,
-			thinking: '',
 			children: []
 		};
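Note (illustrative sketch, not part of the patch): when a completion arrives without streaming, so no chunks were appended, the client falls back to wrapReasoningContent and prefixes the stored answer with the tagged reasoning block taken from the response's reasoning_content field. The snippet below shows that fallback with placeholder literals standing in for the real REASONING_TAGS values.

// Placeholder literals; the real values come from REASONING_TAGS in $lib/constants/agentic.
const TAGS = { START: '[REASONING-START]', END: '[REASONING-END]' } as const;

// Same shape as the helper added to chat.client.ts in this patch.
const wrapReasoningContent = (content: string, reasoningContent?: string): string =>
	reasoningContent ? `${TAGS.START}${reasoningContent}${TAGS.END}${content}` : content;

// A non-streaming chat completion carries both fields at once:
const finalContent = 'The answer is 4.';
const reasoningContent = 'Add 2 and 2.';
console.log(wrapReasoningContent(finalContent, reasoningContent));
// [REASONING-START]Add 2 and 2.[REASONING-END]The answer is 4.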