refactor: inline reasoning with tags, remove dedicated thinking field

Pascal 2026-01-16 14:24:43 +01:00
parent a1550ab77d
commit 2973c64609
10 changed files with 260 additions and 138 deletions

View File

@ -17,6 +17,7 @@ import {
import { DEFAULT_CONTEXT } from '$lib/constants/default-context';
import { getAgenticConfig } from '$lib/utils/agentic';
import { SYSTEM_MESSAGE_PLACEHOLDER } from '$lib/constants/ui';
import { REASONING_TAGS } from '$lib/constants/agentic';
import type { ChatMessageTimings, ChatMessagePromptProgress } from '$lib/types/chat';
import type { DatabaseMessage, DatabaseMessageExtra } from '$lib/types/database';
@ -87,6 +88,17 @@ interface ChatStoreStateCallbacks {
getCurrentResponse: () => string;
}
const countOccurrences = (source: string, token: string): number =>
source ? source.split(token).length - 1 : 0;
const hasUnclosedReasoningTag = (content: string): boolean =>
countOccurrences(content, REASONING_TAGS.START) > countOccurrences(content, REASONING_TAGS.END);
const wrapReasoningContent = (content: string, reasoningContent?: string): string => {
if (!reasoningContent) return content;
return `${REASONING_TAGS.START}${reasoningContent}${REASONING_TAGS.END}${content}`;
};
/**
* ChatClient - Business Logic Facade for Chat Operations
*
@ -205,7 +217,6 @@ export class ChatClient {
content,
type,
timestamp: Date.now(),
thinking: '',
toolCalls: '',
children: [],
extra: extras
@ -377,7 +388,6 @@ export class ChatClient {
role: 'assistant',
content: '',
timestamp: Date.now(),
thinking: '',
toolCalls: '',
children: [],
model: null
@ -476,8 +486,9 @@ export class ChatClient {
}
let streamedContent = '';
let streamedReasoningContent = '';
let streamedToolCallContent = '';
let isReasoningOpen = false;
let hasStreamedChunks = false;
let resolvedModel: string | null = null;
let modelPersisted = false;
let streamedExtras: DatabaseMessageExtra[] = assistantMessage.extra
@ -500,6 +511,39 @@ export class ChatClient {
}
};
const updateStreamingContent = () => {
this.store.setChatStreaming(assistantMessage.convId, streamedContent, assistantMessage.id);
const idx = conversationsStore.findMessageIndex(assistantMessage.id);
conversationsStore.updateMessageAtIndex(idx, { content: streamedContent });
};
const appendContentChunk = (chunk: string) => {
if (isReasoningOpen) {
streamedContent += REASONING_TAGS.END;
isReasoningOpen = false;
}
streamedContent += chunk;
hasStreamedChunks = true;
updateStreamingContent();
};
const appendReasoningChunk = (chunk: string) => {
if (!isReasoningOpen) {
streamedContent += REASONING_TAGS.START;
isReasoningOpen = true;
}
streamedContent += chunk;
hasStreamedChunks = true;
updateStreamingContent();
};
const finalizeReasoning = () => {
if (isReasoningOpen) {
streamedContent += REASONING_TAGS.END;
isReasoningOpen = false;
}
};
this.store.setStreamingActive(true);
this.store.setActiveProcessingConversation(assistantMessage.convId);
@ -507,15 +551,10 @@ export class ChatClient {
const streamCallbacks: ChatStreamCallbacks = {
onChunk: (chunk: string) => {
streamedContent += chunk;
this.store.setChatStreaming(assistantMessage.convId, streamedContent, assistantMessage.id);
const idx = conversationsStore.findMessageIndex(assistantMessage.id);
conversationsStore.updateMessageAtIndex(idx, { content: streamedContent });
appendContentChunk(chunk);
},
onReasoningChunk: (reasoningChunk: string) => {
streamedReasoningContent += reasoningChunk;
const idx = conversationsStore.findMessageIndex(assistantMessage.id);
conversationsStore.updateMessageAtIndex(idx, { thinking: streamedReasoningContent });
appendReasoningChunk(reasoningChunk);
},
onToolCallChunk: (toolCallChunk: string) => {
const chunk = toolCallChunk.trim();
@ -558,10 +597,14 @@ export class ChatClient {
toolCallContent?: string
) => {
this.store.setStreamingActive(false);
finalizeReasoning();
const combinedContent = hasStreamedChunks
? streamedContent
: wrapReasoningContent(finalContent || '', reasoningContent);
const updateData: Record<string, unknown> = {
content: finalContent || streamedContent,
thinking: reasoningContent || streamedReasoningContent,
content: combinedContent,
toolCalls: toolCallContent || streamedToolCallContent,
timings
};
@ -575,7 +618,7 @@ export class ChatClient {
const idx = conversationsStore.findMessageIndex(assistantMessage.id);
const uiUpdate: Partial<DatabaseMessage> = {
content: updateData.content as string,
content: combinedContent,
toolCalls: updateData.toolCalls as string
};
if (streamedExtras.length > 0) {
@ -587,7 +630,7 @@ export class ChatClient {
conversationsStore.updateMessageAtIndex(idx, uiUpdate);
await conversationsStore.updateCurrentNode(assistantMessage.id);
if (onComplete) await onComplete(streamedContent);
if (onComplete) await onComplete(combinedContent);
this.store.setChatLoading(assistantMessage.convId, false);
this.store.clearChatStreaming(assistantMessage.convId);
this.store.setProcessingState(assistantMessage.convId, null);
@ -714,10 +757,9 @@ export class ChatClient {
if (lastMessage?.role === 'assistant') {
try {
const updateData: { content: string; thinking?: string; timings?: ChatMessageTimings } = {
const updateData: { content: string; timings?: ChatMessageTimings } = {
content: streamingState.response
};
if (lastMessage.thinking?.trim()) updateData.thinking = lastMessage.thinking;
const lastKnownState = this.store.getProcessingState(conversationId);
if (lastKnownState) {
updateData.timings = {
@ -736,7 +778,6 @@ export class ChatClient {
lastMessage.content = this.store.getCurrentResponse();
if (updateData.thinking) lastMessage.thinking = updateData.thinking;
if (updateData.timings) lastMessage.timings = updateData.timings;
} catch (error) {
lastMessage.content = this.store.getCurrentResponse();
@ -891,7 +932,6 @@ export class ChatClient {
timestamp: Date.now(),
role: msg.role,
content: '',
thinking: '',
toolCalls: '',
children: [],
model: null
@ -1040,7 +1080,6 @@ export class ChatClient {
}
const originalContent = dbMessage.content;
const originalThinking = dbMessage.thinking || '';
const conversationContext = conversationsStore.activeMessages.slice(0, idx);
const contextWithContinue = [
@ -1048,9 +1087,41 @@ export class ChatClient {
{ role: 'assistant' as const, content: originalContent }
];
let appendedContent = '',
appendedThinking = '',
hasReceivedContent = false;
let appendedContent = '';
let hasReceivedContent = false;
let isReasoningOpen = hasUnclosedReasoningTag(originalContent);
const updateStreamingContent = (fullContent: string) => {
this.store.setChatStreaming(msg.convId, fullContent, msg.id);
conversationsStore.updateMessageAtIndex(idx, { content: fullContent });
};
const appendContentChunk = (chunk: string) => {
if (isReasoningOpen) {
appendedContent += REASONING_TAGS.END;
isReasoningOpen = false;
}
appendedContent += chunk;
hasReceivedContent = true;
updateStreamingContent(originalContent + appendedContent);
};
const appendReasoningChunk = (chunk: string) => {
if (!isReasoningOpen) {
appendedContent += REASONING_TAGS.START;
isReasoningOpen = true;
}
appendedContent += chunk;
hasReceivedContent = true;
updateStreamingContent(originalContent + appendedContent);
};
const finalizeReasoning = () => {
if (isReasoningOpen) {
appendedContent += REASONING_TAGS.END;
isReasoningOpen = false;
}
};
const abortController = this.store.getAbortController(msg.convId);
@ -1060,19 +1131,11 @@ export class ChatClient {
...this.getApiOptions(),
onChunk: (chunk: string) => {
hasReceivedContent = true;
appendedContent += chunk;
const fullContent = originalContent + appendedContent;
this.store.setChatStreaming(msg.convId, fullContent, msg.id);
conversationsStore.updateMessageAtIndex(idx, { content: fullContent });
appendContentChunk(chunk);
},
onReasoningChunk: (reasoningChunk: string) => {
hasReceivedContent = true;
appendedThinking += reasoningChunk;
conversationsStore.updateMessageAtIndex(idx, {
thinking: originalThinking + appendedThinking
});
appendReasoningChunk(reasoningChunk);
},
onTimings: (timings?: ChatMessageTimings, promptProgress?: ChatMessagePromptProgress) => {
@ -1098,17 +1161,18 @@ export class ChatClient {
reasoningContent?: string,
timings?: ChatMessageTimings
) => {
const fullContent = originalContent + (finalContent || appendedContent);
const fullThinking = originalThinking + (reasoningContent || appendedThinking);
finalizeReasoning();
const appendedFromCompletion = hasReceivedContent
? appendedContent
: wrapReasoningContent(finalContent || '', reasoningContent);
const fullContent = originalContent + appendedFromCompletion;
await DatabaseService.updateMessage(msg.id, {
content: fullContent,
thinking: fullThinking,
timestamp: Date.now(),
timings
});
conversationsStore.updateMessageAtIndex(idx, {
content: fullContent,
thinking: fullThinking,
timestamp: Date.now(),
timings
});
@ -1123,12 +1187,10 @@ export class ChatClient {
if (hasReceivedContent && appendedContent) {
await DatabaseService.updateMessage(msg.id, {
content: originalContent + appendedContent,
thinking: originalThinking + appendedThinking,
timestamp: Date.now()
});
conversationsStore.updateMessageAtIndex(idx, {
content: originalContent + appendedContent,
thinking: originalThinking + appendedThinking,
timestamp: Date.now()
});
}
@ -1139,12 +1201,10 @@ export class ChatClient {
}
console.error('Continue generation error:', error);
conversationsStore.updateMessageAtIndex(idx, {
content: originalContent,
thinking: originalThinking
content: originalContent
});
await DatabaseService.updateMessage(msg.id, {
content: originalContent,
thinking: originalThinking
content: originalContent
});
this.store.setChatLoading(msg.convId, false);
this.store.clearChatStreaming(msg.convId);
@ -1192,7 +1252,6 @@ export class ChatClient {
timestamp: Date.now(),
role: msg.role,
content: newContent,
thinking: msg.thinking || '',
toolCalls: msg.toolCalls || '',
children: [],
model: msg.model
@ -1307,7 +1366,6 @@ export class ChatClient {
timestamp: Date.now(),
role: msg.role,
content: newContent,
thinking: msg.thinking || '',
toolCalls: msg.toolCalls || '',
children: [],
extra: extrasToUse,
@ -1357,7 +1415,6 @@ export class ChatClient {
timestamp: Date.now(),
role: 'assistant',
content: '',
thinking: '',
toolCalls: '',
children: [],
model: null
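
Taken together, the ChatClient changes drop the dedicated thinking field and embed reasoning directly in the message content between REASONING_TAGS markers. A minimal standalone sketch of the streaming path, with stores and persistence omitted and the sample chunks invented for illustration:

// Illustrative sketch only, not part of this commit's diff.
const REASONING_TAGS = {
  START: '<<<reasoning_content_start>>>',
  END: '<<<reasoning_content_end>>>'
} as const;

let streamedContent = '';
let isReasoningOpen = false;

// Mirrors appendReasoningChunk / appendContentChunk above: the first reasoning
// chunk opens the tag, and the first regular content chunk closes it again.
const onReasoningChunk = (chunk: string) => {
  if (!isReasoningOpen) {
    streamedContent += REASONING_TAGS.START;
    isReasoningOpen = true;
  }
  streamedContent += chunk;
};

const onChunk = (chunk: string) => {
  if (isReasoningOpen) {
    streamedContent += REASONING_TAGS.END;
    isReasoningOpen = false;
  }
  streamedContent += chunk;
};

onReasoningChunk('Check the previous turn. ');
onChunk('Here is the answer.');
// streamedContent is now a single tagged string:
// '<<<reasoning_content_start>>>Check the previous turn. <<<reasoning_content_end>>>Here is the answer.'

// When reasoning arrives only in the completion payload (no streamed chunks),
// wrapReasoningContent produces the same shape in one step, and continue
// generation uses hasUnclosedReasoningTag(originalContent) to detect a stream
// that was cut off mid-reasoning, so the open tag is closed as soon as regular
// content resumes.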

View File

@ -2,9 +2,8 @@
/**
* AgenticContent - Chronological display of agentic flow output
*
* Parses content with tool call markers and displays them inline
* with text content. Each tool call is shown as a collapsible box
* similar to the reasoning/thinking block UI.
* Parses content with tool call and reasoning markers and displays them inline
* with text content. Tool calls and reasoning are shown as collapsible blocks.
*/
import {
@ -13,9 +12,9 @@
SyntaxHighlightedCode
} from '$lib/components/app';
import { config } from '$lib/stores/settings.svelte';
import { Wrench, Loader2, AlertTriangle } from '@lucide/svelte';
import { Wrench, Loader2, AlertTriangle, Brain } from '@lucide/svelte';
import { AgenticSectionType } from '$lib/enums';
import { AGENTIC_TAGS, AGENTIC_REGEX } from '$lib/constants/agentic';
import { AGENTIC_TAGS, AGENTIC_REGEX, REASONING_TAGS } from '$lib/constants/agentic';
import { formatJsonPretty } from '$lib/utils/formatters';
import type { DatabaseMessage } from '$lib/types/database';
@ -40,24 +39,90 @@
let expandedStates: Record<number, boolean> = $state({});
const showToolCallInProgress = $derived(config().showToolCallInProgress as boolean);
const showThoughtInProgress = $derived(config().showThoughtInProgress as boolean);
function getDefaultExpanded(isPending: boolean): boolean {
return showToolCallInProgress && isPending;
function getDefaultExpanded(section: AgenticSection): boolean {
if (
section.type === AgenticSectionType.TOOL_CALL_PENDING ||
section.type === AgenticSectionType.TOOL_CALL_STREAMING
) {
return showToolCallInProgress;
}
if (section.type === AgenticSectionType.REASONING_PENDING) {
return showThoughtInProgress;
}
return false;
}
function isExpanded(index: number, isPending: boolean): boolean {
function isExpanded(index: number, section: AgenticSection): boolean {
if (expandedStates[index] !== undefined) {
return expandedStates[index];
}
return getDefaultExpanded(isPending);
return getDefaultExpanded(section);
}
function toggleExpanded(index: number, isPending: boolean) {
const currentState = isExpanded(index, isPending);
function toggleExpanded(index: number, section: AgenticSection) {
const currentState = isExpanded(index, section);
expandedStates[index] = !currentState;
}
function parseAgenticContent(rawContent: string): AgenticSection[] {
type ReasoningSegment = {
type: 'text' | 'reasoning' | 'reasoning_pending';
content: string;
};
function stripPartialMarker(text: string): string {
const partialMarkerMatch = text.match(AGENTIC_REGEX.PARTIAL_MARKER);
if (partialMarkerMatch) {
return text.slice(0, partialMarkerMatch.index).trim();
}
return text;
}
function splitReasoningSegments(rawContent: string): ReasoningSegment[] {
if (!rawContent) return [];
const segments: ReasoningSegment[] = [];
let cursor = 0;
while (cursor < rawContent.length) {
const startIndex = rawContent.indexOf(REASONING_TAGS.START, cursor);
if (startIndex === -1) {
const remainingText = rawContent.slice(cursor);
if (remainingText) {
segments.push({ type: 'text', content: remainingText });
}
break;
}
if (startIndex > cursor) {
const textBefore = rawContent.slice(cursor, startIndex);
if (textBefore) {
segments.push({ type: 'text', content: textBefore });
}
}
const contentStart = startIndex + REASONING_TAGS.START.length;
const endIndex = rawContent.indexOf(REASONING_TAGS.END, contentStart);
if (endIndex === -1) {
const pendingContent = rawContent.slice(contentStart);
segments.push({
type: 'reasoning_pending',
content: stripPartialMarker(pendingContent)
});
break;
}
const reasoningContent = rawContent.slice(contentStart, endIndex);
segments.push({ type: 'reasoning', content: reasoningContent });
cursor = endIndex + REASONING_TAGS.END.length;
}
return segments;
}
function parseToolCallContent(rawContent: string): AgenticSection[] {
if (!rawContent) return [];
const sections: AgenticSection[] = [];
@ -180,6 +245,34 @@
return sections;
}
function parseAgenticContent(rawContent: string): AgenticSection[] {
if (!rawContent) return [];
const segments = splitReasoningSegments(rawContent);
const sections: AgenticSection[] = [];
for (const segment of segments) {
if (segment.type === 'text') {
sections.push(...parseToolCallContent(segment.content));
continue;
}
if (segment.type === 'reasoning') {
if (segment.content.trim()) {
sections.push({ type: AgenticSectionType.REASONING, content: segment.content });
}
continue;
}
sections.push({
type: AgenticSectionType.REASONING_PENDING,
content: segment.content
});
}
return sections;
}
</script>
<div class="agentic-content">
@ -193,13 +286,13 @@
{@const streamingIconClass = isStreaming ? 'h-4 w-4 animate-spin' : 'h-4 w-4 text-yellow-500'}
{@const streamingSubtitle = isStreaming ? 'streaming...' : 'incomplete'}
<CollapsibleContentBlock
open={isExpanded(index, true)}
open={isExpanded(index, section)}
class="my-2"
icon={streamingIcon}
iconClass={streamingIconClass}
title={section.toolName || 'Tool call'}
subtitle={streamingSubtitle}
onToggle={() => toggleExpanded(index, true)}
onToggle={() => toggleExpanded(index, section)}
>
<div class="pt-3">
<div class="my-3 flex items-center gap-2 text-xs text-muted-foreground">
@ -233,13 +326,13 @@
{@const toolIcon = isPending ? Loader2 : Wrench}
{@const toolIconClass = isPending ? 'h-4 w-4 animate-spin' : 'h-4 w-4'}
<CollapsibleContentBlock
open={isExpanded(index, isPending)}
open={isExpanded(index, section)}
class="my-2"
icon={toolIcon}
iconClass={toolIconClass}
title={section.toolName || ''}
subtitle={isPending ? 'executing...' : undefined}
onToggle={() => toggleExpanded(index, isPending)}
onToggle={() => toggleExpanded(index, section)}
>
{#if section.toolArgs && section.toolArgs !== '{}'}
<div class="pt-3">
@ -272,6 +365,37 @@
{/if}
</div>
</CollapsibleContentBlock>
{:else if section.type === AgenticSectionType.REASONING}
<CollapsibleContentBlock
open={isExpanded(index, section)}
class="my-2"
icon={Brain}
title="Reasoning"
onToggle={() => toggleExpanded(index, section)}
>
<div class="pt-3">
<div class="text-xs leading-relaxed break-words whitespace-pre-wrap">
{section.content}
</div>
</div>
</CollapsibleContentBlock>
{:else if section.type === AgenticSectionType.REASONING_PENDING}
{@const reasoningTitle = isStreaming ? 'Reasoning...' : 'Reasoning'}
{@const reasoningSubtitle = isStreaming ? 'streaming...' : 'incomplete'}
<CollapsibleContentBlock
open={isExpanded(index, section)}
class="my-2"
icon={Brain}
title={reasoningTitle}
subtitle={reasoningSubtitle}
onToggle={() => toggleExpanded(index, section)}
>
<div class="pt-3">
<div class="text-xs leading-relaxed break-words whitespace-pre-wrap">
{section.content}
</div>
</div>
</CollapsibleContentBlock>
{/if}
{/each}
</div>
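
For reference, a rough trace of splitReasoningSegments on a mixed input, assuming the tag values above (the sample text is invented):

// Illustrative trace only, not part of this commit's diff.
const sample =
  '<<<reasoning_content_start>>>plan the steps<<<reasoning_content_end>>>' +
  'Final answer text ' +
  '<<<reasoning_content_start>>>unfinished thought';

// splitReasoningSegments(sample) walks the string with indexOf and yields:
// [
//   { type: 'reasoning',         content: 'plan the steps' },     // closed tag pair
//   { type: 'text',              content: 'Final answer text ' }, // handed to parseToolCallContent
//   { type: 'reasoning_pending', content: 'unfinished thought' }  // open tag, still streaming
// ]
// parseAgenticContent then maps these segments onto REASONING and REASONING_PENDING
// sections and runs the text segment through the existing tool-call parser.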

View File

@ -66,15 +66,6 @@
let shouldBranchAfterEdit = $state(false);
let textareaElement: HTMLTextAreaElement | undefined = $state();
let thinkingContent = $derived.by(() => {
if (message.role === MessageRole.ASSISTANT) {
const trimmedThinking = message.thinking?.trim();
return trimmedThinking ? trimmedThinking : null;
}
return null;
});
// Auto-start edit mode if this message is the pending edit target
$effect(() => {
const pendingId = pendingEditMessageId();
@ -309,6 +300,5 @@
onShouldBranchAfterEditChange={(value) => (shouldBranchAfterEdit = value)}
{showDeleteDialog}
{siblingInfo}
{thinkingContent}
/>
{/if}

View File

@ -4,7 +4,6 @@
ModelBadge,
ChatMessageActions,
ChatMessageStatistics,
ChatMessageThinkingBlock,
MarkdownContent,
ModelsSelector
} from '$lib/components/app';
@ -23,6 +22,7 @@
import { config } from '$lib/stores/settings.svelte';
import { conversationsStore } from '$lib/stores/conversations.svelte';
import { isRouterMode } from '$lib/stores/server.svelte';
import { AGENTIC_TAGS, REASONING_TAGS } from '$lib/constants/agentic';
interface Props {
class?: string;
@ -53,7 +53,6 @@
shouldBranchAfterEdit?: boolean;
siblingInfo?: ChatMessageSiblingInfo | null;
textareaElement?: HTMLTextAreaElement;
thinkingContent: string | null;
}
let {
@ -79,15 +78,17 @@
showDeleteDialog,
shouldBranchAfterEdit = false,
siblingInfo = null,
textareaElement = $bindable(),
thinkingContent
textareaElement = $bindable()
}: Props = $props();
const hasAgenticMarkers = $derived(
messageContent?.includes('<<<AGENTIC_TOOL_CALL_START>>>') ?? false
messageContent?.includes(AGENTIC_TAGS.TOOL_CALL_START) ?? false
);
const hasStreamingToolCall = $derived(isChatStreaming() && agenticStreamingToolCall() !== null);
const isAgenticContent = $derived(hasAgenticMarkers || hasStreamingToolCall);
const hasReasoningMarkers = $derived(messageContent?.includes(REASONING_TAGS.START) ?? false);
const isStructuredContent = $derived(
hasAgenticMarkers || hasReasoningMarkers || hasStreamingToolCall
);
const processingState = useProcessingState();
let currentConfig = $derived(config());
@ -123,14 +124,6 @@
role="group"
aria-label="Assistant message with actions"
>
{#if thinkingContent}
<ChatMessageThinkingBlock
reasoningContent={thinkingContent}
isStreaming={!message.timestamp}
hasRegularContent={!!messageContent?.trim()}
/>
{/if}
{#if message?.role === 'assistant' && isLoading() && !message?.content?.trim()}
<div class="mt-6 w-full max-w-[48rem]" in:fade>
<div class="processing-container">
@ -182,7 +175,7 @@
{:else if message.role === 'assistant'}
{#if showRawOutput}
<pre class="raw-output">{messageContent || ''}</pre>
{:else if isAgenticContent}
{:else if isStructuredContent}
<AgenticContent content={messageContent || ''} isStreaming={isChatStreaming()} {message} />
{:else}
<MarkdownContent content={messageContent || ''} {message} />
@ -248,7 +241,7 @@
{onCopy}
{onEdit}
{onRegenerate}
onContinue={currentConfig.enableContinueGeneration && !thinkingContent
onContinue={currentConfig.enableContinueGeneration && !hasReasoningMarkers
? onContinue
: undefined}
{onDelete}
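
The rendering decision that replaces the dedicated thinking block reduces to a single predicate; a simplified sketch that mirrors the derived values above (the helper name is hypothetical):

// Simplified sketch of the routing in ChatMessageAssistant.
const rendersAsStructuredContent = (
  content: string,
  hasStreamingToolCall: boolean
): boolean =>
  content.includes(AGENTIC_TAGS.TOOL_CALL_START) ||
  content.includes(REASONING_TAGS.START) ||
  hasStreamingToolCall;

// true  -> AgenticContent (tool calls and reasoning rendered as collapsible blocks)
// false -> MarkdownContent (plain assistant text)
// Continue generation is additionally gated on !hasReasoningMarkers, per the
// onContinue binding above.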

View File

@ -1,42 +0,0 @@
<script lang="ts">
import { Brain } from '@lucide/svelte';
import { config } from '$lib/stores/settings.svelte';
import { CollapsibleContentBlock } from '$lib/components/app';
interface Props {
class?: string;
hasRegularContent?: boolean;
isStreaming?: boolean;
reasoningContent: string | null;
}
let {
class: className = '',
hasRegularContent = false,
isStreaming = false,
reasoningContent
}: Props = $props();
const currentConfig = config();
let isExpanded = $state(currentConfig.showThoughtInProgress);
$effect(() => {
if (hasRegularContent && reasoningContent && currentConfig.showThoughtInProgress) {
isExpanded = false;
}
});
</script>
<CollapsibleContentBlock
bind:open={isExpanded}
class="mb-6 {className}"
icon={Brain}
title={isStreaming ? 'Reasoning...' : 'Reasoning'}
>
<div class="pt-3">
<div class="text-xs leading-relaxed break-words whitespace-pre-wrap">
{reasoningContent ?? ''}
</div>
</div>
</CollapsibleContentBlock>

View File

@ -21,7 +21,6 @@ export { default as ChatMessageActions } from './chat/ChatMessages/ChatMessageAc
export { default as ChatMessageBranchingControls } from './chat/ChatMessages/ChatMessageBranchingControls.svelte';
export { default as ChatMessageStatistics } from './chat/ChatMessages/ChatMessageStatistics.svelte';
export { default as ChatMessageSystem } from './chat/ChatMessages/ChatMessageSystem.svelte';
export { default as ChatMessageThinkingBlock } from './chat/ChatMessages/ChatMessageThinkingBlock.svelte';
export { default as ChatMessages } from './chat/ChatMessages/ChatMessages.svelte';
export { default as CollapsibleContentBlock } from './chat/ChatMessages/CollapsibleContentBlock.svelte';
export { default as MessageBranchingControls } from './chat/ChatMessages/ChatMessageBranchingControls.svelte';

View File

@ -17,6 +17,11 @@ export const AGENTIC_TAGS = {
TAG_SUFFIX: '>>>'
} as const;
export const REASONING_TAGS = {
START: '<<<reasoning_content_start>>>',
END: '<<<reasoning_content_end>>>'
} as const;
// Regex patterns for parsing agentic content
export const AGENTIC_REGEX = {
// Matches completed tool calls (with END marker)
@ -31,7 +36,7 @@ export const AGENTIC_REGEX = {
// Matches early tool call (just START marker)
EARLY_MATCH: /<<<AGENTIC_TOOL_CALL_START>>>([\s\S]*)$/,
// Matches partial marker at end of content
PARTIAL_MARKER: /<<<[A-Z_]*$/,
PARTIAL_MARKER: /<<<[A-Za-z_]*$/,
// Matches tool name inside content
TOOL_NAME_EXTRACT: /<<<TOOL_NAME:([^>]+)>>>/
} as const;
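
The PARTIAL_MARKER change widens the character class from [A-Z_] to [A-Za-z_] so that a reasoning tag truncated mid-stream (lowercase) is stripped from pending content just like a truncated tool-call marker. A small check, roughly equivalent to what stripPartialMarker does with match() and slice():

// Illustrative only; the example strings are invented.
const PARTIAL_MARKER = /<<<[A-Za-z_]*$/;

'Thinking so far <<<reasoning_conte'.replace(PARTIAL_MARKER, '').trim();
// => 'Thinking so far'  (the old [A-Z_] class would have left the fragment in place)

'Calling a tool <<<AGENTIC_TOOL_CA'.replace(PARTIAL_MARKER, '').trim();
// => 'Calling a tool'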

View File

@ -5,5 +5,7 @@ export enum AgenticSectionType {
TEXT = 'text',
TOOL_CALL = 'tool_call',
TOOL_CALL_PENDING = 'tool_call_pending',
TOOL_CALL_STREAMING = 'tool_call_streaming'
TOOL_CALL_STREAMING = 'tool_call_streaming',
REASONING = 'reasoning',
REASONING_PENDING = 'reasoning_pending'
}

View File

@ -489,10 +489,6 @@ export class ChatService {
const reasoningContent = data.choices[0]?.message?.reasoning_content;
const toolCalls = data.choices[0]?.message?.tool_calls;
if (reasoningContent) {
console.log('Full reasoning content:', reasoningContent);
}
let serializedToolCalls: string | undefined;
if (toolCalls && toolCalls.length > 0) {

View File

@ -165,7 +165,6 @@ export class DatabaseService {
role: 'system',
content: '',
parent: null,
thinking: '',
toolCalls: '',
children: []
};
@ -201,7 +200,6 @@ export class DatabaseService {
role: 'system',
content: trimmedPrompt,
parent: parentId,
thinking: '',
children: []
};