webui: Add switcher to Chat Message UI to show raw LLM output (#19571)

Aleksander Grygier 2026-02-12 19:55:51 +01:00 committed by GitHub
parent 4b385bfcf8
commit 4c61875bf8
12 changed files with 64 additions and 28 deletions

Binary file not shown.

View File

@@ -139,6 +139,6 @@ sequenceDiagram
Note over settingsStore: UI-only (not synced):
rect rgb(255, 240, 240)
Note over settingsStore: systemMessage, custom (JSON)<br/>showStatistics, enableContinueGeneration<br/>autoMicOnEmpty, disableAutoScroll<br/>apiKey, pdfAsImage, disableReasoningFormat
Note over settingsStore: systemMessage, custom (JSON)<br/>showStatistics, enableContinueGeneration<br/>autoMicOnEmpty, disableAutoScroll<br/>apiKey, pdfAsImage, disableReasoningParsing, showRawOutputSwitch
end
```

View File

@@ -5,6 +5,7 @@
ChatMessageBranchingControls,
DialogConfirmation
} from '$lib/components/app';
import { Switch } from '$lib/components/ui/switch';
interface Props {
role: 'user' | 'assistant';
@@ -26,6 +27,9 @@
onConfirmDelete: () => void;
onNavigateToSibling?: (siblingId: string) => void;
onShowDeleteDialogChange: (show: boolean) => void;
showRawOutputSwitch?: boolean;
rawOutputEnabled?: boolean;
onRawOutputToggle?: (enabled: boolean) => void;
}
let {
@@ -42,7 +46,10 @@
onRegenerate,
role,
siblingInfo = null,
showDeleteDialog
showDeleteDialog,
showRawOutputSwitch = false,
rawOutputEnabled = false,
onRawOutputToggle
}: Props = $props();
function handleConfirmDelete() {
@@ -51,9 +58,9 @@
}
</script>
<div class="relative {justify === 'start' ? 'mt-2' : ''} flex h-6 items-center justify-{justify}">
<div class="relative {justify === 'start' ? 'mt-2' : ''} flex h-6 items-center justify-between">
<div
class="absolute top-0 {actionsPosition === 'left'
class="{actionsPosition === 'left'
? 'left-0'
: 'right-0'} flex items-center gap-2 opacity-100 transition-opacity"
>
@@ -81,6 +88,16 @@
<ActionButton icon={Trash2} tooltip="Delete" onclick={onDelete} />
</div>
</div>
{#if showRawOutputSwitch}
<div class="flex items-center gap-2">
<span class="text-xs text-muted-foreground">Show raw output</span>
<Switch
checked={rawOutputEnabled}
onCheckedChange={(checked) => onRawOutputToggle?.(checked)}
/>
</div>
{/if}
</div>
<DialogConfirmation
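A minimal TypeScript sketch of the controlled-switch contract these props add (the prop names come from the diff; the type and helper names are hypothetical):

```ts
// Sketch only: prop names are taken from the diff above; the type name and
// helper are hypothetical illustrations of the controlled-switch pattern.
type RawOutputProps = {
  showRawOutputSwitch?: boolean;                  // render the switch at all (default false)
  rawOutputEnabled?: boolean;                     // controlled value (default false)
  onRawOutputToggle?: (enabled: boolean) => void; // change notification to the parent
};

// Mirrors onCheckedChange above: with no handler passed, optional chaining
// makes the toggle a no-op instead of throwing.
function notifyToggle(props: RawOutputProps, checked: boolean): void {
  props.onRawOutputToggle?.(checked);
}
```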

View File

@@ -90,6 +90,9 @@
const processingState = useProcessingState();
// Local state for raw output toggle (per message)
let showRawOutput = $state(false);
let currentConfig = $derived(config());
let isRouter = $derived(isRouterMode());
let displayedModel = $derived((): string | null => {
@@ -238,7 +241,7 @@
</div>
</div>
{:else if message.role === 'assistant'}
{#if config().disableReasoningFormat}
{#if showRawOutput}
<pre class="raw-output">{messageContent || ''}</pre>
{:else}
<MarkdownContent content={messageContent || ''} />
@@ -352,6 +355,9 @@
{onConfirmDelete}
{onNavigateToSibling}
{onShowDeleteDialogChange}
showRawOutputSwitch={currentConfig.showRawOutputSwitch}
rawOutputEnabled={showRawOutput}
onRawOutputToggle={(enabled) => (showRawOutput = enabled)}
/>
{/if}
</div>
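How the two flags interact, as a plain TypeScript sketch (the helper is hypothetical; it restates the branches above): the global showRawOutputSwitch setting only reveals the toggle, while the per-message showRawOutput state picks the renderer.

```ts
// Hypothetical helper restating the logic above: the global setting only
// controls whether the switch is rendered; the per-message state decides
// how the message body is displayed.
function messageView(opts: { showRawOutputSwitch: boolean; showRawOutput: boolean }) {
  return {
    switchVisible: opts.showRawOutputSwitch,          // from settings (UI-only)
    body: opts.showRawOutput ? 'raw-pre' : 'markdown' // per-message $state(false)
  } as const;
}
```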

View File

@@ -21,6 +21,7 @@
chatStore,
errorDialog,
isLoading,
isChatStreaming,
isEditing,
getAddFilesHandler
} from '$lib/stores/chat.svelte';
@@ -81,7 +82,7 @@
let isServerLoading = $derived(serverLoading());
let hasPropsError = $derived(!!serverError());
let isCurrentConversationLoading = $derived(isLoading());
let isCurrentConversationLoading = $derived(isLoading() || isChatStreaming());
let isRouter = $derived(isRouterMode());

View File

@@ -254,8 +254,13 @@
type: 'checkbox'
},
{
key: 'disableReasoningFormat',
label: 'Show raw LLM output',
key: 'disableReasoningParsing',
label: 'Disable reasoning content parsing',
type: 'checkbox'
},
{
key: 'showRawOutputSwitch',
label: 'Enable raw output toggle',
type: 'checkbox'
},
{

View File

@@ -7,7 +7,8 @@ export const SETTING_CONFIG_DEFAULT: Record<string, string | number | boolean> =
theme: 'system',
showThoughtInProgress: false,
showToolCalls: false,
disableReasoningFormat: false,
disableReasoningParsing: false,
showRawOutputSwitch: false,
keepStatsVisible: false,
showMessageStats: true,
askForTitleConfirmation: false,
@@ -92,8 +93,10 @@ export const SETTING_CONFIG_INFO: Record<string, string> = {
showThoughtInProgress: 'Expand thought process by default when generating messages.',
showToolCalls:
'Display tool call labels and payloads from Harmony-compatible delta.tool_calls data below assistant messages.',
disableReasoningFormat:
'Show raw LLM output without backend parsing and frontend Markdown rendering to inspect streaming across different models.',
disableReasoningParsing:
'Send reasoning_format=none to prevent server-side extraction of reasoning tokens into a separate field.',
showRawOutputSwitch:
'Show a per-message toggle to display the message as plain text instead of Markdown-formatted content.',
keepStatsVisible: 'Keep processing statistics visible after generation finishes.',
showMessageStats:
'Display generation statistics (tokens/second, token count, duration) below each assistant message.',
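The old disableReasoningFormat setting is effectively split in two; a short sketch of what each flag drives (defaults mirror SETTING_CONFIG_DEFAULT above; the object name is illustrative):

```ts
// Illustrative summary; both defaults mirror SETTING_CONFIG_DEFAULT above.
const rawOutputSettings = {
  disableReasoningParsing: false, // request-level: sends reasoning_format = 'none' when true
  showRawOutputSwitch: false      // UI-only: reveals the per-message "Show raw output" switch
};
```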

View File

@@ -90,7 +90,7 @@ export class ChatService {
custom,
timings_per_token,
// Config options
disableReasoningFormat
disableReasoningParsing
} = options;
const normalizedMessages: ApiChatMessageData[] = messages
@@ -127,7 +127,7 @@
requestBody.model = options.model;
}
requestBody.reasoning_format = disableReasoningFormat ? 'none' : 'auto';
requestBody.reasoning_format = disableReasoningParsing ? 'none' : 'auto';
if (temperature !== undefined) requestBody.temperature = temperature;
if (max_tokens !== undefined) {
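A self-contained sketch of the request-body wiring (assuming an OpenAI-compatible chat-completions body; only the reasoning_format line is taken verbatim from the diff):

```ts
// Assumed minimal body shape; reasoning_format and its ternary are taken
// from the diff, the rest is illustrative scaffolding.
interface ChatRequestBody {
  messages: unknown[];
  stream?: boolean;
  reasoning_format?: 'auto' | 'none';
  temperature?: number;
}

function buildRequestBody(messages: unknown[], disableReasoningParsing?: boolean): ChatRequestBody {
  const requestBody: ChatRequestBody = { messages, stream: true };
  // 'none' leaves reasoning tokens inline in content; 'auto' lets the
  // server extract them into a separate field.
  requestBody.reasoning_format = disableReasoningParsing ? 'none' : 'auto';
  return requestBody;
}
```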

View File

@@ -70,12 +70,6 @@ export const SYNCABLE_PARAMETERS: SyncableParameter[] = [
canSync: true
},
{ key: 'showToolCalls', serverKey: 'showToolCalls', type: 'boolean', canSync: true },
{
key: 'disableReasoningFormat',
serverKey: 'disableReasoningFormat',
type: 'boolean',
canSync: true
},
{ key: 'keepStatsVisible', serverKey: 'keepStatsVisible', type: 'boolean', canSync: true },
{ key: 'showMessageStats', serverKey: 'showMessageStats', type: 'boolean', canSync: true },
{
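With this entry removed, neither new flag participates in server sync, matching the "UI-only (not synced)" note in the sequence diagram above. A sketch of what sync eligibility means here (the interface is assumed from the entries shown; isSynced is a hypothetical helper):

```ts
// Assumed from the entries above: a setting syncs to the server only when
// it appears in SYNCABLE_PARAMETERS with canSync: true.
interface SyncableParameter {
  key: string;
  serverKey: string;
  type: 'boolean';
  canSync: boolean;
}

function isSynced(params: SyncableParameter[], key: string): boolean {
  return params.some((p) => p.key === key && p.canSync);
}
// After this removal, isSynced(SYNCABLE_PARAMETERS, 'disableReasoningParsing')
// and isSynced(SYNCABLE_PARAMETERS, 'showRawOutputSwitch') are both false.
```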

View File

@@ -118,6 +118,16 @@ class ChatStore {
this.isLoading = this.isChatLoading(convId);
const streamingState = this.getChatStreaming(convId);
this.currentResponse = streamingState?.response || '';
this.isStreamingActive = streamingState !== undefined;
this.setActiveProcessingConversation(convId);
// Sync streaming content to activeMessages so the UI displays the current content
if (streamingState?.response && streamingState?.messageId) {
const idx = conversationsStore.findMessageIndex(streamingState.messageId);
if (idx !== -1) {
conversationsStore.updateMessageAtIndex(idx, { content: streamingState.response });
}
}
}
/**
@@ -1639,7 +1649,7 @@
// Config options needed by ChatService
if (currentConfig.systemMessage) apiOptions.systemMessage = currentConfig.systemMessage;
if (currentConfig.disableReasoningFormat) apiOptions.disableReasoningFormat = true;
if (currentConfig.disableReasoningParsing) apiOptions.disableReasoningParsing = true;
if (hasValue(currentConfig.temperature))
apiOptions.temperature = Number(currentConfig.temperature);
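The sync step added in the first hunk of this file can be read as the following standalone TypeScript sketch (store shapes are assumed from the calls above):

```ts
// Shapes assumed from the calls in the hunk above.
interface StreamingState {
  response?: string;
  messageId?: string;
}

// While a response is streaming, copy the partial text into the message list
// so the UI shows current content (e.g. after switching conversations).
function syncStreamingContent(
  state: StreamingState | undefined,
  findMessageIndex: (id: string) => number,
  updateMessageAtIndex: (index: number, patch: { content: string }) => void
): void {
  if (state?.response && state?.messageId) {
    const idx = findMessageIndex(state.messageId);
    if (idx !== -1) updateMessageAtIndex(idx, { content: state.response });
  }
}
```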

View File

@@ -18,8 +18,8 @@ export interface SettingsChatServiceOptions {
model?: string;
// System message to inject
systemMessage?: string;
// Disable reasoning format (use 'none' instead of 'auto')
disableReasoningFormat?: boolean;
// Disable reasoning parsing (use 'none' instead of 'auto')
disableReasoningParsing?: boolean;
// Generation parameters
temperature?: number;
max_tokens?: number;

View File

@@ -93,7 +93,7 @@
}}
play={async () => {
const { settingsStore } = await import('$lib/stores/settings.svelte');
settingsStore.updateConfig('disableReasoningFormat', false);
settingsStore.updateConfig('showRawOutputSwitch', false);
}}
/>
@@ -105,7 +105,7 @@
}}
play={async () => {
const { settingsStore } = await import('$lib/stores/settings.svelte');
settingsStore.updateConfig('disableReasoningFormat', false);
settingsStore.updateConfig('showRawOutputSwitch', false);
}}
/>
@@ -117,7 +117,7 @@
}}
play={async () => {
const { settingsStore } = await import('$lib/stores/settings.svelte');
settingsStore.updateConfig('disableReasoningFormat', false);
settingsStore.updateConfig('showRawOutputSwitch', false);
}}
/>
@@ -129,7 +129,7 @@
}}
play={async () => {
const { settingsStore } = await import('$lib/stores/settings.svelte');
settingsStore.updateConfig('disableReasoningFormat', true);
settingsStore.updateConfig('showRawOutputSwitch', true);
}}
/>
@@ -141,7 +141,7 @@
asChild
play={async () => {
const { settingsStore } = await import('$lib/stores/settings.svelte');
settingsStore.updateConfig('disableReasoningFormat', false);
settingsStore.updateConfig('showRawOutputSwitch', false);
// Phase 1: Stream reasoning content in chunks
let reasoningText =
'I need to think about this carefully. Let me break down the problem:\n\n1. The user is asking for help with something complex\n2. I should provide a thorough and helpful response\n3. I need to consider multiple approaches\n4. The best solution would be to explain step by step\n\nThis approach will ensure clarity and understanding.';
@@ -193,7 +193,7 @@
}}
play={async () => {
const { settingsStore } = await import('$lib/stores/settings.svelte');
settingsStore.updateConfig('disableReasoningFormat', false);
settingsStore.updateConfig('showRawOutputSwitch', false);
// Import the chat store to simulate loading state
const { chatStore } = await import('$lib/stores/chat.svelte');