refactor: Improve server properties management

This commit is contained in:
Aleksander Grygier 2025-11-26 14:05:42 +01:00
parent 19e5385bd5
commit b1cf8bb814
7 changed files with 91 additions and 238 deletions

View File

@ -326,7 +326,9 @@
<form
onsubmit={handleSubmit}
class="{INPUT_CLASSES} border-radius-bottom-none mx-auto max-w-[48rem] overflow-hidden rounded-3xl backdrop-blur-md {className}"
class="{INPUT_CLASSES} border-radius-bottom-none mx-auto max-w-[48rem] overflow-hidden rounded-3xl backdrop-blur-md {disabled
? 'cursor-not-allowed opacity-60'
: ''} {className}"
>
<ChatAttachmentsList
bind:uploadedFiles

View File

@ -3,12 +3,10 @@
import {
ChatForm,
ChatScreenHeader,
ChatScreenWarning,
ChatMessages,
ChatScreenProcessingInfo,
DialogEmptyFileAlert,
DialogChatError,
ServerErrorSplash,
ServerLoadingSplash,
DialogConfirmation
} from '$lib/components/app';
@ -35,7 +33,7 @@
supportsVision,
supportsAudio,
propsLoading,
serverWarning,
propsError,
propsStore,
isRouterMode,
fetchModelProps,
@ -49,9 +47,8 @@
import { processFilesToChatUploaded } from '$lib/utils/process-uploaded-files';
import { onMount } from 'svelte';
import { fade, fly, slide } from 'svelte/transition';
import { Trash2 } from '@lucide/svelte';
import { Trash2, AlertTriangle, RefreshCw } from '@lucide/svelte';
import ChatScreenDragOverlay from './ChatScreenDragOverlay.svelte';
import { ModelModality } from '$lib/enums';
let { showCenteredEmpty = false } = $props();
@ -91,6 +88,7 @@
let activeErrorDialog = $derived(errorDialog());
let isServerLoading = $derived(propsLoading());
let hasPropsError = $derived(!!propsError());
let isCurrentConversationLoading = $derived(isLoading());
@ -399,12 +397,34 @@
>
<ChatScreenProcessingInfo />
{#if serverWarning()}
<ChatScreenWarning class="pointer-events-auto mx-auto max-w-[48rem] px-4" />
{#if hasPropsError}
<div
class="pointer-events-auto mx-auto mb-3 max-w-[48rem] px-4"
in:fly={{ y: 10, duration: 250 }}
>
<div class="rounded-xl border border-destructive/30 bg-destructive/10 px-4 py-3">
<div class="flex items-center justify-between">
<div class="flex items-center gap-2">
<AlertTriangle class="h-4 w-4 text-destructive" />
<span class="text-sm font-medium text-destructive">Server unavailable</span>
<span class="text-sm text-muted-foreground">{propsError()}</span>
</div>
<button
onclick={() => propsStore.fetch()}
disabled={isServerLoading}
class="flex items-center gap-1.5 rounded-lg bg-destructive/20 px-3 py-1.5 text-xs font-medium text-destructive hover:bg-destructive/30 disabled:opacity-50"
>
<RefreshCw class="h-3 w-3 {isServerLoading ? 'animate-spin' : ''}" />
{isServerLoading ? 'Retrying...' : 'Retry'}
</button>
</div>
</div>
</div>
{/if}
<div class="conversation-chat-form pointer-events-auto rounded-t-3xl pb-4">
<ChatForm
disabled={hasPropsError}
isLoading={isCurrentConversationLoading}
onFileRemove={handleFileRemove}
onFileUpload={handleFileUpload}
@ -416,11 +436,8 @@
</div>
</div>
</div>
{:else if propsStore.error && !propsStore.serverProps}
<!-- Server Error State (when error and no cached props) -->
<ServerErrorSplash error={propsStore.error} />
{:else if isServerLoading || !propsStore.serverProps}
<!-- Server Loading State (also shown when props haven't loaded yet) -->
{:else if isServerLoading}
<!-- Server Loading State -->
<ServerLoadingSplash />
{:else}
<div
@ -437,18 +454,37 @@
<h1 class="mb-4 text-3xl font-semibold tracking-tight">llama.cpp</h1>
<p class="text-lg text-muted-foreground">
{propsStore.supportedModalities.includes(ModelModality.AUDIO)
{propsStore.serverProps?.modalities?.audio
? 'Record audio, type a message '
: 'Type a message'} or upload files to get started
</p>
</div>
{#if serverWarning()}
<ChatScreenWarning />
{#if hasPropsError}
<div class="mb-4" in:fly={{ y: 10, duration: 250 }}>
<div class="rounded-xl border border-destructive/30 bg-destructive/10 px-4 py-3">
<div class="flex items-center justify-between">
<div class="flex items-center gap-2">
<AlertTriangle class="h-4 w-4 text-destructive" />
<span class="text-sm font-medium text-destructive">Server unavailable</span>
<span class="text-sm text-muted-foreground">{propsError()}</span>
</div>
<button
onclick={() => propsStore.fetch()}
disabled={isServerLoading}
class="flex items-center gap-1.5 rounded-lg bg-destructive/20 px-3 py-1.5 text-xs font-medium text-destructive hover:bg-destructive/30 disabled:opacity-50"
>
<RefreshCw class="h-3 w-3 {isServerLoading ? 'animate-spin' : ''}" />
{isServerLoading ? 'Retrying...' : 'Retry'}
</button>
</div>
</div>
</div>
{/if}
<div in:fly={{ y: 10, duration: 250, delay: 300 }}>
<div in:fly={{ y: 10, duration: 250, delay: hasPropsError ? 0 : 300 }}>
<ChatForm
disabled={hasPropsError}
isLoading={isCurrentConversationLoading}
onFileRemove={handleFileRemove}
onFileUpload={handleFileUpload}

View File

@ -1,38 +0,0 @@
<script lang="ts">
import { AlertTriangle, RefreshCw } from '@lucide/svelte';
import { propsLoading, propsStore } from '$lib/stores/props.svelte';
import { fly } from 'svelte/transition';
interface Props {
class?: string;
}
let { class: className = '' }: Props = $props();
function handleRefreshServer() {
propsStore.fetch();
}
</script>
<div class="mb-3 {className}" in:fly={{ y: 10, duration: 250 }}>
<div
class="rounded-md border border-yellow-200 bg-yellow-50 px-3 py-2 dark:border-yellow-800 dark:bg-yellow-950"
>
<div class="flex items-center justify-between">
<div class="flex items-center">
<AlertTriangle class="h-4 w-4 text-yellow-600 dark:text-yellow-400" />
<p class="ml-2 text-sm text-yellow-800 dark:text-yellow-200">
Server `/props` endpoint not available - using cached data
</p>
</div>
<button
onclick={handleRefreshServer}
disabled={propsLoading()}
class="ml-3 flex items-center gap-1.5 rounded bg-yellow-100 px-2 py-1 text-xs font-medium text-yellow-800 hover:bg-yellow-200 disabled:opacity-50 dark:bg-yellow-900 dark:text-yellow-200 dark:hover:bg-yellow-800"
>
<RefreshCw class="h-3 w-3 {propsLoading() ? 'animate-spin' : ''}" />
{propsLoading() ? 'Checking...' : 'Retry'}
</button>
</div>
</div>
</div>

View File

@ -25,7 +25,6 @@ export { default as ChatMessages } from './chat/ChatMessages/ChatMessages.svelte
export { default as ChatScreen } from './chat/ChatScreen/ChatScreen.svelte';
export { default as ChatScreenHeader } from './chat/ChatScreen/ChatScreenHeader.svelte';
export { default as ChatScreenProcessingInfo } from './chat/ChatScreen/ChatScreenProcessingInfo.svelte';
export { default as ChatScreenWarning } from './chat/ChatScreen/ChatScreenWarning.svelte';
export { default as ChatSettings } from './chat/ChatSettings/ChatSettings.svelte';
export { default as ChatSettingsFooter } from './chat/ChatSettings/ChatSettingsFooter.svelte';

View File

@ -1,5 +1,2 @@
export const SERVER_PROPS_LOCALSTORAGE_KEY = 'LlamaCppWebui.serverProps';
export const SELECTED_MODEL_LOCALSTORAGE_KEY = 'LlamaCppWebui.selectedModel';
export const CONFIG_LOCALSTORAGE_KEY = 'LlamaCppWebui.config';
export const USER_OVERRIDES_LOCALSTORAGE_KEY = 'LlamaCppWebui.userOverrides';

View File

@ -1,6 +1,6 @@
import { config } from '$lib/stores/settings.svelte';
import { selectedModelName } from '$lib/stores/models.svelte';
import { isRouterMode } from '$lib/stores/props.svelte';
import { isRouterMode, propsStore } from '$lib/stores/props.svelte';
import type {
ApiChatCompletionRequest,
ApiChatCompletionResponse,
@ -840,11 +840,24 @@ export class ChatService {
* Extracts model name from Chat Completions API response data.
* Handles various response formats including streaming chunks and final responses.
*
* WORKAROUND: In single model mode, llama-server returns a default/incorrect model name
* in the response. We override it with the actual model name from propsStore.
*
* @param data - Raw response data from the Chat Completions API
* @returns Model name string if found, undefined otherwise
* @private
*/
private extractModelName(data: unknown): string | undefined {
// WORKAROUND: In single model mode, use model name from props instead of API response
// because llama-server returns `gpt-3.5-turbo` value in the `model` field
const isRouter = isRouterMode();
if (!isRouter) {
const propsModelName = propsStore.modelName;
if (propsModelName) {
return propsModelName;
}
}
const asRecord = (value: unknown): Record<string, unknown> | undefined => {
return typeof value === 'object' && value !== null
? (value as Record<string, unknown>)

View File

@ -1,5 +1,3 @@
import { browser } from '$app/environment';
import { SERVER_PROPS_LOCALSTORAGE_KEY } from '$lib/constants/localstorage-keys';
import { PropsService } from '$lib/services/props';
import { ServerRole, ModelModality } from '$lib/enums';
@ -18,24 +16,12 @@ import { ServerRole, ModelModality } from '$lib/enums';
* - **Server Properties**: Model info, context size, build information
* - **Mode Detection**: MODEL (single model) vs ROUTER (multi-model)
* - **Capability Detection**: Vision and audio modality support
* - **Error Handling**: Graceful degradation with cached values
* - **Persistence**: LocalStorage caching for offline support
* - **Error Handling**: Clear error states when server unavailable
*/
class PropsStore {
constructor() {
if (!browser) return;
const cachedProps = this.readCachedServerProps();
if (cachedProps) {
this._serverProps = cachedProps;
this.detectServerRole(cachedProps);
}
}
private _serverProps = $state<ApiLlamaCppServerProps | null>(null);
private _loading = $state(false);
private _error = $state<string | null>(null);
private _serverWarning = $state<string | null>(null);
private _serverRole = $state<ServerRole | null>(null);
private fetchPromise: Promise<void> | null = null;
@ -43,71 +29,6 @@ class PropsStore {
private _modelPropsCache = $state<Map<string, ApiLlamaCppServerProps>>(new Map());
private _modelPropsFetching = $state<Set<string>>(new Set());
// ─────────────────────────────────────────────────────────────────────────────
// LocalStorage persistence with fingerprint validation
// ─────────────────────────────────────────────────────────────────────────────
/**
* Read cached server props from localStorage
* Note: Cache should be validated against fresh data using build_info fingerprint
*/
private readCachedServerProps(): ApiLlamaCppServerProps | null {
if (!browser) return null;
try {
const raw = localStorage.getItem(SERVER_PROPS_LOCALSTORAGE_KEY);
if (!raw) return null;
return JSON.parse(raw) as ApiLlamaCppServerProps;
} catch (error) {
console.warn('Failed to read cached server props from localStorage:', error);
return null;
}
}
/**
* Persist server props to localStorage
*/
private persistServerProps(props: ApiLlamaCppServerProps | null): void {
if (!browser) return;
try {
if (props) {
localStorage.setItem(SERVER_PROPS_LOCALSTORAGE_KEY, JSON.stringify(props));
} else {
localStorage.removeItem(SERVER_PROPS_LOCALSTORAGE_KEY);
}
} catch (error) {
console.warn('Failed to persist server props to localStorage:', error);
}
}
/**
* Validate cached props against fresh data using build_info fingerprint
* Returns true if cache is valid (same server instance)
*/
private isCacheValid(freshProps: ApiLlamaCppServerProps): boolean {
const cachedProps = this._serverProps;
if (!cachedProps) return true; // No cache to validate
// Compare build_info - different build means server was restarted or updated
if (cachedProps.build_info !== freshProps.build_info) {
console.info(
'Server build_info changed, invalidating cache',
`(${cachedProps.build_info} → ${freshProps.build_info})`
);
return false;
}
// Compare model_path - different model loaded means different configuration
if (cachedProps.model_path !== freshProps.model_path) {
console.info('Server model changed, invalidating cache');
return false;
}
return true;
}
// ─────────────────────────────────────────────────────────────────────────────
// Getters - Server Properties
// ─────────────────────────────────────────────────────────────────────────────
@ -124,10 +45,6 @@ class PropsStore {
return this._error;
}
get serverWarning(): string | null {
return this._serverWarning;
}
/**
* Get model name from server props.
* In MODEL mode: extracts from model_path or model_alias
@ -232,56 +149,39 @@ class PropsStore {
/**
* Fetches server properties from the server
*/
async fetch(options: { silent?: boolean } = {}): Promise<void> {
const { silent = false } = options;
const isSilent = silent && this._serverProps !== null;
async fetch(): Promise<void> {
if (this.fetchPromise) {
return this.fetchPromise;
}
if (!isSilent) {
this._loading = true;
this._error = null;
this._serverWarning = null;
}
const hadProps = this._serverProps !== null;
const previousBuildInfo = this._serverProps?.build_info;
const fetchPromise = (async () => {
try {
const props = await PropsService.fetch();
// Validate cache - if server was restarted, clear model-specific props cache
if (!this.isCacheValid(props)) {
// Clear model-specific props cache if server was restarted
if (previousBuildInfo && previousBuildInfo !== props.build_info) {
this._modelPropsCache.clear();
console.info('Cleared model props cache due to server change');
console.info('Cleared model props cache due to server restart');
}
this._serverProps = props;
this.persistServerProps(props);
this._error = null;
this._serverWarning = null;
this.detectServerRole(props);
} catch (error) {
if (isSilent && hadProps) {
console.warn('Silent server props refresh failed, keeping cached data:', error);
return;
}
this.handleFetchError(error, hadProps);
this._error = this.getErrorMessage(error);
console.error('Error fetching server properties:', error);
} finally {
if (!isSilent) {
this._loading = false;
}
this.fetchPromise = null;
}
})();
this.fetchPromise = fetchPromise;
await fetchPromise;
}
@ -335,84 +235,30 @@ class PropsStore {
// Error Handling
// ─────────────────────────────────────────────────────────────────────────────
private handleFetchError(error: unknown, hadProps: boolean): void {
const { errorMessage, isOfflineLikeError, isServerSideError } = this.normalizeFetchError(error);
let cachedProps: ApiLlamaCppServerProps | null = null;
if (!hadProps) {
cachedProps = this.readCachedServerProps();
if (cachedProps) {
this._serverProps = cachedProps;
this.detectServerRole(cachedProps);
this._error = null;
if (isOfflineLikeError || isServerSideError) {
this._serverWarning = errorMessage;
}
console.warn(
'Failed to refresh server properties, using cached values from localStorage:',
errorMessage
);
} else {
this._error = errorMessage;
}
} else {
this._error = null;
if (isOfflineLikeError || isServerSideError) {
this._serverWarning = errorMessage;
}
console.warn(
'Failed to refresh server properties, continuing with cached values:',
errorMessage
);
}
console.error('Error fetching server properties:', error);
}
private normalizeFetchError(error: unknown): {
errorMessage: string;
isOfflineLikeError: boolean;
isServerSideError: boolean;
} {
let errorMessage = 'Failed to connect to server';
let isOfflineLikeError = false;
let isServerSideError = false;
private getErrorMessage(error: unknown): string {
if (error instanceof Error) {
const message = error.message || '';
if (error.name === 'TypeError' && message.includes('fetch')) {
errorMessage = 'Server is not running or unreachable';
isOfflineLikeError = true;
return 'Server is not running or unreachable';
} else if (message.includes('ECONNREFUSED')) {
errorMessage = 'Connection refused - server may be offline';
isOfflineLikeError = true;
return 'Connection refused - server may be offline';
} else if (message.includes('ENOTFOUND')) {
errorMessage = 'Server not found - check server address';
isOfflineLikeError = true;
return 'Server not found - check server address';
} else if (message.includes('ETIMEDOUT')) {
errorMessage = 'Request timed out - the server took too long to respond';
isOfflineLikeError = true;
return 'Request timed out';
} else if (message.includes('503')) {
errorMessage = 'Server temporarily unavailable - try again shortly';
isServerSideError = true;
return 'Server temporarily unavailable';
} else if (message.includes('500')) {
errorMessage = 'Server error - check server logs';
isServerSideError = true;
return 'Server error - check server logs';
} else if (message.includes('404')) {
errorMessage = 'Server endpoint not found';
return 'Server endpoint not found';
} else if (message.includes('403') || message.includes('401')) {
errorMessage = 'Access denied';
return 'Access denied';
}
}
return { errorMessage, isOfflineLikeError, isServerSideError };
return 'Failed to connect to server';
}
// ─────────────────────────────────────────────────────────────────────────────
@ -425,11 +271,10 @@ class PropsStore {
clear(): void {
this._serverProps = null;
this._error = null;
this._serverWarning = null;
this._loading = false;
this._serverRole = null;
this.fetchPromise = null;
this.persistServerProps(null);
this._modelPropsCache.clear();
}
}
@ -442,7 +287,6 @@ export const propsStore = new PropsStore();
export const serverProps = () => propsStore.serverProps;
export const propsLoading = () => propsStore.loading;
export const propsError = () => propsStore.error;
export const serverWarning = () => propsStore.serverWarning;
export const modelName = () => propsStore.modelName;
export const supportedModalities = () => propsStore.supportedModalities;
export const supportsVision = () => propsStore.supportsVision;