webui: remove artificial cache limit, let GC handle cleanup on conversation change

Author: Pascal
Date: 2026-02-02 08:37:38 +01:00
Parent: 965655fafb
Commit: 4642664c1a

1 changed file with 2 additions and 7 deletions

@@ -61,7 +61,8 @@
 let pendingMarkdown: string | null = null;
 let isProcessing = false;
-// Incremental parsing cache, avoids re-transforming stable blocks
+// Per-instance transform cache, avoids re-transforming stable blocks during streaming
+// Garbage collected when component is destroyed (on conversation change)
 const transformCache = new SvelteMap<string, string>();
 let previousContent = '';
@@ -237,12 +238,6 @@
 transformCache.set(hash, html);
-// Limit cache size (generous limit for 200K token contexts)
-if (transformCache.size > 5000) {
-  const keysToDelete = Array.from(transformCache.keys()).slice(0, 1000);
-  keysToDelete.forEach((k) => transformCache.delete(k));
-}
 return { html, hash };
 }
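
For context, here is a minimal sketch of the cache pattern this change leaves behind: each component instance keeps its own transform cache keyed by a block hash, entries are never evicted manually, and the whole map becomes unreachable (and is garbage collected) when the component is destroyed on conversation change. A plain Map stands in for the SvelteMap used in the real file, and fnvHash / renderMarkdownBlock are hypothetical stand-ins for the webui's actual hashing and markdown transform.

// Per-instance cache: created with the component, garbage collected when the
// component instance is destroyed on conversation change. No manual size limit.
const transformCache = new Map<string, string>();

// Hypothetical cheap string hash (FNV-1a style) used as the cache key.
function fnvHash(input: string): string {
  let hash = 0x811c9dc5;
  for (let i = 0; i < input.length; i++) {
    hash ^= input.charCodeAt(i);
    hash = Math.imul(hash, 0x01000193);
  }
  return (hash >>> 0).toString(16);
}

// Hypothetical markdown-to-HTML transform for a single stable block.
declare function renderMarkdownBlock(markdown: string): string;

function transformBlock(markdown: string): { html: string; hash: string } {
  const hash = fnvHash(markdown);
  const cached = transformCache.get(hash);
  if (cached !== undefined) {
    // Stable block was already transformed during an earlier streaming pass.
    return { html: cached, hash };
  }
  const html = renderMarkdownBlock(markdown);
  transformCache.set(hash, html);
  return { html, hash };
}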