// Paste/extraction residue from a file-listing header, commented out so the
// file parses as TypeScript:
// Files
// zclaw_openfang/desktop/src/lib/message-virtualization.ts
// 2026-03-17 23:26:16 +08:00
// 496 lines
// 12 KiB
// TypeScript
/**
* Message Virtualization Utilities
*
* Provides efficient rendering for large message lists (10,000+ messages)
* using react-window's VariableSizeList with dynamic height measurement.
*
* @module message-virtualization
*/
import { useRef, useCallback, useMemo, useEffect, type CSSProperties, type ReactNode } from 'react';
import React from 'react';
import type { ListImperativeAPI } from 'react-window';
/**
 * Message item interface for virtualization.
 *
 * The minimal shape the utilities in this module need: a stable identity,
 * a (possibly estimated) pixel height, and the role used to pick a default
 * height before measurement.
 */
export interface VirtualizedMessageItem {
  /** Stable unique identifier; used as the height-cache key. */
  id: string;
  /** Current pixel height (measured, or a DEFAULT_HEIGHTS estimate). */
  height: number;
  /** Message author/category; selects the default height estimate. */
  role: 'user' | 'assistant' | 'tool' | 'hand' | 'workflow' | 'system';
}
/**
 * Props for the virtualized message list component.
 */
export interface VirtualizedMessageListProps {
  /** Messages to render, in display order. */
  messages: VirtualizedMessageItem[];
  /** Renders one message; the provided `style` positions the row and should be applied to the rendered element. */
  renderMessage: (id: string, style: CSSProperties) => ReactNode;
  /** Viewport height in pixels. */
  height: number;
  /** Viewport width in pixels or a CSS length (e.g. '100%'). */
  width: number | string;
  /** Extra rows to render outside the viewport on each side. */
  overscan?: number;
  /** Called with the scroll offset as the list scrolls. */
  onScroll?: (scrollTop: number) => void;
}
/**
 * Default estimated heights (px) for each message role.
 * Used before actual measurement; roles not listed here fall back to
 * 100px inside useVirtualizedMessages.getHeight.
 */
const DEFAULT_HEIGHTS: Record<string, number> = {
  user: 80,
  assistant: 150,
  tool: 100,
  hand: 120,
  workflow: 100,
  system: 60,
};
/**
 * Hook return type for virtualized message management.
 */
export interface UseVirtualizedMessagesReturn {
  /** Reference to the react-window List imperative API instance. */
  listRef: React.RefObject<ListImperativeAPI | null>;
  /** Get the current height for a message: measured value, else the role's default, else 100px. */
  getHeight: (id: string, role: string) => number;
  /** Update the measured height for a message. Mutates a ref only — does NOT trigger a re-render. */
  setHeight: (id: string, height: number) => void;
  /** Total height (px) of all messages: sum of getHeight over the current list. */
  totalHeight: number;
  /** Scroll to the bottom of the list (no-op when the list is empty or unmounted). */
  scrollToBottom: () => void;
  /** Scroll a message index into the center of the viewport; out-of-range indices are ignored. */
  scrollToIndex: (index: number) => void;
  /** Clear the measured-height cache; heights revert to role defaults until re-measured. */
  resetCache: () => void;
}
/**
 * Hook for virtualized message rendering with dynamic height measurement.
 *
 * @param messages - Array of message items to virtualize
 * @param defaultHeights - Optional custom default heights per role
 * @returns Object containing list ref, height getters/setters, and scroll utilities
 *
 * NOTE(review): the example below shows the react-window v1
 * `VariableSizeList` API, while the imported `ListImperativeAPI` and the
 * `scrollToRow` calls in this hook are the v2 `List` API — confirm which
 * version the consuming components actually use.
 *
 * @example
 * ```tsx
 * const { listRef, getHeight, setHeight, scrollToBottom } = useVirtualizedMessages(messages);
 *
 * // In render:
 * <VariableSizeList
 * ref={listRef}
 * itemCount={messages.length}
 * itemSize={(index) => getHeight(messages[index].id, messages[index].role)}
 * >
 * {({ index, style }) => (
 * <MessageRenderer
 * message={messages[index]}
 * style={style}
 * onHeightChange={(h) => setHeight(messages[index].id, h)}
 * />
 * )}
 * </VariableSizeList>
 * ```
 */
export function useVirtualizedMessages(
  messages: VirtualizedMessageItem[],
  defaultHeights: Record<string, number> = DEFAULT_HEIGHTS
): UseVirtualizedMessagesReturn {
  const listRef = useRef<ListImperativeAPI>(null);
  // Measured heights keyed by message id. Kept in a ref (not state) so
  // measurement updates do not trigger re-renders; the list reads fresh
  // values whenever it next renders.
  const heightsRef = useRef<Map<string, number>>(new Map());
  // Previous list length, used to detect appended messages for auto-scroll.
  const prevMessagesLengthRef = useRef<number>(0);
  /**
   * Get height for a message: measured value first, then the role's
   * default, then a 100px fallback for unknown roles.
   */
  const getHeight = useCallback(
    (id: string, role: string): number => {
      return heightsRef.current.get(id) ?? defaultHeights[role] ?? 100;
    },
    [defaultHeights]
  );
  /**
   * Update height when a message is measured.
   * NOTE(review): this only mutates the ref — it does not force a
   * re-render or invalidate react-window's cached offsets, so the new
   * height takes effect only on the next render triggered elsewhere.
   * Confirm that is sufficient for the consuming component.
   */
  const setHeight = useCallback((id: string, height: number): void => {
    const current = heightsRef.current.get(id);
    if (current !== height) {
      heightsRef.current.set(id, height);
      // Height updated - the list will use the new height on next render
    }
  }, []);
  /**
   * Total pixel height of all messages. Recomputed when the messages array
   * identity changes; reads from the heights ref, so it can lag behind
   * setHeight calls made between renders.
   */
  const totalHeight = useMemo((): number => {
    return messages.reduce(
      (sum, msg) => sum + getHeight(msg.id, msg.role),
      0
    );
  }, [messages, getHeight]);
  /**
   * Scroll to the bottom of the list (last row aligned to the viewport end).
   */
  const scrollToBottom = useCallback((): void => {
    if (listRef.current && messages.length > 0) {
      listRef.current.scrollToRow({ index: messages.length - 1, align: 'end' });
    }
  }, [messages.length]);
  /**
   * Scroll a specific message index into the center of the viewport.
   * Out-of-range indices are silently ignored.
   */
  const scrollToIndex = useCallback((index: number): void => {
    if (listRef.current && index >= 0 && index < messages.length) {
      listRef.current.scrollToRow({ index, align: 'center' });
    }
  }, [messages.length]);
  /**
   * Clear the measured-height cache; subsequent getHeight calls fall back
   * to the role defaults until messages are re-measured.
   */
  const resetCache = useCallback((): void => {
    heightsRef.current.clear();
  }, []);
  /**
   * Auto-scroll to bottom when new messages arrive.
   * NOTE(review): scrolls unconditionally whenever the list grows — this
   * will also yank the view down while the user has scrolled up to read
   * history. Confirm that is the intended UX.
   */
  useEffect(() => {
    if (messages.length > prevMessagesLengthRef.current) {
      // New messages added, scroll to bottom
      scrollToBottom();
    }
    prevMessagesLengthRef.current = messages.length;
  }, [messages.length, scrollToBottom]);
  return {
    listRef,
    getHeight,
    setHeight,
    totalHeight,
    scrollToBottom,
    scrollToIndex,
    resetCache,
  };
}
/**
 * LRU Cache for rendered messages.
 * Useful for caching computed message data or rendered content.
 *
 * Implementation note: JavaScript Maps iterate in insertion order, so LRU
 * ordering is maintained by deleting and re-inserting a key on access —
 * O(1) per operation, replacing the previous O(n) indexOf/splice
 * bookkeeping on a separate access array. Also fixes an infinite loop in
 * set() when constructed with maxSize <= 0 (the eviction loop could never
 * drain an already-empty order array); capacity is now clamped to >= 1.
 *
 * @typeParam T - Type of cached data
 *
 * @example
 * ```tsx
 * const cache = new MessageCache<ParsedMessageContent>(100);
 *
 * // Get or compute
 * let content = cache.get(messageId);
 * if (!content) {
 * content = parseMarkdown(message.content);
 * cache.set(messageId, content);
 * }
 * ```
 */
export class MessageCache<T> {
  /** Entries in access order: first key is the least recently used. */
  private cache: Map<string, { data: T; timestamp: number }>;
  /** Maximum number of entries; always >= 1. */
  private readonly maxSize: number;

  constructor(maxSize: number = 100) {
    // Clamp: a non-positive capacity would otherwise make set() loop
    // forever trying to evict from an empty cache.
    this.maxSize = Math.max(1, maxSize);
    this.cache = new Map();
  }

  /**
   * Get cached data by key.
   * Updates access order for LRU eviction.
   */
  get(key: string): T | undefined {
    const entry = this.cache.get(key);
    if (entry === undefined) {
      return undefined;
    }
    // Re-insert to move the key to the most-recently-used position.
    this.cache.delete(key);
    this.cache.set(key, entry);
    return entry.data;
  }

  /**
   * Set cached data by key.
   * Evicts least-recently-used entries if at capacity.
   */
  set(key: string, data: T): void {
    // Delete first so an existing key moves to the MRU position rather
    // than being updated in place (which would keep its old slot).
    this.cache.delete(key);
    // Evict LRU entries until there is room for the new one.
    while (this.cache.size >= this.maxSize) {
      const oldest = this.cache.keys().next().value;
      if (oldest === undefined) {
        break; // defensive: unreachable while size > 0
      }
      this.cache.delete(oldest);
    }
    this.cache.set(key, { data, timestamp: Date.now() });
  }

  /**
   * Check if key exists in cache (does not update access order).
   */
  has(key: string): boolean {
    return this.cache.has(key);
  }

  /**
   * Remove a specific key from cache.
   * @returns true if the key existed and was removed
   */
  delete(key: string): boolean {
    return this.cache.delete(key);
  }

  /**
   * Clear all cached data.
   */
  clear(): void {
    this.cache.clear();
  }

  /**
   * Get current cache size.
   */
  get size(): number {
    return this.cache.size;
  }

  /**
   * Get all keys in access order (oldest first).
   */
  get keys(): string[] {
    return [...this.cache.keys()];
  }
}
/**
 * Options for creating a message batcher.
 */
export interface MessageBatcherOptions {
  /** Maximum messages to accumulate before an immediate, synchronous flush. */
  batchSize: number;
  /** Maximum time (ms) to wait before a timed flush, counted from the first message of a batch. */
  maxWaitMs: number;
}
/**
 * Message batcher for efficient WebSocket message processing.
 * Groups incoming messages into batches for optimized rendering.
 *
 * A batch is delivered either when it reaches `batchSize` (synchronously,
 * inside `add`) or when `maxWaitMs` elapses after the first queued message.
 *
 * @typeParam T - Type of message to batch
 *
 * @example
 * ```tsx
 * const batcher = createMessageBatcher<ChatMessage>(
 * (messages) => {
 * // Process batch of messages
 * chatStore.addMessages(messages);
 * },
 * { batchSize: 10, maxWaitMs: 50 }
 * );
 *
 * // Add messages as they arrive
 * websocket.on('message', (msg) => batcher.add(msg));
 *
 * // Flush remaining on disconnect
 * websocket.on('close', () => batcher.flush());
 * ```
 */
export function createMessageBatcher<T>(
  callback: (messages: T[]) => void,
  options: MessageBatcherOptions = { batchSize: 10, maxWaitMs: 50 }
): {
  add: (message: T) => void;
  flush: () => void;
  clear: () => void;
  size: () => number;
} {
  let pending: T[] = [];
  let timer: ReturnType<typeof setTimeout> | null = null;

  // Cancel any scheduled timed flush.
  const cancelTimer = (): void => {
    if (timer !== null) {
      clearTimeout(timer);
      timer = null;
    }
  };

  // Deliver the queued messages (if any), then reset all batching state.
  const flush = (): void => {
    if (pending.length > 0) {
      callback(pending.slice());
      pending = [];
    }
    cancelTimer();
  };

  const add = (message: T): void => {
    pending.push(message);
    if (pending.length >= options.batchSize) {
      flush();
      return;
    }
    // First message of a new batch starts the wait timer.
    if (timer === null) {
      timer = setTimeout(flush, options.maxWaitMs);
    }
  };

  return {
    add,
    flush,
    clear: (): void => {
      pending = [];
      cancelTimer();
    },
    size: (): number => pending.length,
  };
}
/**
 * Memoization helper for message content parsing.
 * Caches parsed content to avoid re-parsing on re-renders.
 *
 * Fix: the cache is keyed by message id alone, but a message's content can
 * change between renders (e.g. a streaming assistant message updating in
 * place). The last-seen content per id is tracked so a stale parse is
 * invalidated and re-parsed when content changes.
 *
 * @param messageId - Unique message identifier
 * @param content - Raw content to parse
 * @param parser - Parsing function (should be pure for memoization to be valid)
 * @param cache - Optional cache instance to use
 * @returns Parsed content
 */
export function useMemoizedContent<T>(
  messageId: string,
  content: string,
  parser: (content: string) => T,
  cache?: MessageCache<T>
): T {
  // Use provided cache or lazily create a default one
  const cacheRef = useRef<MessageCache<T> | undefined>(undefined);
  // Tracks the content string that produced each cached parse, so a content
  // change for the same message id invalidates the cached value.
  const contentRef = useRef<Map<string, string>>(new Map());
  if (!cacheRef.current && !cache) {
    cacheRef.current = new MessageCache<T>(200);
  }
  const activeCache = cache ?? cacheRef.current!;
  // Cache hit is only valid when the content is unchanged.
  const cached = activeCache.get(messageId);
  if (cached !== undefined && contentRef.current.get(messageId) === content) {
    return cached;
  }
  // Parse and cache. NOTE: contentRef grows with distinct message ids while
  // the LRU cache evicts; string entries at chat scale make this acceptable.
  const parsed = parser(content);
  activeCache.set(messageId, parsed);
  contentRef.current.set(messageId, content);
  return parsed;
}
/**
 * Creates a stable message key for React rendering.
 * Handles potential duplicate IDs by incorporating index.
 *
 * @param id - Message ID
 * @param index - Message index in list
 * @returns Stable key string of the form "<id>-<index>"
 */
export function createMessageKey(id: string, index: number): string {
  return [id, index].join('-');
}
/**
 * Calculates the visible range of messages for a given viewport.
 * Useful for lazy loading or prefetching.
 *
 * Walks the cumulative heights in a single pass (previously two full
 * passes). Also fixes the case where scrollTop lies beyond the total
 * content height: the start index now clamps to the last rows instead of
 * incorrectly staying at 0.
 *
 * @param scrollTop - Current scroll position (px from the top of the list)
 * @param containerHeight - Height of visible container (px)
 * @param messages - Array of messages with heights
 * @param overscan - Number of extra items to include on each side
 * @returns Object with start and end indices of visible range
 *          (start 0 / end -1 for an empty list, matching prior behavior)
 */
export function calculateVisibleRange(
  scrollTop: number,
  containerHeight: number,
  messages: VirtualizedMessageItem[],
  overscan: number = 3
): { start: number; end: number } {
  if (messages.length === 0) {
    return { start: 0, end: -1 };
  }
  const viewportBottom = scrollTop + containerHeight;
  let offset = 0;
  let start = -1; // -1 = first visible row not found yet
  let end = messages.length - 1;
  for (let i = 0; i < messages.length; i++) {
    const next = offset + messages[i].height;
    // First row whose bottom edge passes scrollTop is the first visible row.
    if (start === -1 && next > scrollTop) {
      start = Math.max(0, i - overscan);
    }
    // First row whose bottom edge reaches the viewport bottom is the last.
    if (next >= viewportBottom) {
      end = Math.min(messages.length - 1, i + overscan);
      break;
    }
    offset = next;
  }
  if (start === -1) {
    // scrollTop is beyond the total content height: show the tail.
    start = Math.max(0, messages.length - 1 - overscan);
  }
  return { start, end };
}
/**
 * Debounced scroll handler factory.
 * Prevents excessive re-renders during fast scrolling.
 *
 * Every call restarts the timer; only the most recent scroll position is
 * delivered, `delay` ms after the last call.
 *
 * @param callback - Function to call with scroll position
 * @param delay - Debounce delay in ms
 * @returns Debounced scroll handler
 */
export function createDebouncedScrollHandler(
  callback: (scrollTop: number) => void,
  delay: number = 100
): (scrollTop: number) => void {
  let pending: ReturnType<typeof setTimeout> | null = null;
  let latest = 0;

  return (scrollTop: number): void => {
    latest = scrollTop;
    // Cancel the previous timer so only the final position fires.
    if (pending !== null) {
      clearTimeout(pending);
    }
    pending = setTimeout(() => {
      callback(latest);
      pending = null;
    }, delay);
  };
}
/**
 * Shorter aliases for external consumers of the message types.
 */
export type {
  VirtualizedMessageItem as MessageItem,
  VirtualizedMessageListProps as MessageListProps,
};