Better handle multiple AI responses in one question.

2025-12-16 12:46:44 -05:00
parent a5ed4076b7
commit 1a0351aeef
5 changed files with 75 additions and 57 deletions


@@ -11,6 +11,8 @@ export type LLMMessage = {
 	role: 'assistant' | 'system' | 'user';
 	/** Message content */
 	content: string | any;
+	/** Timestamp */
+	timestamp: number;
 } | {
 	/** Tool call */
 	role: 'tool';
@@ -24,6 +26,8 @@ export type LLMMessage = {
 	content: undefined | string;
 	/** Tool error */
 	error: undefined | string;
+	/** Timestamp */
+	timestamp: number;
 }
 export type LLMOptions = {
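
For context, a minimal sketch (not part of this commit) of why assistant messages now carry a timestamp: when several AI responses arrive for a single question, a caller can order them by time and fold consecutive assistant turns into one reply. The simplified ChatMessage shape and the mergeAssistantRuns helper below are illustrative assumptions, not the library's API.

// Illustrative sketch only: a simplified message shape with the new timestamp field.
type ChatMessage = {
	role: 'assistant' | 'system' | 'user' | 'tool';
	content: string;
	/** Unix epoch milliseconds, as added in this commit */
	timestamp: number;
};

// Hypothetical helper: sort by timestamp, then merge back-to-back assistant messages
// so multiple AI responses to one question read as a single answer.
function mergeAssistantRuns(history: ChatMessage[]): ChatMessage[] {
	const merged: ChatMessage[] = [];
	for (const msg of [...history].sort((a, b) => a.timestamp - b.timestamp)) {
		const last = merged[merged.length - 1];
		if (last && last.role === 'assistant' && msg.role === 'assistant') {
			last.content += '\n\n' + msg.content; // append the follow-up response
		} else {
			merged.push({...msg});
		}
	}
	return merged;
}
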
@@ -125,7 +129,7 @@ export class LLM {
 		const recent = keep == 0 ? [] : history.slice(-keep),
 			process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
 		const summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), 250, options);
-		return [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];
+		return [{role: 'assistant', content: `Conversation Summary: ${summary}`, timestamp: Date.now()}, ...recent];
 	}
 	/**
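
For readers skimming the hunk above: the compaction path keeps the most recent messages (the keep count) verbatim, summarizes the older user/assistant turns, and now stamps the synthetic summary message with Date.now() so it fits the timestamped LLMMessage type. Below is a standalone sketch of that flow under stated assumptions: the summarizer is passed in as a plain function (the real code calls this.summarize with an options argument), and 250 is whatever length budget the library uses.

type Msg = {role: 'assistant' | 'system' | 'user' | 'tool'; content: string; timestamp: number};

// Standalone sketch of the history-compaction step shown in the diff.
async function compactHistory(
	history: Msg[],
	keep: number,
	summarize: (text: string, limit: number) => Promise<string>
): Promise<Msg[]> {
	// Keep the most recent `keep` messages untouched; everything older is summarized.
	const recent = keep == 0 ? [] : history.slice(-keep),
		older = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
	const summary = await summarize(older.map(m => `${m.role}: ${m.content}`).join('\n\n'), 250);
	// The summary is injected as a synthetic assistant message, now carrying a timestamp.
	return [{role: 'assistant', content: `Conversation Summary: ${summary}`, timestamp: Date.now()}, ...recent];
}
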