From 27506d20af6eaa1112ddcc6d33f3d54a7dbc0f5f Mon Sep 17 00:00:00 2001
From: ztimson
Date: Wed, 11 Feb 2026 22:45:30 -0500
Subject: [PATCH] Fix anthropic message history

---
 package.json     |  2 +-
 src/antrhopic.ts | 41 ++++++++++++++++++++---------------------
 src/open-ai.ts   |  4 ++--
 3 files changed, 23 insertions(+), 24 deletions(-)

diff --git a/package.json b/package.json
index 3f03d7e..a0296fc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.5.5",
+  "version": "0.5.6",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",
diff --git a/src/antrhopic.ts b/src/antrhopic.ts
index 1e41b7d..0b61203 100644
--- a/src/antrhopic.ts
+++ b/src/antrhopic.ts
@@ -13,25 +13,25 @@ export class Anthropic extends LLMProvider {
 	}
 
 	private toStandard(history: any[]): LLMMessage[] {
-		for(let i = 0; i < history.length; i++) {
-			const orgI = i;
-			if(typeof history[orgI].content != 'string') {
-				if(history[orgI].role == 'assistant') {
-					history[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {
-						history.splice(i + 1, 0, {role: 'tool', id: c.id, name: c.name, args: c.input, timestamp: Date.now()});
-					});
-				} else if(history[orgI].role == 'user') {
-					history[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {
-						const h = history.find((h: any) => h.id == c.tool_use_id);
-						h[c.is_error ? 'error' : 'content'] = c.content;
-					});
-				}
-				history[orgI].content = history[orgI].content?.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
-				if(!history[orgI].content) history.splice(orgI, 1);
+		const timestamp = Date.now();
+		const messages: LLMMessage[] = [];
+		for(let h of history) {
+			if(typeof h.content == 'string') {
+				messages.push({timestamp, ...h});
+			} else {
+				const textContent = h.content?.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
+				if(textContent) messages.push({timestamp, role: h.role, content: textContent});
+				h.content.forEach((c: any) => {
+					if(c.type == 'tool_use') {
+						messages.push({timestamp, role: 'tool', id: c.id, name: c.name, args: c.input, content: undefined});
+					} else if(c.type == 'tool_result') {
+						const m: any = messages.findLast(m => (m).id == c.tool_use_id);
+						if(m) m[c.is_error ? 'error' : 'content'] = c.content;
+					}
+				});
 			}
-			if(!history[orgI].timestamp) history[orgI].timestamp = Date.now();
 		}
-		return history.filter(h => !!h.content);
+		return messages;
 	}
 
 	private fromStandard(history: LLMMessage[]): any[] {
@@ -50,8 +50,8 @@
 
 	ask(message: string, options: LLMRequest = {}): AbortablePromise {
 		const controller = new AbortController();
-		return Object.assign(new Promise(async (res, rej) => {
-			const history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
+		return Object.assign(new Promise(async (res) => {
+			let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 			const tools = options.tools || this.ai.options.llm?.tools || [];
 			const requestParams: any = {
 				model: options.model || this.model,
@@ -73,7 +73,6 @@
 			};
 
 			let resp: any, isFirstMessage = true;
-			const assistantMessages: string[] = [];
 			do {
 				resp = await this.client.messages.create(requestParams).catch(err => {
 					err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
@@ -130,7 +129,7 @@
 				}
 			} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
 			history.push({role: 'assistant', content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')});
-			this.toStandard(history);
+			history = this.toStandard(history);
 			if(options.stream) options.stream({done: true});
 
 			if(options.history) options.history.splice(0, options.history.length, ...history);
diff --git a/src/open-ai.ts b/src/open-ai.ts
index be04d7b..2271408 100644
--- a/src/open-ai.ts
+++ b/src/open-ai.ts
@@ -68,7 +68,7 @@ export class OpenAi extends LLMProvider {
 		const controller = new AbortController();
 		return Object.assign(new Promise(async (res, rej) => {
 			if(options.system && options.history?.[0]?.role != 'system') options.history?.splice(0, 0, {role: 'system', content: options.system, timestamp: Date.now()});
-			const history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
+			let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 			const tools = options.tools || this.ai.options.llm?.tools || [];
 			const requestParams: any = {
 				model: options.model || this.model,
@@ -133,7 +133,7 @@
 				}
 			} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);
 			history.push({role: 'assistant', content: resp.choices[0].message.content || ''});
-			this.toStandard(history);
+			history = this.toStandard(history);
 			if(options.stream) options.stream({done: true});
 
 			if(options.history) options.history.splice(0, options.history.length, ...history);