Improve handling of multiple AI responses within a single question.
All checks were successful
Publish Library / Build NPM Project (push) Successful in 33s
Publish Library / Tag Version (push) Successful in 8s

2025-12-16 12:46:44 -05:00
parent a5ed4076b7
commit 1a0351aeef
5 changed files with 75 additions and 57 deletions
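
The change drops the separate merged array in toStandard and rewrites the Anthropic-format history in place: a synthetic 'tool' message is spliced in after each assistant tool_use block, every message gets a timestamp, and empty messages are filtered out at the end. On the ask() side, each iteration of the tool-use loop records its assistant output in a new loopMessages array, and all of that text is joined into the final assistant message, so multiple responses to one question come back as one combined answer. The standardized shape the code converges on looks roughly like the sketch below; the library's actual LLMMessage type is not in this diff, so these fields are inferred from how the code reads and writes messages, and the type name here is invented:

    // Inferred sketch only; the real LLMMessage definition lives elsewhere in the library.
    type StandardMessageSketch = {
        role: 'user' | 'assistant' | 'tool';
        content?: string;     // flattened text for user/assistant turns, or the tool's result
        timestamp?: number;   // now stamped on every message
        // present only on role: 'tool' entries:
        id?: string;          // the Anthropic tool_use id
        name?: string;        // tool name
        args?: any;           // the tool_use input
        error?: string;       // tool_result content when is_error was set
    };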


@@ -13,29 +13,25 @@ export class Anthropic extends LLMProvider {
     }
     private toStandard(history: any[]): LLMMessage[] {
-        const merged: any[] = [];
         for(let i = 0; i < history.length; i++) {
-            const msg = history[i];
-            if(typeof msg.content != 'string') {
-                if(msg.role == 'assistant') {
-                    msg.content.filter((c: any) => c.type == 'tool_use').forEach((c: any) => {
-                        merged.push({role: 'tool', id: c.id, name: c.name, args: c.input});
+            const orgI = i;
+            if(typeof history[orgI].content != 'string') {
+                if(history[orgI].role == 'assistant') {
+                    history[orgI].content.filter((c: any) => c.type == 'tool_use').forEach((c: any) => {
+                        i++;
+                        history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input, timestamp: Date.now()});
                     });
-                } else if(msg.role == 'user') {
-                    msg.content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {
-                        const h = merged.find((h: any) => h.id == c.tool_use_id);
-                        if(h) h[c.is_error ? 'error' : 'content'] = c.content;
+                } else if(history[orgI].role == 'user') {
+                    history[orgI].content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {
+                        const h = history.find((h: any) => h.id == c.tool_use_id);
+                        h[c.is_error ? 'error' : 'content'] = c.content;
                     });
                 }
-                msg.content = msg.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
+                history[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
             }
-            if(msg.content) {
-                const last = merged.at(-1);
-                if(last && last.role == 'assistant' && msg.role == 'assistant') last.content += '\n\n' + msg.content;
-                else merged.push({role: msg.role, content: msg.content});
-            }
+            if(!history[orgI].timestamp) history[orgI].timestamp = Date.now();
         }
-        return merged;
+        return history.filter(h => !!h.content);
     }
     private fromStandard(history: LLMMessage[]): any[] {
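
For illustration, here is how the new toStandard flattens a hypothetical Anthropic-format exchange; the input values are invented, but the shapes follow the code above. Note that the i++ inside the forEach both places the synthetic 'tool' entry right after its assistant turn and advances the loop past it, so the freshly spliced entry is never itself re-processed:

    const anthropicHistory = [
        {role: 'assistant', content: [
            {type: 'text', text: 'Let me check the weather.'},
            {type: 'tool_use', id: 'tu_1', name: 'weather', input: {city: 'Boston'}}
        ]},
        {role: 'user', content: [
            {type: 'tool_result', tool_use_id: 'tu_1', is_error: false, content: '72F'}
        ]}
    ];
    // toStandard(anthropicHistory) yields, roughly (timestamps elided):
    // [
    //   {role: 'assistant', content: 'Let me check the weather.'},
    //   {role: 'tool', id: 'tu_1', name: 'weather', args: {city: 'Boston'}, content: '72F'}
    // ]
    // The user turn's content flattens to '' (it held only a tool_result),
    // so the final filter(h => !!h.content) drops it.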
@@ -43,8 +39,8 @@ export class Anthropic extends LLMProvider {
             if(history[i].role == 'tool') {
                 const h: any = history[i];
                 history.splice(i, 1,
-                    {role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},
-                    {role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}
+                    {role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}], timestamp: h.timestamp},
+                    {role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}], timestamp: Date.now()}
                 )
                 i++;
             }
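
fromStandard is the inverse mapping: each standard 'tool' entry is expanded back into an Anthropic tool_use/tool_result pair, and with this change the assistant half keeps the entry's original timestamp. A sketch of one entry's expansion, with invented values:

    // {role: 'tool', id: 'tu_1', name: 'weather', args: {city: 'Boston'}, content: '72F', timestamp: 1734367604000}
    // is spliced back out as:
    // {role: 'assistant', content: [{type: 'tool_use', id: 'tu_1', name: 'weather', input: {city: 'Boston'}}], timestamp: 1734367604000}
    // {role: 'user', content: [{type: 'tool_result', tool_use_id: 'tu_1', is_error: false, content: '72F'}], timestamp: Date.now()}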
@@ -55,7 +51,7 @@ export class Anthropic extends LLMProvider {
     ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
        const controller = new AbortController();
        const response = new Promise<any>(async (res, rej) => {
-            let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+            let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
             if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
             const requestParams: any = {
                 model: options.model || this.model,
@@ -77,14 +73,11 @@ export class Anthropic extends LLMProvider {
};
let resp: any;
let isFirstMessage = true;
const loopMessages: any[] = [];
do {
resp = await this.client.messages.create(requestParams);
if(options.stream) {
if(!isFirstMessage) options.stream({text: '\n\n'});
isFirstMessage = false;
if(loopMessages.length) options.stream({text: '\n\n'});
resp.content = [];
for await (const chunk of resp) {
if(controller.signal.aborted) break;
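
The isFirstMessage flag is gone; whether to emit the '\n\n' separator between consecutive streamed responses is now derived from loopMessages, the same array the combined answer is later built from, instead of a separate boolean that had to be kept in sync with it:

    // Paraphrase of the new condition: stream a visual break only when an
    // earlier loop iteration has already recorded output in loopMessages.
    if(loopMessages.length) options.stream({text: '\n\n'});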
@@ -111,9 +104,10 @@ export class Anthropic extends LLMProvider {
                     }
                 }
+                loopMessages.push({role: 'assistant', content: resp.content, timestamp: Date.now()});
                 const toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');
                 if(toolCalls.length && !controller.signal.aborted) {
-                    history.push({role: 'assistant', content: resp.content});
+                    history.push({role: 'assistant', content: resp.content, timestamp: Date.now()});
                     const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
                         const tool = options.tools?.find(findByProp('name', toolCall.name));
                         if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
@@ -124,16 +118,18 @@ export class Anthropic extends LLMProvider {
                             return {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};
                         }
                     }));
-                    history.push({role: 'user', content: results});
+                    const userMsg = {role: 'user', content: results, timestamp: Date.now()};
+                    history.push(userMsg);
+                    loopMessages.push(userMsg);
                     requestParams.messages = history;
                 }
             } while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
+            const combinedContent = loopMessages.filter(m => m.role === 'assistant')
+                .map(m => m.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n'))
+                .filter(c => c).join('\n\n');
             if(options.stream) options.stream({done: true});
-            res(this.toStandard([...history, {
-                role: 'assistant',
-                content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')
-            }]));
+            res(this.toStandard([...history, {role: 'assistant', content: combinedContent, timestamp: Date.now()}]));
         });
         return Object.assign(response, {abort: () => controller.abort()});
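
Putting it together, a hypothetical call against the reworked ask(); 'provider' stands in for an already-constructed Anthropic instance, and the stream chunk and abort shapes follow the code above:

    // Hypothetical usage sketch, not library documentation.
    const reply = provider.ask('What is the weather in Boston?', {
        stream: chunk => { if(chunk.text) process.stdout.write(chunk.text); }
    });
    setTimeout(() => reply.abort(), 30_000);  // AbortablePromise: cancel a long request
    const history = await reply;              // standardized LLMMessage[], every entry
                                              // timestamped; multi-response answers arrive
                                              // as one combined assistant message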