@@ -1,5 +1,5 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
-import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
+import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, deepCopy} from '@ztimson/utils';
 import {Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
 import {AbortablePromise, LLMProvider} from './provider.ts';
@@ -52,6 +52,7 @@ export class Anthropic extends LLMProvider {
 		const controller = new AbortController();
 		const response = new Promise<any>(async (res, rej) => {
 			let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
+			const original = deepCopy(history);
 			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
 			const requestParams: any = {
 				model: options.model || this.model,
@@ -73,11 +74,11 @@ export class Anthropic extends LLMProvider {
 			};
 
 			let resp: any;
-			const loopMessages: any[] = [];
+			const assistantMessages: string[] = [];
 			do {
 				resp = await this.client.messages.create(requestParams);
 				if(options.stream) {
-					if(loopMessages.length) options.stream({text: '\n\n'});
+					if(assistantMessages.length) options.stream({text: '\n\n'});
 					resp.content = [];
 					for await (const chunk of resp) {
 						if(controller.signal.aborted) break;
@@ -104,10 +105,12 @@ export class Anthropic extends LLMProvider {
 					}
 				}
 
-				loopMessages.push({role: 'assistant', content: resp.content, timestamp: Date.now()});
+				const textContent = resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
+				if(textContent) assistantMessages.push(textContent);
 				const toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');
 				if(toolCalls.length && !controller.signal.aborted) {
 					history.push({role: 'assistant', content: resp.content});
+					original.push({role: 'assistant', content: resp.content});
 					const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
 						const tool = options.tools?.find(findByProp('name', toolCall.name));
 						if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
@@ -118,18 +121,13 @@ export class Anthropic extends LLMProvider {
 							return {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};
 						}
 					}));
-					const userMsg = {role: 'user', content: results};
-					history.push(userMsg);
-					loopMessages.push({...userMsg, timestamp: Date.now()});
+					history.push({role: 'user', content: results});
+					original.push({role: 'user', content: results});
 					requestParams.messages = history;
 				}
 			} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
 
-			const combinedContent = loopMessages.filter(m => m.role === 'assistant')
-				.map(m => m.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n'))
-				.filter(c => c).join('\n\n');
 			if(options.stream) options.stream({done: true});
-			res(this.toStandard([...history, {role: 'assistant', content: combinedContent, timestamp: Date.now()}]));
+			res(this.toStandard([...original, {role: 'assistant', content: assistantMessages.join('\n\n'), timestamp: Date.now()}]));
 		});
 
 		return Object.assign(response, {abort: () => controller.abort()});
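
The thrust of the patch: `history` can be rewritten by the optional compress step before it is sent to the model, so a `deepCopy` snapshot (`original`) now mirrors every push, and the resolved conversation is built from that untouched copy plus the accumulated assistant text. A minimal standalone sketch of the idea, assuming only that `deepCopy` from @ztimson/utils returns a deep clone; the message shape and the trimming step below are illustrative, not the library's API:

import {deepCopy} from '@ztimson/utils';

type Msg = {role: string; content: any; timestamp?: number};

let history: Msg[] = [{role: 'user', content: 'Hello', timestamp: Date.now()}];
const original: Msg[] = deepCopy(history);                         // snapshot before compression mutates history

history = history.slice(-1);                                       // stand-in for the compress step dropping context
history.push({role: 'assistant', content: '[tool_use block]'});    // what the model sends back mid-loop
original.push({role: 'assistant', content: '[tool_use block]'});   // mirrored so the snapshot stays complete

// The caller receives the full, uncompressed conversation:
const result: Msg[] = [...original, {role: 'assistant', content: 'Final answer', timestamp: Date.now()}];
console.log(result.length);                                        // 3: user message, tool-use turn, final answer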
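
The unchanged `return Object.assign(response, {abort: () => controller.abort()})` line is the abortable-promise pattern the provider relies on: the caller gets a value that is both awaitable and cancellable. A self-contained sketch of that pattern (the `abortableDelay` helper is hypothetical; unlike the provider above, which resolves with whatever it has when aborted, this sketch rejects, purely to make the cancellation visible):

function abortableDelay(ms: number): Promise<string> & {abort: () => void} {
	const controller = new AbortController();
	const promise = new Promise<string>((res, rej) => {
		const timer = setTimeout(() => res('done'), ms);
		controller.signal.addEventListener('abort', () => {
			clearTimeout(timer);            // stop the pending work
			rej(new Error('Aborted'));      // settle the promise so callers are not left hanging
		});
	});
	return Object.assign(promise, {abort: () => controller.abort()});  // awaitable value with an abort() handle
}

// Usage: await it like a normal promise, or cancel it early.
const task = abortableDelay(5_000);
task.abort();
task.catch(err => console.log(err.message));  // "Aborted"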