import {Anthropic as anthropic} from '@anthropic-ai/sdk';
import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
import {AbortablePromise, Ai} from './ai.ts';
import {LLMMessage, LLMRequest} from './llm.ts';
import {LLMProvider} from './provider.ts';

export class Anthropic extends LLMProvider {
	client: anthropic;

	constructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {
		super();
		this.client = new anthropic({apiKey: apiToken});
	}

	/** Convert Anthropic content blocks back into the provider-agnostic LLMMessage format. */
	private toStandard(history: any[]): LLMMessage[] {
		for(let i = 0; i < history.length; i++) {
			if(Array.isArray(history[i].content)) {
				if(history[i].role == 'assistant') {
					// Each tool_use block becomes its own `tool` message directly after the assistant turn
					history[i].content.filter((c: any) => c.type == 'tool_use').forEach((c: any, offset: number) => {
						history.splice(i + 1 + offset, 0, {role: 'tool', id: c.id, name: c.name, args: c.input, timestamp: Date.now()});
					});
				} else if(history[i].role == 'user') {
					// Attach tool_result blocks to the matching `tool` message created above
					history[i].content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {
						const h = history.find((h: any) => h.id == c.tool_use_id);
						if(h) h[c.is_error ? 'error' : 'content'] = c.content;
					});
				}
				// Collapse the remaining text blocks into a single string; drop the message if nothing is left
				history[i].content = history[i].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
				if(!history[i].content) {
					history.splice(i--, 1);
					continue;
				}
			}
			if(!history[i].timestamp) history[i].timestamp = Date.now();
		}
		return history.filter(h => !!h.content);
	}

	/** Convert provider-agnostic LLMMessages into Anthropic's message/content-block format. */
	private fromStandard(history: LLMMessage[]): any[] {
		for(let i = 0; i < history.length; i++) {
			if(history[i].role == 'tool') {
				// Expand each `tool` message into the assistant tool_use / user tool_result pair Anthropic expects
				const h: any = history[i];
				history.splice(i, 1,
					{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},
					{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}
				);
				i++; // Skip the pair that was just inserted
			}
		}
		return history.map(({timestamp, ...h}) => h);
	}

	ask(message: string, options: LLMRequest = {}): AbortablePromise {
		const controller = new AbortController();
		return Object.assign(new Promise(async (res, rej) => {
			try {
				const history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
				const tools = options.tools || this.ai.options.llm?.tools || [];
				const requestParams: any = {
					model: options.model || this.model,
					max_tokens: options.max_tokens || this.ai.options.llm?.max_tokens || 4096,
					system: options.system || this.ai.options.llm?.system || '',
					temperature: options.temperature || this.ai.options.llm?.temperature || 0.7,
					tools: tools.map(t => ({
						name: t.name,
						description: t.description,
						input_schema: {
							type: 'object',
							properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
							required: t.args ? Object.entries(t.args).filter(([, arg]) => arg.required).map(([key]) => key) : []
						},
						fn: undefined // Strip the local handler before sending to the API
					})),
					messages: history,
					stream: !!options.stream,
				};

				let resp: any, isFirstMessage = true;
				do {
					resp = await this.client.messages.create(requestParams).catch(err => {
						err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
						throw err;
					});

					// Streaming mode: rebuild resp.content from the event stream while forwarding text chunks
					if(options.stream) {
						if(!isFirstMessage) options.stream({text: '\n\n'});
						else isFirstMessage = false;
						resp.content = [];
						for await (const chunk of resp) {
							if(controller.signal.aborted) break;
							if(chunk.type === 'content_block_start') {
								if(chunk.content_block.type === 'text') {
									resp.content.push({type: 'text', text: ''});
								} else if(chunk.content_block.type === 'tool_use') {
									resp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: ''});
								}
							} else if(chunk.type === 'content_block_delta') {
								if(chunk.delta.type === 'text_delta') {
									const text = chunk.delta.text;
									resp.content.at(-1).text += text;
									options.stream({text});
								} else if(chunk.delta.type === 'input_json_delta') {
									resp.content.at(-1).input += chunk.delta.partial_json;
								}
							} else if(chunk.type === 'content_block_stop') {
								// Tool arguments arrive as partial JSON; parse them once the block is complete
								const last = resp.content.at(-1);
								if(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};
							} else if(chunk.type === 'message_stop') {
								break;
							}
						}
					}

					// Run any tools requested by the model, then loop to send the results back
					const toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');
					if(toolCalls.length && !controller.signal.aborted) {
						history.push({role: 'assistant', content: resp.content});
						const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
							const tool = tools.find(findByProp('name', toolCall.name));
							if(options.stream) options.stream({tool: toolCall.name});
							if(!tool) return {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
							try {
								const result = await tool.fn(toolCall.input, options?.stream, this.ai);
								return {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};
							} catch (err: any) {
								return {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};
							}
						}));
						history.push({role: 'user', content: results});
						requestParams.messages = history;
					}
				} while(!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));

				// Record the final assistant text, normalize the transcript & notify listeners
				history.push({role: 'assistant', content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')});
				const standardized = this.toStandard(history);
				if(options.stream) options.stream({done: true});
				if(options.history) options.history.splice(0, options.history.length, ...standardized);
				res(standardized.at(-1)?.content);
			} catch(err) {
				rej(err);
			}
		}), {abort: () => controller.abort()});
	}
}
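
// Usage sketch (illustrative only, kept commented out): shows how `Anthropic.ask` might be driven
// with streaming and a shared history buffer. The `Ai` constructor shape is an assumption inferred
// from the option lookups above (`ai.options.llm?.system`, `ai.options.llm?.tools`), and the model
// id and environment variable name are placeholders.
//
// const ai = new Ai({llm: {system: 'You are a helpful assistant.', tools: []}});
// const llm = new Anthropic(ai, process.env.ANTHROPIC_API_KEY!, '<anthropic-model-id>');
// const history: LLMMessage[] = [];
// const req = llm.ask('What is 2 + 2?', {
// 	history,                           // Replaced in place with the standardized transcript on completion
// 	stream: chunk => {                 // Receives {text}, {tool} and {done: true} events
// 		if(chunk.text) process.stdout.write(chunk.text);
// 	}
// });
// req.then(answer => console.log('\n' + answer));
// // req.abort();                      // Cancels the request/tool loop mid-flight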