ai-utils/src/open-ai.ts

import {OpenAI as openAI} from 'openai';
import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, clean} from '@ztimson/utils';
import {AbortablePromise, Ai} from './ai.ts';
import {LLMMessage, LLMRequest} from './llm.ts';
import {LLMProvider} from './provider.ts';
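
/**
 * {@link LLMProvider} implementation backed by the OpenAI chat completions API, or any
 * OpenAI-compatible endpoint when a custom host is supplied.
 */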
export class OpenAi extends LLMProvider {
    client!: openAI;

    constructor(public readonly ai: Ai, public readonly host: string | null, public readonly token: string, public model: string) {
        super();
        this.client = new openAI(clean({
            baseURL: host,
            apiKey: token
        }));
    }
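
    /**
     * Convert OpenAI-format message history into the library's standard {@link LLMMessage}
     * shape: assistant `tool_calls` are expanded into individual `tool` records and matching
     * tool results are folded back into them. Missing timestamps are backfilled.
     */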
    private toStandard(history: any[]): LLMMessage[] {
        for(let i = 0; i < history.length; i++) {
            const h = history[i];
            if(h.role === 'assistant' && h.tool_calls) {
                // Expand each tool call into its own standard `tool` record
                const tools = h.tool_calls.map((tc: any) => ({
                    role: 'tool',
                    id: tc.id,
                    name: tc.function.name,
                    args: JSONAttemptParse(tc.function.arguments, {}),
                    timestamp: h.timestamp
                }));
                history.splice(i, 1, ...tools);
                i += tools.length - 1;
            } else if(h.role === 'tool' && h.content) {
                // Fold the tool result into the record created for its originating call
                const record = history.find(h2 => h.tool_call_id == h2.id);
                if(record) {
                    if(h.content.includes('"error":')) record.error = h.content;
                    else record.content = h.content;
                }
                history.splice(i, 1);
                i--;
            }
            // Guard against i becoming -1 after removing the first element
            if(history[i] && !history[i].timestamp) history[i].timestamp = Date.now();
        }
        return history;
    }
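
    /**
     * Convert standard {@link LLMMessage} history back into the OpenAI wire format: each
     * `tool` record becomes an assistant message carrying the tool call plus a separate
     * `tool` result message; timestamps are stripped.
     */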
    private fromStandard(history: LLMMessage[]): any[] {
        return history.reduce((result, h) => {
            if(h.role === 'tool') {
                result.push({
                    role: 'assistant',
                    content: null,
                    tool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],
                    refusal: null,
                    annotations: []
                }, {
                    role: 'tool',
                    tool_call_id: h.id,
                    content: h.error || h.content
                });
            } else {
                const {timestamp, ...rest} = h;
                result.push(rest);
            }
            return result;
        }, [] as any[]);
    }
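
    /**
     * Send a prompt to the model, automatically executing any requested tools and feeding
     * their results back until the model returns a final answer. Resolves with the final
     * assistant text; when `options.history` is provided it is rewritten in place with the
     * standardized conversation. The returned promise exposes `abort()` to cancel the request.
     */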
    ask(message: string, options: LLMRequest = {}): AbortablePromise<string> {
        const controller = new AbortController();
        return Object.assign(new Promise<any>(async (res, rej) => {
            try {
                // Prepend the system prompt even when no history array was provided
                const standard: LLMMessage[] = [...(options.history || [])];
                if(options.system && standard[0]?.role != 'system') standard.unshift({role: 'system', content: options.system, timestamp: Date.now()});
                let history = this.fromStandard([...standard, {role: 'user', content: message, timestamp: Date.now()}]);
                const tools = options.tools || this.ai.options.llm?.tools || [];
                const requestParams: any = {
                    model: options.model || this.model,
                    messages: history,
                    stream: !!options.stream,
                    // `??` so an explicit zero is not silently replaced by the defaults
                    max_tokens: options.max_tokens ?? this.ai.options.llm?.max_tokens ?? 4096,
                    temperature: options.temperature ?? this.ai.options.llm?.temperature ?? 0.7,
                    tools: tools.map(t => ({
                        type: 'function',
                        function: {
                            name: t.name,
                            description: t.description,
                            parameters: {
                                type: 'object',
                                properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
                                required: t.args ? Object.entries(t.args).filter(e => e[1].required).map(e => e[0]) : []
                            }
                        }
                    }))
                };
                let resp: any, isFirstMessage = true;
                do {
                    resp = await this.client.chat.completions.create(requestParams).catch(err => {
                        err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
                        throw err;
                    });
                    if(options.stream) {
                        if(!isFirstMessage) options.stream({text: '\n\n'});
                        else isFirstMessage = false;
                        // Accumulate streamed chunks into a normal completion-shaped response
                        resp.choices = [{message: {role: 'assistant', content: '', tool_calls: []}}];
                        for await (const chunk of resp) {
                            if(controller.signal.aborted) break;
                            if(chunk.choices[0].delta.content) {
                                resp.choices[0].message.content += chunk.choices[0].delta.content;
                                options.stream({text: chunk.choices[0].delta.content});
                            }
                            if(chunk.choices[0].delta.tool_calls) {
                                // Tool calls stream in fragments; merge by index so the argument
                                // strings are concatenated instead of overwritten
                                for(const tc of chunk.choices[0].delta.tool_calls) {
                                    const existing = resp.choices[0].message.tool_calls[tc.index];
                                    if(!existing) {
                                        resp.choices[0].message.tool_calls[tc.index] = {
                                            id: tc.id,
                                            type: 'function',
                                            function: {name: tc.function?.name || '', arguments: tc.function?.arguments || ''}
                                        };
                                    } else {
                                        if(tc.function?.name) existing.function.name += tc.function.name;
                                        if(tc.function?.arguments) existing.function.arguments += tc.function.arguments;
                                    }
                                }
                            }
                        }
                    }
                    const toolCalls = resp.choices[0].message.tool_calls || [];
                    if(toolCalls.length && !controller.signal.aborted) {
                        // Run every requested tool and feed the results back for another round
                        history.push(resp.choices[0].message);
                        const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
                            const tool = tools?.find(findByProp('name', toolCall.function.name));
                            if(options.stream) options.stream({tool: toolCall.function.name});
                            if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
                            try {
                                const args = JSONAttemptParse(toolCall.function.arguments, {});
                                const result = await tool.fn(args, options.stream, this.ai);
                                return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};
                            } catch (err: any) {
                                return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};
                            }
                        }));
                        history.push(...results);
                        requestParams.messages = history;
                    }
                } while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);
                history.push({role: 'assistant', content: resp.choices[0].message.content || ''});
                history = this.toStandard(history);
                if(options.stream) options.stream({done: true});
                if(options.history) options.history.splice(0, options.history.length, ...history);
                res(history.at(-1)?.content);
            } catch(err) {
                // Without this, a failure inside the async executor would leave the promise pending forever
                rej(err);
            }
        }), {abort: () => controller.abort()});
    }
}
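
// Usage sketch, not part of the library: shows how `ask` might be called with streaming and a
// shared history. The `Ai` instance, model name and environment variable are illustrative
// assumptions; only the option names come from the implementation above.
//
// const provider = new OpenAi(ai, null, process.env.OPENAI_API_KEY!, 'gpt-4o-mini');
// const history: LLMMessage[] = [];
// const answer = await provider.ask('Summarize the latest release notes', {
//     system: 'You are a concise assistant',
//     history,                                 // rewritten in place with the standardized messages
//     stream: chunk => { if(chunk.text) process.stdout.write(chunk.text); }
// });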