import {OpenAI as openAI} from 'openai';
import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
import {Ai} from './ai.ts';
import {LLMMessage, LLMRequest} from './llm.ts';
import {AbortablePromise, LLMProvider} from './provider.ts';

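/**
 * LLM provider backed by OpenAI's chat completions API. Translates between
 * this library's provider-agnostic `LLMMessage` format and OpenAI's wire
 * format, runs tool calls automatically, and supports streaming and aborting.
 *
 * Usage sketch (the model name and key source are illustrative placeholders):
 * ```ts
 * const provider = new OpenAi(ai, process.env.OPENAI_API_KEY!, 'gpt-4o-mini');
 * const request = provider.ask('Hello!', {stream: c => c.text && process.stdout.write(c.text)});
 * const history = await request; // or request.abort() to cancel mid-flight
 * ```
 */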
export class OpenAi extends LLMProvider {
	client!: openAI;

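	/**
	 * @param ai Parent {@link Ai} instance supplying shared options & tools
	 * @param apiToken OpenAI API key used to authenticate the client
	 * @param model Default model; can be overridden per-request via `options.model`
	 */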
	constructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {
		super();
		this.client = new openAI({apiKey: apiToken});
	}

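	/**
	 * Convert OpenAI-format history into the provider-agnostic `LLMMessage`
	 * format: assistant `tool_calls` are expanded into standalone 'tool'
	 * records, and the matching tool result messages are folded into those
	 * records as `content` (or `error`).
	 */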
	private toStandard(history: any[]): LLMMessage[] {
		for(let i = 0; i < history.length; i++) {
			const h = history[i];
			if(h.role === 'assistant' && h.tool_calls) {
				// Expand each tool call into a standalone standard 'tool' record
				const tools = h.tool_calls.map((tc: any) => ({
					role: 'tool',
					id: tc.id,
					name: tc.function.name,
					args: JSONAttemptParse(tc.function.arguments, {}),
					timestamp: h.timestamp
				}));
				history.splice(i, 1, ...tools);
				i += tools.length - 1;
			} else if(h.role === 'tool' && h.content) {
				// Fold the tool result into the 'tool' record created above
				const record = history.find(h2 => h.tool_call_id == h2.id);
				if(record) {
					if(h.content.includes('"error":')) record.error = h.content;
					else record.content = h.content;
				}
				history.splice(i, 1);
				i--;
			}
			// Guard against i === -1 after a splice, then backfill missing timestamps
			if(history[i] && !history[i].timestamp) history[i].timestamp = Date.now();
		}
		return history;
	}

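	/**
	 * Convert provider-agnostic history back into OpenAI's wire format: each
	 * 'tool' record becomes an assistant message carrying the `tool_calls`
	 * entry plus the matching 'tool' result message.
	 */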
	private fromStandard(history: LLMMessage[]): any[] {
		return history.reduce((result, h) => {
			if(h.role === 'tool') {
				result.push({
					role: 'assistant',
					content: null,
					tool_calls: [{id: h.id, type: 'function', function: {name: h.name, arguments: JSON.stringify(h.args)}}],
					refusal: null,
					annotations: [],
					timestamp: h.timestamp
				}, {
					role: 'tool',
					tool_call_id: h.id,
					content: h.error || h.content,
					timestamp: Date.now()
				});
			} else {
				result.push(h);
			}
			return result;
		}, [] as any[]);
	}

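	/**
	 * Send a message to the model and resolve with the updated history in
	 * standard format. Tool calls are executed and fed back to the model in a
	 * loop until it returns a plain response; chunks are emitted through
	 * `options.stream` when provided, and the returned promise exposes
	 * `abort()` to cancel mid-flight.
	 */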
	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
		const controller = new AbortController();
		const response = new Promise<LLMMessage[]>(async (res, rej) => {
			try {
				let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
				if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);

				const requestParams: any = {
					model: options.model || this.model,
					messages: history,
					stream: !!options.stream,
					// ?? instead of || so explicit zero values aren't clobbered by the defaults
					max_tokens: options.max_tokens ?? this.ai.options.max_tokens ?? 4096,
					temperature: options.temperature ?? this.ai.options.temperature ?? 0.7,
					tools: (options.tools || this.ai.options.tools || []).map(t => ({
						type: 'function',
						function: {
							name: t.name,
							description: t.description,
							parameters: {
								type: 'object',
								properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
								required: t.args ? Object.entries(t.args).filter(e => e[1].required).map(e => e[0]) : []
							}
						}
					}))
				};

				let resp: any;
				const loopMessages: any[] = [];
				do {
					resp = await this.client.chat.completions.create(requestParams);
					if(options.stream) {
						if(loopMessages.length) options.stream({text: '\n\n'});
						// Accumulate streamed chunks into a completion-shaped object
						resp.choices = [{message: {content: '', tool_calls: []}}];
						for await (const chunk of resp) {
							if(controller.signal.aborted) break;
							if(chunk.choices[0].delta.content) {
								resp.choices[0].message.content += chunk.choices[0].delta.content;
								options.stream({text: chunk.choices[0].delta.content});
							}
							// Tool calls arrive as fragments; merge each delta by index
							// rather than overwriting previously received fragments
							for(const tc of chunk.choices[0].delta.tool_calls || []) {
								const existing = resp.choices[0].message.tool_calls[tc.index];
								if(existing) existing.function.arguments += tc.function?.arguments || '';
								else resp.choices[0].message.tool_calls[tc.index] = {id: tc.id, type: 'function', function: {name: tc.function?.name || '', arguments: tc.function?.arguments || ''}};
							}
						}
					}

					loopMessages.push({role: 'assistant', content: resp.choices[0].message.content || '', timestamp: Date.now()});

					const toolCalls = resp.choices[0].message.tool_calls || [];
					if(toolCalls.length && !controller.signal.aborted) {
						history.push({...resp.choices[0].message, timestamp: Date.now()});
						const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
							// Search the same tool set that was advertised to the model
							const tool = (options.tools || this.ai.options.tools || []).find(findByProp('name', toolCall.function.name));
							if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}', timestamp: Date.now()};
							try {
								const args = JSONAttemptParse(toolCall.function.arguments, {});
								const result = await tool.fn(args, this.ai);
								return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result), timestamp: Date.now()};
							} catch(err: any) {
								return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'}), timestamp: Date.now()};
							}
						}));
						history.push(...results);
						loopMessages.push(...results);
						requestParams.messages = history;
					}
				} while(!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);

				const combinedContent = loopMessages.filter(m => m.role === 'assistant')
					.map(m => m.content).filter(c => c).join('\n\n');
				if(options.stream) options.stream({done: true});
				res(this.toStandard([...history, {role: 'assistant', content: combinedContent, timestamp: Date.now()}]));
			} catch(err) {
				// Reject instead of leaving the error as an unhandled rejection
				rej(err);
			}
		});

		return Object.assign(response, {abort: () => controller.abort()});
	}
}