Improve handling of multiple AI responses within a single question: combine assistant messages from the tool-call loop into one response and add timestamps to history entries.
All checks were successful
Publish Library / Build NPM Project (push) Successful in 33s
Publish Library / Tag Version (push) Successful in 8s

This commit is contained in:
2025-12-16 12:46:44 -05:00
parent a5ed4076b7
commit 1a0351aeef
5 changed files with 75 additions and 57 deletions

View File

@@ -20,7 +20,8 @@ export class OpenAi extends LLMProvider {
role: 'tool',
id: tc.id,
name: tc.function.name,
args: JSONAttemptParse(tc.function.arguments, {})
args: JSONAttemptParse(tc.function.arguments, {}),
timestamp: h.timestamp
}));
history.splice(i, 1, ...tools);
i += tools.length - 1;
@@ -33,7 +34,7 @@ export class OpenAi extends LLMProvider {
history.splice(i, 1);
i--;
}
if(!history[i]?.timestamp) history[i].timestamp = Date.now();
}
return history;
}
@@ -47,10 +48,12 @@ export class OpenAi extends LLMProvider {
tool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],
refusal: null,
annotations: [],
timestamp: h.timestamp
}, {
role: 'tool',
tool_call_id: h.id,
content: h.error || h.content
content: h.error || h.content,
timestamp: Date.now()
});
} else {
result.push(h);
@@ -62,7 +65,7 @@ export class OpenAi extends LLMProvider {
ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
const controller = new AbortController();
const response = new Promise<any>(async (res, rej) => {
let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
const requestParams: any = {
@@ -85,44 +88,51 @@ export class OpenAi extends LLMProvider {
}))
};
// Tool call and streaming logic similar to other providers
let resp: any;
const loopMessages: any[] = [];
do {
resp = await this.client.chat.completions.create(requestParams);
// Implement streaming and tool call handling
if(options.stream) {
resp.choices = [];
if(loopMessages.length) options.stream({text: '\n\n'});
resp.choices = [{message: {content: '', tool_calls: []}}];
for await (const chunk of resp) {
if(controller.signal.aborted) break;
if(chunk.choices[0].delta.content) {
resp.choices[0].message.content += chunk.choices[0].delta.content;
options.stream({text: chunk.choices[0].delta.content});
}
if(chunk.choices[0].delta.tool_calls) {
resp.choices[0].message.tool_calls = chunk.choices[0].delta.tool_calls;
}
}
}
// Run tools
loopMessages.push({role: 'assistant', content: resp.choices[0].message.content || '', timestamp: Date.now()});
const toolCalls = resp.choices[0].message.tool_calls || [];
if(toolCalls.length && !controller.signal.aborted) {
history.push(resp.choices[0].message);
history.push({...resp.choices[0].message, timestamp: Date.now()});
const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
const tool = options.tools?.find(findByProp('name', toolCall.function.name));
if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}', timestamp: Date.now()};
try {
const args = JSONAttemptParse(toolCall.function.arguments, {});
const result = await tool.fn(args, this.ai);
return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};
return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result), timestamp: Date.now()};
} catch (err: any) {
return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};
return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'}), timestamp: Date.now()};
}
}));
history.push(...results);
loopMessages.push(...results);
requestParams.messages = history;
}
} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);
const combinedContent = loopMessages.filter(m => m.role === 'assistant')
.map(m => m.content).filter(c => c).join('\n\n');
if(options.stream) options.stream({done: true});
res(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));
res(this.toStandard([...history, {role: 'assistant', content: combinedContent, timestamp: Date.now()}]));
});
return Object.assign(response, {abort: () => controller.abort()});