Fixed LLM multi-message responses
All checks were successful
Publish Library / Build NPM Project (push) Successful in 44s
Publish Library / Tag Version (push) Successful in 14s

This commit is contained in:
2025-12-17 19:59:34 -05:00
parent 1fe1e0cafe
commit c896b585d0
4 changed files with 17 additions and 24 deletions

View File

@@ -87,12 +87,12 @@ export class OpenAi extends LLMProvider {
}))
};
let resp: any;
const loopMessages: any[] = [];
let resp: any, isFirstMessage = true;
do {
resp = await this.client.chat.completions.create(requestParams);
if(options.stream) {
if(loopMessages.length) options.stream({text: '\n\n'});
if(!isFirstMessage) options.stream({text: '\n\n'});
else isFirstMessage = false;
resp.choices = [{message: {content: '', tool_calls: []}}];
for await (const chunk of resp) {
if(controller.signal.aborted) break;
@@ -106,8 +106,6 @@ export class OpenAi extends LLMProvider {
}
}
loopMessages.push({role: 'assistant', content: resp.choices[0].message.content || '', timestamp: Date.now()});
const toolCalls = resp.choices[0].message.tool_calls || [];
if(toolCalls.length && !controller.signal.aborted) {
history.push(resp.choices[0].message);
@@ -123,15 +121,12 @@ export class OpenAi extends LLMProvider {
}
}));
history.push(...results);
loopMessages.push(...results.map(r => ({...r, timestamp: Date.now()})));
requestParams.messages = history;
}
} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);
const combinedContent = loopMessages.filter(m => m.role === 'assistant')
.map(m => m.content).filter(c => c).join('\n\n');
if(options.stream) options.stream({done: true});
res(this.toStandard([...history, {role: 'assistant', content: combinedContent, timestamp: Date.now()}]));
res(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));
});
return Object.assign(response, {abort: () => controller.abort()});
}