5 Commits

SHA1        Message                                                Date
c896b585d0  Fixed LLM multi message responses                      2025-12-17 19:59:34 -05:00
1fe1e0cafe  Fixing message combination on anthropic                2025-12-16 16:11:13 -05:00
3aa4684923  Fixing message combination on anthropic                2025-12-16 13:07:03 -05:00
0730f5f3f9  Fixed timestamp breaking api calls                     2025-12-16 12:56:56 -05:00
1a0351aeef  Handle multiple AI responses in one question better.   2025-12-16 12:46:44 -05:00

All checks were successful on every commit: Publish Library / Build NPM Project (push) and Publish Library / Tag Version (push).
5 changed files with 55 additions and 48 deletions

package.json

@@ -1,6 +1,6 @@
 {
 	"name": "@ztimson/ai-utils",
-	"version": "0.1.16",
+	"version": "0.1.22",
 	"description": "AI Utility library",
 	"author": "Zak Timson",
 	"license": "MIT",

anthropic.ts

@@ -1,5 +1,5 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
-import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
+import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, deepCopy} from '@ztimson/utils';
 import {Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
 import {AbortablePromise, LLMProvider} from './provider.ts';
@@ -13,29 +13,25 @@ export class Anthropic extends LLMProvider {
 	}
 	private toStandard(history: any[]): LLMMessage[] {
-		const merged: any[] = [];
 		for(let i = 0; i < history.length; i++) {
-			const msg = history[i];
-			if(typeof msg.content != 'string') {
-				if(msg.role == 'assistant') {
-					msg.content.filter((c: any) => c.type == 'tool_use').forEach((c: any) => {
-						merged.push({role: 'tool', id: c.id, name: c.name, args: c.input});
+			const orgI = i;
+			if(typeof history[orgI].content != 'string') {
+				if(history[orgI].role == 'assistant') {
+					history[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {
+						i++;
+						history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input, timestamp: Date.now()});
 					});
-				} else if(msg.role == 'user') {
-					msg.content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {
-						const h = merged.find((h: any) => h.id == c.tool_use_id);
-						if(h) h[c.is_error ? 'error' : 'content'] = c.content;
+				} else if(history[orgI].role == 'user') {
+					history[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {
+						const h = history.find((h: any) => h.id == c.tool_use_id);
+						h[c.is_error ? 'error' : 'content'] = c.content;
 					});
 				}
-				msg.content = msg.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
-			}
-			if(msg.content) {
-				const last = merged.at(-1);
-				if(last && last.role == 'assistant' && msg.role == 'assistant') last.content += '\n\n' + msg.content;
-				else merged.push({role: msg.role, content: msg.content});
+				history[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
 			}
+			if(!history[orgI].timestamp) history[orgI].timestamp = Date.now();
 		}
-		return merged;
+		return history.filter(h => !!h.content);
 	}
 	private fromStandard(history: LLMMessage[]): any[] {
@@ -49,13 +45,14 @@ export class Anthropic extends LLMProvider {
 				i++;
 			}
 		}
-		return history;
+		return history.map(({timestamp, ...h}) => h);
 	}
 	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
 		const controller = new AbortController();
 		const response = new Promise<any>(async (res, rej) => {
-			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+			let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
+			const original = deepCopy(history);
 			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
 			const requestParams: any = {
 				model: options.model || this.model,
@@ -76,15 +73,15 @@ export class Anthropic extends LLMProvider {
 				stream: !!options.stream,
 			};
-			let resp: any;
-			let isFirstMessage = true;
+			let resp: any, isFirstMessage = true;
+			const assistantMessages: string[] = [];
 			do {
 				resp = await this.client.messages.create(requestParams);
 				// Streaming mode
 				if(options.stream) {
 					if(!isFirstMessage) options.stream({text: '\n\n'});
-					isFirstMessage = false;
+					else isFirstMessage = false;
 					resp.content = [];
 					for await (const chunk of resp) {
 						if(controller.signal.aborted) break;
@@ -111,9 +108,11 @@ export class Anthropic extends LLMProvider {
 					}
 				}
 				// Run tools
 				const toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');
 				if(toolCalls.length && !controller.signal.aborted) {
 					history.push({role: 'assistant', content: resp.content});
+					original.push({role: 'assistant', content: resp.content});
 					const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
 						const tool = options.tools?.find(findByProp('name', toolCall.name));
 						if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
@@ -130,10 +129,7 @@ export class Anthropic extends LLMProvider {
 			} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
 			if(options.stream) options.stream({done: true});
-			res(this.toStandard([...history, {
-				role: 'assistant',
-				content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')
-			}]));
+			res(this.toStandard([...history, {role: 'assistant', content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')}]));
 		});
 		return Object.assign(response, {abort: () => controller.abort()});
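
For illustration, a worked example of what the rewritten toStandard at the top of this file produces (hypothetical input, simplified shapes; timestamps elided in the output sketch):

// One Anthropic assistant turn that called a tool, then the user turn carrying the result.
const history: any[] = [
	{role: 'assistant', content: [
		{type: 'text', text: 'Checking the weather.'},
		{type: 'tool_use', id: 'tu_1', name: 'get_weather', input: {city: 'Toronto'}}
	]},
	{role: 'user', content: [
		{type: 'tool_result', tool_use_id: 'tu_1', is_error: false, content: '-5C, snow'}
	]}
];
// toStandard(history) splices the tool_use in as a standard 'tool' message (the inner
// i++ skips over it on the next iteration), folds the tool_result into it by id,
// collapses text blocks to plain strings, and the final filter drops the now-empty
// user wrapper:
// [
//   {role: 'assistant', content: 'Checking the weather.'},
//   {role: 'tool', id: 'tu_1', name: 'get_weather', args: {city: 'Toronto'}, content: '-5C, snow'}
// ]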

llm.ts

@@ -11,6 +11,8 @@ export type LLMMessage = {
 	role: 'assistant' | 'system' | 'user';
 	/** Message content */
 	content: string | any;
+	/** Timestamp */
+	timestamp?: number;
 } | {
 	/** Tool call */
 	role: 'tool';
@@ -24,6 +26,8 @@ export type LLMMessage = {
 	content: undefined | string;
 	/** Tool error */
 	error: undefined | string;
+	/** Timestamp */
+	timestamp?: number;
 }
 export type LLMOptions = {
@@ -125,7 +129,7 @@ export class LLM {
 		const recent = keep == 0 ? [] : history.slice(-keep),
 			process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
 		const summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), 250, options);
-		return [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];
+		return [{role: 'assistant', content: `Conversation Summary: ${summary}`, timestamp: Date.now()}, ...recent];
 	}
 	/**
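
Both LLMMessage variants now carry an optional timestamp. The providers backfill it in toStandard and strip it again in fromStandard, which appears to be the "Fixed timestamp breaking api calls" fix: the field is useful internally but should never reach a provider API. A minimal sketch of that round trip:

type Msg = {role: 'assistant' | 'system' | 'user', content: string, timestamp?: number};

// Internally every message gets stamped...
const history: Msg[] = [{role: 'user', content: 'Hello', timestamp: Date.now()}];

// ...and rest-destructuring drops the key from the outbound payload before the API call.
const outbound = history.map(({timestamp, ...h}) => h);
console.log(outbound); // [{role: 'user', content: 'Hello'}]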

ollama.ts

@@ -22,15 +22,17 @@ export class Ollama extends LLMProvider {
 				}
 			} else if(history[i].role == 'tool') {
 				const error = history[i].content.startsWith('{"error":');
-				history[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};
+				history[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content, timestamp: history[i].timestamp};
 			}
+			if(!history[i]?.timestamp) history[i].timestamp = Date.now();
 		}
 		return history;
 	}
 	private fromStandard(history: LLMMessage[]): any[] {
 		return history.map((h: any) => {
-			if(h.role != 'tool') return h;
+			const {timestamp, ...rest} = h;
+			if(h.role != 'tool') return rest;
 			return {role: 'tool', tool_name: h.name, content: h.error || h.content}
 		});
 	}
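
A note on the tool branch above: Ollama reports tool output as a plain string, so toStandard sniffs for a serialized error and uses a computed key to route the value to either error or content. A standalone sketch with hypothetical values:

const raw = '{"error": "city not found"}'; // what a failed tool call might return
const error = raw.startsWith('{"error":');
const msg = {role: 'tool', name: 'get_weather', args: {city: 'Nowhere'}, [error ? 'error' : 'content']: raw, timestamp: Date.now()};
// -> the value lands under `error`; a normal result would land under `content`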
@@ -39,7 +41,7 @@ export class Ollama extends LLMProvider {
 		const controller = new AbortController();
 		const response = new Promise<any>(async (res, rej) => {
 			let system = options.system || this.ai.options.system;
-			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+			let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 			if(history[0].roll == 'system') {
 				if(!system) system = history.shift();
 				else history.shift();
@@ -70,11 +72,12 @@ export class Ollama extends LLMProvider {
 				}))
 			}
 			// Run tool chains
-			let resp: any;
+			let resp: any, isFirstMessage = true;
 			do {
 				resp = await this.client.chat(requestParams);
 				if(options.stream) {
+					if(!isFirstMessage) options.stream({text: '\n\n'});
+					else isFirstMessage = false;
 					resp.message = {role: 'assistant', content: '', tool_calls: []};
 					for await (const chunk of resp) {
 						if(controller.signal.aborted) break;
@@ -87,7 +90,6 @@ export class Ollama extends LLMProvider {
 					}
 				}
 				// Run tools
 				if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
 					history.push(resp.message);
 					const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
@@ -105,9 +107,11 @@ export class Ollama extends LLMProvider {
 					requestParams.messages = history;
 				}
 			} while (!controller.signal.aborted && resp.message?.tool_calls?.length);
+			if(options.stream) options.stream({done: true});
+			res(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));
 		});
 		return Object.assign(response, {abort: () => controller.abort()});
 	}
 }
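
The isFirstMessage additions here mirror the Anthropic provider: when one ask() produces several assistant messages (text before and after tool calls, for example), streamed output gets a blank-line separator between them. A minimal standalone sketch of the pattern, assuming a simplified callback type:

type StreamCb = (chunk: {text?: string, done?: boolean}) => void;

async function streamTurns(turns: string[], stream: StreamCb) {
	let isFirstMessage = true;
	for(const turn of turns) {
		// Blank-line separator before every message except the first.
		if(!isFirstMessage) stream({text: '\n\n'});
		else isFirstMessage = false;
		stream({text: turn});
	}
	stream({done: true});
}

streamTurns(['Let me check.', 'It is -5C in Toronto.'], c => process.stdout.write(c.text ?? ''));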

openai.ts

@@ -20,7 +20,8 @@ export class OpenAi extends LLMProvider {
 					role: 'tool',
 					id: tc.id,
 					name: tc.function.name,
-					args: JSONAttemptParse(tc.function.arguments, {})
+					args: JSONAttemptParse(tc.function.arguments, {}),
+					timestamp: h.timestamp
 				}));
 				history.splice(i, 1, ...tools);
 				i += tools.length - 1;
@@ -33,7 +34,7 @@ export class OpenAi extends LLMProvider {
 				history.splice(i, 1);
 				i--;
 			}
+			if(!history[i]?.timestamp) history[i].timestamp = Date.now();
 		}
 		return history;
 	}
@@ -46,14 +47,15 @@ export class OpenAi extends LLMProvider {
 					content: null,
 					tool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],
 					refusal: null,
-					annotations: [],
+					annotations: []
 				}, {
 					role: 'tool',
 					tool_call_id: h.id,
 					content: h.error || h.content
 				});
 			} else {
-				result.push(h);
+				const {timestamp, ...rest} = h;
+				result.push(rest);
 			}
 			return result;
 		}, [] as any[]);
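
For reference, this reduce turns each standard 'tool' message back into the two-message shape the OpenAI chat API expects: an assistant message carrying tool_calls, then a role 'tool' result referencing it by id (with h.error taking precedence over h.content when the tool failed). A worked sketch with a hypothetical message:

const h = {role: 'tool', id: 'call_1', name: 'get_weather', args: {city: 'Toronto'}, content: '-5C, snow'};

// Expansion produced for the OpenAI payload (timestamp already stripped):
const expanded = [
	{
		role: 'assistant',
		content: null,
		tool_calls: [{id: h.id, type: 'function', function: {name: h.name, arguments: JSON.stringify(h.args)}}],
		refusal: null,
		annotations: []
	},
	{role: 'tool', tool_call_id: h.id, content: h.content}
];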
@@ -62,7 +64,7 @@ export class OpenAi extends LLMProvider {
 	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
 		const controller = new AbortController();
 		const response = new Promise<any>(async (res, rej) => {
-			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+			let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
 			const requestParams: any = {
@@ -85,23 +87,25 @@ export class OpenAi extends LLMProvider {
 				}))
 			};
 			// Tool call and streaming logic similar to other providers
-			let resp: any;
+			let resp: any, isFirstMessage = true;
 			do {
 				resp = await this.client.chat.completions.create(requestParams);
 				// Implement streaming and tool call handling
 				if(options.stream) {
-					resp.choices = [];
+					if(!isFirstMessage) options.stream({text: '\n\n'});
+					else isFirstMessage = false;
+					resp.choices = [{message: {content: '', tool_calls: []}}];
 					for await (const chunk of resp) {
 						if(controller.signal.aborted) break;
 						if(chunk.choices[0].delta.content) {
 							resp.choices[0].message.content += chunk.choices[0].delta.content;
 							options.stream({text: chunk.choices[0].delta.content});
 						}
 						if(chunk.choices[0].delta.tool_calls) {
 							resp.choices[0].message.tool_calls = chunk.choices[0].delta.tool_calls;
 						}
 					}
 				}
 				// Run tools
 				const toolCalls = resp.choices[0].message.tool_calls || [];
 				if(toolCalls.length && !controller.signal.aborted) {
 					history.push(resp.choices[0].message);
@@ -124,7 +128,6 @@ export class OpenAi extends LLMProvider {
 			if(options.stream) options.stream({done: true});
 			res(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));
 		});
 		return Object.assign(response, {abort: () => controller.abort()});
 	}
 }
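
Every provider's ask() returns the same AbortablePromise shape, built on the Object.assign line above: a plain Promise with an abort() method grafted on. A self-contained sketch of the pattern (hypothetical helper name):

function abortable<T>(run: (signal: AbortSignal) => Promise<T>): Promise<T> & {abort: () => void} {
	const controller = new AbortController();
	// Object.assign adds abort() without changing how the promise awaits.
	return Object.assign(run(controller.signal), {abort: () => controller.abort()});
}

// Usage: a polling loop that exits once abort() flips the signal.
const req = abortable(async signal => {
	while(!signal.aborted) await new Promise(r => setTimeout(r, 100));
	return 'stopped';
});
setTimeout(() => req.abort(), 350);
req.then(console.log); // logs 'stopped' shortly after abort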