12 Commits

SHA1        Message                                                Date                         Publish Library CI (build / tag)
c896b585d0  Fixed LLM multi message responses                      2025-12-17 19:59:34 -05:00   passed (44s / 14s)
1fe1e0cafe  Fixing message combination on anthropic                2025-12-16 16:11:13 -05:00   passed (35s / 8s)
3aa4684923  Fixing message combination on anthropic                2025-12-16 13:07:03 -05:00   passed (33s / 7s)
0730f5f3f9  Fixed timestamp breaking api calls                     2025-12-16 12:56:56 -05:00   passed (34s / 8s)
1a0351aeef  Handle multiple AI responses in one question better.   2025-12-16 12:46:44 -05:00   passed (33s / 8s)
a5ed4076b7  Handle anthropic multiple responses better.            2025-12-16 12:22:14 -05:00   passed (34s / 8s)
0112c92505  Removed log statements                                 2025-12-14 21:16:39 -05:00   passed (20s / 5s)
2351f590b5  Removed ASR file intermediary                          2025-12-14 09:27:07 -05:00   passed (37s / 8s)
2c2acef84e  ASR logging                                            2025-12-14 08:49:02 -05:00   passed (37s / 8s)
a6de121551  Fixed ASR command                                      2025-12-13 23:19:30 -05:00   passed (26s / 7s)
31d9ee4390  ASR Debugging                                          2025-12-13 22:59:23 -05:00   passed (43s / 17s)
d69bea3b38  Fixed ASR whisper models                               2025-12-13 22:47:35 -05:00   passed (30s / 7s)
6 changed files with 73 additions and 58 deletions

View File: package.json

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.1.9",
+  "version": "0.1.22",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",

View File: ai.ts

@@ -1,25 +1,21 @@
-import {$} from '@ztimson/node-utils';
 import {createWorker} from 'tesseract.js';
 import {LLM, LLMOptions} from './llm';
 import fs from 'node:fs/promises';
 import Path from 'node:path';
 import * as tf from '@tensorflow/tfjs';
+import {spawn} from 'node:child_process';
 
 export type AiOptions = LLMOptions & {
   whisper?: {
     /** Whisper binary location */
     binary: string;
-    /** Model */
-    model: WhisperModel;
+    /** Model: `ggml-base.en.bin` */
+    model: string;
     /** Path to models */
     path: string;
-    /** Path to storage location for temporary files */
-    temp?: string;
   }
 }
 
-export type WhisperModel = 'tiny' | 'base' | 'small' | 'medium' | 'large';
-
 export class Ai {
   private downloads: {[key: string]: Promise<string>} = {};
   private whisperModel!: string;
@@ -30,7 +26,7 @@ export class Ai {
   constructor(public readonly options: AiOptions) {
     this.llm = new LLM(this, options);
     if(this.options.whisper?.binary) {
-      this.whisperModel = Path.join(<string>this.options.whisper?.path, this.options.whisper?.model + this.options.whisper?.model.endsWith('.bin') ? '' : '.bin');
+      this.whisperModel = this.options.whisper?.model.endsWith('.bin') ? this.options.whisper?.model : this.options.whisper?.model + '.bin';
       this.downloadAsrModel();
     }
   }
@@ -41,17 +37,23 @@ export class Ai {
    * @param model Whisper model
    * @returns {Promise<any>} Extracted text
    */
-  async asr(path: string, model?: WhisperModel): Promise<string | null> {
+  asr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {
     if(!this.options.whisper?.binary) throw new Error('Whisper not configured');
-    const m = await this.downloadAsrModel(model);
-    const name = Math.random().toString(36).substring(2, 10) + '-' + path.split('/').pop() + '.txt';
-    const output = Path.join(this.options.whisper.temp || '/tmp', name);
-    console.log('model:', this.options.whisper?.model);
-    console.log(this.whisperModel);
-    console.log(`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`);
-    await $`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`;
-    return fs.readFile(output, 'utf-8').then(text => text?.trim() || null)
-      .finally(() => fs.rm(output, {force: true}).catch(() => {}));
+    let abort: any = () => {};
+    const response = new Promise<string | null>((resolve, reject) => {
+      this.downloadAsrModel(model).then(m => {
+        let output = '';
+        const proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});
+        abort = () => proc.kill('SIGTERM');
+        proc.on('error', (err: Error) => reject(err));
+        proc.stdout.on('data', (data: Buffer) => output += data.toString());
+        proc.on('close', (code: number) => {
+          if(code === 0) resolve(output.trim() || null);
+          else reject(new Error(`Exit code ${code}`));
+        });
+      });
+    });
+    return {response, abort};
   }
 
   /**
@@ -60,22 +62,20 @@ export class Ai {
    * @param {string} model Whisper model that will be downloaded
    * @return {Promise<string>} Absolute path to model file, resolves once downloaded
    */
-  async downloadAsrModel(model?: string): Promise<string> {
+  async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
     if(!this.options.whisper?.binary) throw new Error('Whisper not configured');
-    let m;
-    if(model) m = model?.endsWith('.bin') ? model : model + '.bin';
-    else m = <string>this.whisperModel.split('/').at(-1);
-    const p = Path.join(this.options.whisper.path, m);
+    if(!model.endsWith('.bin')) model += '.bin';
+    const p = Path.join(this.options.whisper.path, model);
     if(await fs.stat(p).then(() => true).catch(() => false)) return p;
-    if(!!this.downloads[m]) return this.downloads[m];
-    this.downloads[m] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${m}`)
+    if(!!this.downloads[model]) return this.downloads[model];
+    this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)
      .then(resp => resp.arrayBuffer())
     .then(arr => Buffer.from(arr)).then(async buffer => {
-        await fs.writeFile(Path.join((<any>this.options.whisper).path, m), buffer);
-        delete this.downloads[m];
+        await fs.writeFile(p, buffer);
+        delete this.downloads[model];
        return p;
      });
-    return this.downloads[m];
+    return this.downloads[model];
   }
 
   /**
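
The asr() rewrite drops the shell-out through `$` and the temp-file intermediary: whisper.cpp is now spawned directly, stdout is captured in memory, and the caller gets an abort handle next to the transcript promise. A minimal usage sketch, assuming the package root exports `Ai` and that the non-whisper half of `AiOptions` can be stubbed out (the paths below are placeholders):

import {Ai} from '@ztimson/ai-utils';

// Placeholder locations; point these at your whisper.cpp binary and model dir.
const ai = new Ai({
  whisper: {
    binary: '/usr/local/bin/whisper-cli',
    model: 'ggml-base.en.bin',
    path: '/var/lib/whisper-models'
  }
} as any);

// asr() now returns synchronously: the transcript resolves on `response`
// (null when whisper emits no text) and `abort` kills the child process.
const {response, abort} = ai.asr('./recording.wav');
const timer = setTimeout(abort, 60_000); // give up after a minute
const text = await response.finally(() => clearTimeout(timer));
console.log(text);

One caveat visible in the diff: `abort` is rebound inside the download callback after `{response, abort}` has already been returned, so an abort issued through the destructured handle may still hit the initial no-op closure.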

View File: Anthropic provider

@@ -1,5 +1,5 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
-import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
+import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, deepCopy} from '@ztimson/utils';
 import {Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
 import {AbortablePromise, LLMProvider} from './provider.ts';
@@ -19,7 +19,7 @@ export class Anthropic extends LLMProvider {
       if(history[orgI].role == 'assistant') {
         history[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {
           i++;
-          history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});
+          history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input, timestamp: Date.now()});
         });
       } else if(history[orgI].role == 'user') {
         history[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {
@@ -29,6 +29,7 @@ export class Anthropic extends LLMProvider {
         }
         history[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
       }
+      if(!history[orgI].timestamp) history[orgI].timestamp = Date.now();
     }
     return history.filter(h => !!h.content);
   }
@@ -44,13 +45,14 @@ export class Anthropic extends LLMProvider {
         i++;
       }
     }
-    return history;
+    return history.map(({timestamp, ...h}) => h);
   }
 
   ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
     const controller = new AbortController();
     const response = new Promise<any>(async (res, rej) => {
-      let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+      let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
+      const original = deepCopy(history);
       if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
       const requestParams: any = {
         model: options.model || this.model,
@@ -71,13 +73,15 @@ export class Anthropic extends LLMProvider {
         stream: !!options.stream,
       };
 
-      // Run tool changes
-      let resp: any;
+      let resp: any, isFirstMessage = true;
+      const assistantMessages: string[] = [];
       do {
         resp = await this.client.messages.create(requestParams);
 
         // Streaming mode
         if(options.stream) {
+          if(!isFirstMessage) options.stream({text: '\n\n'});
+          else isFirstMessage = false;
           resp.content = [];
           for await (const chunk of resp) {
             if(controller.signal.aborted) break;
@@ -108,6 +112,7 @@ export class Anthropic extends LLMProvider {
         const toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');
         if(toolCalls.length && !controller.signal.aborted) {
           history.push({role: 'assistant', content: resp.content});
+          original.push({role: 'assistant', content: resp.content});
           const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
             const tool = options.tools?.find(findByProp('name', toolCall.name));
             if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
@@ -122,12 +127,11 @@ export class Anthropic extends LLMProvider {
           requestParams.messages = history;
         }
       } while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
 
      if(options.stream) options.stream({done: true});
-      res(this.toStandard([...history, {
-        role: 'assistant',
-        content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')
-      }]));
+      res(this.toStandard([...history, {role: 'assistant', content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')}]));
     });
     return Object.assign(response, {abort: () => controller.abort()});
   }
 }
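
The `isFirstMessage` flag changes what streaming consumers see: when a tool round-trip produces a second assistant message, the callback now receives a '\n\n' separator before the new text instead of the two messages running together. A rough sketch of the consumer side (the `ask` surface and the `{text}`/`{done}` chunk shapes come from this file; provider construction and the tool definitions are assumed and elided):

// Hypothetical provider instance; construction elided.
let transcript = '';
await provider.ask('Look up the weather, then summarize it.', {
  tools: [/* tool definitions elided */],
  stream: chunk => {
    // Text deltas accumulate; between assistant turns a '\n\n' arrives first.
    if(chunk.text) transcript += chunk.text;
    if(chunk.done) console.log(transcript); // one paragraph per assistant turn
  }
});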

View File: llm.ts

@@ -11,6 +11,8 @@ export type LLMMessage = {
   role: 'assistant' | 'system' | 'user';
   /** Message content */
   content: string | any;
+  /** Timestamp */
+  timestamp?: number;
 } | {
   /** Tool call */
   role: 'tool';
@@ -24,6 +26,8 @@ export type LLMMessage = {
   content: undefined | string;
   /** Tool error */
   error: undefined | string;
+  /** Timestamp */
+  timestamp?: number;
 }
 
 export type LLMOptions = {
@@ -125,7 +129,7 @@ export class LLM {
     const recent = keep == 0 ? [] : history.slice(-keep),
       process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
     const summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), 250, options);
-    return [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];
+    return [{role: 'assistant', content: `Conversation Summary: ${summary}`, timestamp: Date.now()}, ...recent];
   }
 
   /**
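
The new optional `timestamp` field lets callers carry ordering metadata through `options.history`: each provider backfills `Date.now()` on untimestamped entries when normalizing and strips the field again before anything reaches an API. A small sketch, assuming `LLMMessage` is re-exported from the package root:

import {LLMMessage} from '@ztimson/ai-utils';

// Timestamps are optional bookkeeping; they survive in your history
// but are removed from every outgoing provider payload.
const history: LLMMessage[] = [
  {role: 'user', content: 'What did we decide yesterday?', timestamp: Date.now() - 86_400_000},
  {role: 'assistant', content: 'You chose the abortable ASR API.', timestamp: Date.now()}
];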

View File: Ollama provider

@@ -22,15 +22,17 @@ export class Ollama extends LLMProvider {
         }
       } else if(history[i].role == 'tool') {
         const error = history[i].content.startsWith('{"error":');
-        history[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};
+        history[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content, timestamp: history[i].timestamp};
       }
+      if(!history[i]?.timestamp) history[i].timestamp = Date.now();
     }
     return history;
   }
 
   private fromStandard(history: LLMMessage[]): any[] {
     return history.map((h: any) => {
-      if(h.role != 'tool') return h;
+      const {timestamp, ...rest} = h;
+      if(h.role != 'tool') return rest;
       return {role: 'tool', tool_name: h.name, content: h.error || h.content}
     });
   }
@@ -39,7 +41,7 @@ export class Ollama extends LLMProvider {
     const controller = new AbortController();
     const response = new Promise<any>(async (res, rej) => {
       let system = options.system || this.ai.options.system;
-      let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+      let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
       if(history[0].roll == 'system') {
         if(!system) system = history.shift();
         else history.shift();
@@ -70,11 +72,12 @@ export class Ollama extends LLMProvider {
         }))
       }
 
-      // Run tool chains
-      let resp: any;
+      let resp: any, isFirstMessage = true;
       do {
         resp = await this.client.chat(requestParams);
         if(options.stream) {
+          if(!isFirstMessage) options.stream({text: '\n\n'});
+          else isFirstMessage = false;
           resp.message = {role: 'assistant', content: '', tool_calls: []};
           for await (const chunk of resp) {
             if(controller.signal.aborted) break;
@@ -87,7 +90,6 @@ export class Ollama extends LLMProvider {
           }
         }
 
-        // Run tools
         if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
           history.push(resp.message);
           const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
@@ -105,9 +107,11 @@ export class Ollama extends LLMProvider {
           requestParams.messages = history;
         }
       } while (!controller.signal.aborted && resp.message?.tool_calls?.length);
 
       if(options.stream) options.stream({done: true});
       res(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));
     });
     return Object.assign(response, {abort: () => controller.abort()});
   }
 }
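
The single line `const {timestamp, ...rest} = h` is the whole stripping mechanism: rest-destructuring yields a shallow copy minus the internal field, so the payload sent to Ollama stays clean while the caller's history keeps its timestamps. The pattern in isolation (illustrative types only):

type Stamped = {role: string; content: string; timestamp?: number};

// Rest-destructuring drops one key and returns a shallow copy;
// the source object is never mutated.
const strip = ({timestamp, ...rest}: Stamped) => rest;

const msg = {role: 'user', content: 'hi', timestamp: Date.now()};
console.log(strip(msg));                  // { role: 'user', content: 'hi' }
console.log(msg.timestamp !== undefined); // true: original intact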

View File: OpenAI provider

@@ -20,7 +20,8 @@ export class OpenAi extends LLMProvider {
         role: 'tool',
         id: tc.id,
         name: tc.function.name,
-        args: JSONAttemptParse(tc.function.arguments, {})
+        args: JSONAttemptParse(tc.function.arguments, {}),
+        timestamp: h.timestamp
       }));
       history.splice(i, 1, ...tools);
       i += tools.length - 1;
@@ -33,7 +34,7 @@ export class OpenAi extends LLMProvider {
         history.splice(i, 1);
         i--;
       }
+      if(!history[i]?.timestamp) history[i].timestamp = Date.now();
     }
     return history;
   }
@@ -46,14 +47,15 @@ export class OpenAi extends LLMProvider {
         content: null,
         tool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],
         refusal: null,
-        annotations: [],
+        annotations: []
       }, {
         role: 'tool',
         tool_call_id: h.id,
         content: h.error || h.content
       });
     } else {
-      result.push(h);
+      const {timestamp, ...rest} = h;
+      result.push(rest);
     }
     return result;
   }, [] as any[]);
@@ -62,7 +64,7 @@ export class OpenAi extends LLMProvider {
   ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
     const controller = new AbortController();
     const response = new Promise<any>(async (res, rej) => {
-      let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
+      let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
       if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
 
       const requestParams: any = {
@@ -85,23 +87,25 @@ export class OpenAi extends LLMProvider {
         }))
       };
 
-      // Tool call and streaming logic similar to other providers
-      let resp: any;
+      let resp: any, isFirstMessage = true;
       do {
         resp = await this.client.chat.completions.create(requestParams);
-        // Implement streaming and tool call handling
         if(options.stream) {
-          resp.choices = [];
+          if(!isFirstMessage) options.stream({text: '\n\n'});
+          else isFirstMessage = false;
+          resp.choices = [{message: {content: '', tool_calls: []}}];
           for await (const chunk of resp) {
            if(controller.signal.aborted) break;
            if(chunk.choices[0].delta.content) {
+              resp.choices[0].message.content += chunk.choices[0].delta.content;
              options.stream({text: chunk.choices[0].delta.content});
            }
+            if(chunk.choices[0].delta.tool_calls) {
+              resp.choices[0].message.tool_calls = chunk.choices[0].delta.tool_calls;
+            }
           }
         }
 
-        // Run tools
         const toolCalls = resp.choices[0].message.tool_calls || [];
         if(toolCalls.length && !controller.signal.aborted) {
           history.push(resp.choices[0].message);
@@ -124,7 +128,6 @@ export class OpenAi extends LLMProvider {
       if(options.stream) options.stream({done: true});
       res(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));
     });
     return Object.assign(response, {abort: () => controller.abort()});
   }
 }
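
The substantive fix in the streaming branch is accumulation: previously `resp.choices = []` discarded every delta, leaving the post-stream code with nothing to read, while now text deltas append to `resp.choices[0].message.content` and tool-call deltas are captured. The pattern in isolation, against a hypothetical chunk shape modeled on the OpenAI streaming API:

type Chunk = {choices: {delta: {content?: string; tool_calls?: any[]}}[]};

// Rebuild one complete assistant message from a stream of deltas.
async function collect(stream: AsyncIterable<Chunk>) {
  const message = {content: '', tool_calls: [] as any[]};
  for await (const chunk of stream) {
    const delta = chunk.choices[0].delta;
    if(delta.content) message.content += delta.content;         // text appends
    if(delta.tool_calls) message.tool_calls = delta.tool_calls; // calls overwrite
  }
  return message;
}

One limitation worth noting: the diff overwrites `tool_calls` with the latest delta, but OpenAI's streaming API typically fragments a tool call across several chunks, so arguments spanning multiple deltas would need index-based merging that this change does not attempt.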