diff --git a/package.json b/package.json
index 303aab4..e6fd5b8 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "@ztimson/ai-utils",
-	"version": "0.2.3",
+	"version": "0.2.4",
 	"description": "AI Utility library",
 	"author": "Zak Timson",
 	"license": "MIT",
diff --git a/src/ai.ts b/src/ai.ts
index 335cee6..087ba76 100644
--- a/src/ai.ts
+++ b/src/ai.ts
@@ -1,3 +1,4 @@
+import * as os from 'node:os';
 import {LLM, LLMOptions} from './llm';
 import { Audio } from './audio.ts';
 import {Vision} from './vision.ts';
@@ -10,13 +11,10 @@ export type AiOptions = LLMOptions & {
 		model: string;
 	}
 	/** Path to models */
-	path: string;
+	path?: string;
 }
 
 export class Ai {
-	private downloads: {[key: string]: Promise<string>} = {};
-	private whisperModel!: string;
-
 	/** Audio processing AI */
 	audio!: Audio;
 	/** Language processing AI */
@@ -25,6 +23,7 @@ export class Ai {
 	vision!: Vision;
 
 	constructor(public readonly options: AiOptions) {
+		if(!options.path) options.path = os.tmpdir();
 		process.env.TRANSFORMERS_CACHE = options.path;
 		this.audio = new Audio(this);
 		this.language = new LLM(this);
diff --git a/src/antrhopic.ts b/src/antrhopic.ts
index a22ef00..583ad6f 100644
--- a/src/antrhopic.ts
+++ b/src/antrhopic.ts
@@ -54,12 +54,14 @@ export class Anthropic extends LLMProvider {
 		let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 		const original = deepCopy(history);
 		if(options.compress) history = await this.ai.language.compressHistory(history, options.compress.max, options.compress.min, options);
+
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
 			system: options.system || this.ai.options.system || '',
 			temperature: options.temperature || this.ai.options.temperature || 0.7,
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				name: t.name,
 				description: t.description,
 				input_schema: {
@@ -76,7 +78,10 @@
 		let resp: any, isFirstMessage = true;
 		const assistantMessages: string[] = [];
 		do {
-			resp = await this.client.messages.create(requestParams);
+			resp = await this.client.messages.create(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
 
 			// Streaming mode
 			if(options.stream) {
@@ -114,7 +119,7 @@
 				history.push({role: 'assistant', content: resp.content});
 				original.push({role: 'assistant', content: resp.content});
 				const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-					const tool = options.tools?.find(findByProp('name', toolCall.name));
+					const tool = tools.find(findByProp('name', toolCall.name));
 					if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
 					try {
 						const result = await tool.fn(toolCall.input, this.ai);
diff --git a/src/audio.ts b/src/audio.ts
index c352e6a..0b0084a 100644
--- a/src/audio.ts
+++ b/src/audio.ts
@@ -48,7 +48,7 @@ export class Audio {
 	async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
 		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
 		if(!model.endsWith('.bin')) model += '.bin';
-		const p = Path.join(this.ai.options.path, model);
+		const p = Path.join(<string>this.ai.options.path, model);
 		if(await fs.stat(p).then(() => true).catch(() => false)) return p;
 		if(!!this.downloads[model]) return this.downloads[model];
 		this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)
diff --git a/src/ollama.ts b/src/ollama.ts
index a5d88cd..1160bb9 100644
--- a/src/ollama.ts
+++ b/src/ollama.ts
@@ -49,6 +49,7 @@ export class Ollama extends LLMProvider {
 		if(options.compress) history = await this.ai.language.compressHistory(history, options.compress.max, options.compress.min);
 		if(options.system) history.unshift({role: 'system', content: system})
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			messages: history,
@@ -58,7 +59,7 @@
 				temperature: options.temperature || this.ai.options.temperature || 0.7,
 				num_predict: options.max_tokens || this.ai.options.max_tokens || 4096,
 			},
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				type: 'function',
 				function: {
 					name: t.name,
@@ -74,7 +75,11 @@
 
 		let resp: any, isFirstMessage = true;
 		do {
-			resp = await this.client.chat(requestParams);
+			resp = await this.client.chat(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			if(options.stream) {
 				if(!isFirstMessage) options.stream({text: '\n\n'});
 				else isFirstMessage = false;
@@ -93,7 +98,7 @@
 			if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
 				history.push(resp.message);
 				const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
-					const tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));
+					const tool = tools.find(findByProp('name', toolCall.function.name));
 					if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
 					const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
 					try {
diff --git a/src/open-ai.ts b/src/open-ai.ts
index dd09057..e58610e 100644
--- a/src/open-ai.ts
+++ b/src/open-ai.ts
@@ -67,13 +67,14 @@ export class OpenAi extends LLMProvider {
 		let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 		if(options.compress) history = await this.ai.language.compressHistory(history, options.compress.max, options.compress.min, options);
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			messages: history,
 			stream: !!options.stream,
 			max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
 			temperature: options.temperature || this.ai.options.temperature || 0.7,
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				type: 'function',
 				function: {
 					name: t.name,
@@ -89,7 +90,11 @@
 
 		let resp: any, isFirstMessage = true;
 		do {
-			resp = await this.client.chat.completions.create(requestParams);
+			resp = await this.client.chat.completions.create(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			if(options.stream) {
 				if(!isFirstMessage) options.stream({text: '\n\n'});
 				else isFirstMessage = false;
@@ -110,7 +115,7 @@
 			if(toolCalls.length && !controller.signal.aborted) {
 				history.push(resp.choices[0].message);
 				const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-					const tool = options.tools?.find(findByProp('name', toolCall.function.name));
+					const tool = tools?.find(findByProp('name', toolCall.function.name));
 					if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
 					try {
 						const args = JSONAttemptParse(toolCall.function.arguments, {});
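
Review note: the recurring change across the three providers is that a single `tools` constant (request-level `options.tools` falling back to instance-level `this.ai.options.tools`) is now used both to advertise tools and to look them up when the model calls one, so tools registered on the `Ai` instance no longer come back as "Tool not found" from the Anthropic and OpenAI providers; `path` is also optional now, defaulting to `os.tmpdir()`, and provider errors carry the serialized history. Below is a minimal sketch of what that enables. The tool shape (`name`, `description`, `fn(input, ai)`) and option names are taken from this diff; the commented-out chat call is a hypothetical entry point, not an API confirmed by this patch, and a tool parameter schema field is likely required as well but is cut off in the hunks above.

```ts
import {Ai} from '@ztimson/ai-utils';

// `path` intentionally omitted: with this patch it falls back to os.tmpdir(),
// which becomes TRANSFORMERS_CACHE and the whisper model download directory.
const ai = new Ai({
	max_tokens: 1024,
	temperature: 0.5,
	system: 'You are a helpful assistant.',
	// Instance-level tools: previously the Anthropic and OpenAI providers advertised
	// these to the model but only searched options.tools when executing a tool call.
	tools: [{
		name: 'get_time',
		description: 'Returns the current ISO timestamp',
		fn: async (_input: unknown, _ai: Ai) => new Date().toISOString(),
	}],
});

// Hypothetical usage; the chat entry point and its exact signature are not part of this diff:
// const reply = await ai.language.chat('What time is it?', {stream: chunk => process.stdout.write(chunk.text)});
```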