Fixed tool calls
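
Resolve the tool list once per request (options.tools || this.ai.options.tools || []) and use it for both the request payload and tool-call dispatch in the Anthropic, Ollama and OpenAI providers, so tools registered on the Ai instance are found when the model invokes them. Also make AiOptions.path optional, defaulting to os.tmpdir(); append the serialized message history to provider API errors before rethrowing; bump version to 0.2.4.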

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",

@@ -1,3 +1,4 @@
+import * as os from 'node:os';
 import {LLM, LLMOptions} from './llm';
 import { Audio } from './audio.ts';
 import {Vision} from './vision.ts';
@@ -10,13 +11,10 @@ export type AiOptions = LLMOptions & {
 		model: string;
 	}
 	/** Path to models */
-	path: string;
+	path?: string;
 }
 
 export class Ai {
-	private downloads: {[key: string]: Promise<string>} = {};
-	private whisperModel!: string;
-
 	/** Audio processing AI */
 	audio!: Audio;
 	/** Language processing AI */
@@ -25,6 +23,7 @@ export class Ai {
 	vision!: Vision;
 
 	constructor(public readonly options: AiOptions) {
+		if(!options.path) options.path = os.tmpdir();
 		process.env.TRANSFORMERS_CACHE = options.path;
 		this.audio = new Audio(this);
 		this.language = new LLM(this);
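
Note: with path now optional, callers can omit it and the constructor falls back to os.tmpdir(), which also seeds TRANSFORMERS_CACHE. A minimal usage sketch; the empty options object is illustrative only, since real usage needs the provider fields from LLMOptions, which this diff does not show:

import * as os from 'node:os';
import {Ai} from '@ztimson/ai-utils';

// `path` omitted: the constructor back-fills os.tmpdir() and exports it as
// the cache directory for downloaded models.
const ai = new Ai(<any>{});
console.log(ai.options.path === os.tmpdir()); // true when no path was given
console.log(process.env.TRANSFORMERS_CACHE);  // same directory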
@@ -54,12 +54,14 @@ export class Anthropic extends LLMProvider {
 		let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 		const original = deepCopy(history);
 		if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min, options);
+
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
 			system: options.system || this.ai.options.system || '',
 			temperature: options.temperature || this.ai.options.temperature || 0.7,
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				name: t.name,
 				description: t.description,
 				input_schema: {
@@ -76,7 +78,10 @@ export class Anthropic extends LLMProvider {
 		let resp: any, isFirstMessage = true;
 		const assistantMessages: string[] = [];
 		do {
-			resp = await this.client.messages.create(requestParams);
+			resp = await this.client.messages.create(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
 
 			// Streaming mode
 			if(options.stream) {
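
Note: the .catch wrapper appends the serialized history to the error message and rethrows, so the original error type and stack survive while logs gain the offending conversation. The same pattern is applied to the Ollama and OpenAI providers below. A generic distillation of it; the helper name is hypothetical, the inline form above is what the library actually uses:

// Hypothetical helper equivalent to the inline .catch in each provider.
async function withHistoryContext<T>(request: Promise<T>, history: unknown): Promise<T> {
	return request.catch(err => {
		// Attach conversation context for debugging, then rethrow unchanged.
		err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
		throw err;
	});
}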
@@ -114,7 +119,7 @@ export class Anthropic extends LLMProvider {
 			history.push({role: 'assistant', content: resp.content});
 			original.push({role: 'assistant', content: resp.content});
 			const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-				const tool = options.tools?.find(findByProp('name', toolCall.name));
+				const tool = tools.find(findByProp('name', toolCall.name));
 				if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
 				try {
 					const result = await tool.fn(toolCall.input, this.ai);
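
Note: this is the actual "tool calls" fix. The request payload advertised options.tools || this.ai.options.tools, but dispatch searched only options.tools, so a tool registered globally on the Ai instance was offered to the model yet produced 'Tool not found' when invoked. Resolving tools once and reusing it on both sides closes that gap. A sketch of the repaired flow; the tool shape (name/description/fn) is taken from this diff, while the chat entry point is an assumption, not confirmed API:

import {Ai} from '@ztimson/ai-utils';

// Tool registered globally on the Ai instance rather than per request.
const ai = new Ai(<any>{
	tools: [{
		name: 'get_time',
		description: 'Returns the current ISO timestamp',
		fn: async () => new Date().toISOString(),
	}],
});

// Before: the request advertised get_time, but dispatch searched only
// options.tools and answered {is_error: true, content: 'Tool not found'}.
// After: payload and dispatch read the same resolved `tools` array.
await ai.language.chat('What time is it?'); // assumed entry point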
@@ -48,7 +48,7 @@ export class Audio {
 	async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
 		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
 		if(!model.endsWith('.bin')) model += '.bin';
-		const p = Path.join(this.ai.options.path, model);
+		const p = Path.join(<string>this.ai.options.path, model);
 		if(await fs.stat(p).then(() => true).catch(() => false)) return p;
 		if(!!this.downloads[model]) return this.downloads[model];
 		this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)
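
Note: the <string> cast is the knock-on effect of making path optional: the Ai constructor always back-fills it, so the narrowing is safe here. A non-null assertion would express the same thing; the declare line below is a stand-in for this.ai.options:

import Path from 'node:path';

declare const options: {path?: string}; // stand-in for this.ai.options
const a = Path.join(<string>options.path, 'ggml-base.en.bin'); // cast, as in the diff
const b = Path.join(options.path!, 'ggml-base.en.bin');        // equivalent non-null assertion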
@@ -49,6 +49,7 @@ export class Ollama extends LLMProvider {
 		if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min);
 		if(options.system) history.unshift({role: 'system', content: system})
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			messages: history,
@@ -58,7 +59,7 @@ export class Ollama extends LLMProvider {
 				temperature: options.temperature || this.ai.options.temperature || 0.7,
 				num_predict: options.max_tokens || this.ai.options.max_tokens || 4096,
 			},
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				type: 'function',
 				function: {
 					name: t.name,
@@ -74,7 +75,11 @@ export class Ollama extends LLMProvider {
 
 		let resp: any, isFirstMessage = true;
 		do {
-			resp = await this.client.chat(requestParams);
+			resp = await this.client.chat(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			if(options.stream) {
 				if(!isFirstMessage) options.stream({text: '\n\n'});
 				else isFirstMessage = false;
@@ -93,7 +98,7 @@ export class Ollama extends LLMProvider {
 			if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
 				history.push(resp.message);
 				const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
-					const tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));
+					const tool = tools.find(findByProp('name', toolCall.function.name));
 					if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
 					const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
 					try {

@@ -67,13 +67,14 @@ export class OpenAi extends LLMProvider {
 		let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 		if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min, options);
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			messages: history,
 			stream: !!options.stream,
 			max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
 			temperature: options.temperature || this.ai.options.temperature || 0.7,
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				type: 'function',
 				function: {
 					name: t.name,
@@ -89,7 +90,11 @@ export class OpenAi extends LLMProvider {
 
 		let resp: any, isFirstMessage = true;
 		do {
-			resp = await this.client.chat.completions.create(requestParams);
+			resp = await this.client.chat.completions.create(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			if(options.stream) {
 				if(!isFirstMessage) options.stream({text: '\n\n'});
 				else isFirstMessage = false;
@@ -110,7 +115,7 @@ export class OpenAi extends LLMProvider {
 			if(toolCalls.length && !controller.signal.aborted) {
 				history.push(resp.choices[0].message);
 				const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-					const tool = options.tools?.find(findByProp('name', toolCall.function.name));
+					const tool = tools?.find(findByProp('name', toolCall.function.name));
 					if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
 					try {
 						const args = JSONAttemptParse(toolCall.function.arguments, {});