Compare commits
20 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 7b57a0ded1 | |
| | 28904cddbe | |
| | d5bf1ec47e | |
| | cb60a0b0c5 | |
| | 1c59379c7d | |
| | 6dce0e8954 | |
| | 98dd0bb323 | |
| | ca5a2334bb | |
| | 3cd7b12f5f | |
| | bb6933f0d5 | |
| | 435c6127b1 | |
| | c896b585d0 | |
| | 1fe1e0cafe | |
| | 3aa4684923 | |
| | 0730f5f3f9 | |
| | 1a0351aeef | |
| | a5ed4076b7 | |
| | 0112c92505 | |
| | 2351f590b5 | |
| | 2c2acef84e | |
1998 package-lock.json (generated)
File diff suppressed because it is too large
package.json

```diff
@@ -1,6 +1,6 @@
 {
 	"name": "@ztimson/ai-utils",
-	"version": "0.1.12",
+	"version": "0.4.0",
 	"description": "AI Utility library",
 	"author": "Zak Timson",
 	"license": "MIT",
@@ -27,9 +27,10 @@
 	"dependencies": {
 		"@anthropic-ai/sdk": "^0.67.0",
 		"@tensorflow/tfjs": "^4.22.0",
+		"@xenova/transformers": "^2.17.2",
 		"@ztimson/node-utils": "^1.0.4",
 		"@ztimson/utils": "^0.27.9",
-		"ollama": "^0.6.0",
+		"cheerio": "^1.2.0",
 		"openai": "^6.6.0",
 		"tesseract.js": "^6.0.1"
 	},
```
136 src/ai.ts

```diff
@@ -1,120 +1,44 @@
-import {$} from '@ztimson/node-utils';
-import {createWorker} from 'tesseract.js';
-import {LLM, LLMOptions} from './llm';
-import fs from 'node:fs/promises';
-import Path from 'node:path';
-import * as tf from '@tensorflow/tfjs';
+import * as os from 'node:os';
+import {LLM, AnthropicConfig, OllamaConfig, OpenAiConfig, LLMRequest} from './llm';
+import { Audio } from './audio.ts';
+import {Vision} from './vision.ts';
 
-export type AiOptions = LLMOptions & {
+export type AbortablePromise<T> = Promise<T> & {abort: () => any};
+
+export type AiOptions = {
+	/** Path to models */
+	path?: string;
+	/** Large language models, first is default */
+	llm?: Omit<LLMRequest, 'model'> & {
+		models: {[model: string]: AnthropicConfig | OllamaConfig | OpenAiConfig};
+	}
+	/** Tesseract OCR configuration */
+	tesseract?: {
+		/** Model: eng, eng_best, eng_fast */
+		model?: string;
+	}
+	/** Whisper ASR configuration */
 	whisper?: {
 		/** Whisper binary location */
 		binary: string;
 		/** Model: `ggml-base.en.bin` */
 		model: string;
-		/** Path to models */
-		path: string;
-		/** Path to storage location for temporary files */
-		temp?: string;
 	}
 }
 
 export class Ai {
-	private downloads: {[key: string]: Promise<string>} = {};
-	private whisperModel!: string;
-
-	/** Large Language Models */
-	llm!: LLM;
+	/** Audio processing AI */
+	audio!: Audio;
+	/** Language processing AI */
+	language!: LLM;
+	/** Vision processing AI */
+	vision!: Vision;
 
 	constructor(public readonly options: AiOptions) {
-		this.llm = new LLM(this, options);
-		if(this.options.whisper?.binary) {
-			this.whisperModel = this.options.whisper?.model.endsWith('.bin') ? this.options.whisper?.model : this.options.whisper?.model + '.bin';
-			this.downloadAsrModel();
-		}
+		if(!options.path) options.path = os.tmpdir();
+		process.env.TRANSFORMERS_CACHE = options.path;
+		this.audio = new Audio(this);
+		this.language = new LLM(this);
+		this.vision = new Vision(this);
 	}
-
-	/**
-	 * Convert audio to text using Auditory Speech Recognition
-	 * @param {string} path Path to audio
-	 * @param model Whisper model
-	 * @returns {Promise<any>} Extracted text
-	 */
-	async asr(path: string, model: string = this.whisperModel): Promise<string | null> {
-		if(!this.options.whisper?.binary) throw new Error('Whisper not configured');
-		const m = await this.downloadAsrModel(model);
-		const name = Math.random().toString(36).substring(2, 10) + '-' + path.split('/').pop() + '.txt';
-		const output = Path.join(this.options.whisper.temp || '/tmp', name);
-		await $`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`;
-		return fs.readFile(output, 'utf-8').then(text => text?.trim() || null)
-			.finally(() => fs.rm(output, {force: true}).catch(() => {}));
-	}
-
-	/**
-	 * Downloads the specified Whisper model if it is not already present locally.
-	 *
-	 * @param {string} model Whisper model that will be downloaded
-	 * @return {Promise<string>} Absolute path to model file, resolves once downloaded
-	 */
-	async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
-		if(!this.options.whisper?.binary) throw new Error('Whisper not configured');
-		if(!model.endsWith('.bin')) model += '.bin';
-		const p = Path.join(this.options.whisper.path, model);
-		if(await fs.stat(p).then(() => true).catch(() => false)) return p;
-		if(!!this.downloads[model]) return this.downloads[model];
-		this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)
-			.then(resp => resp.arrayBuffer())
-			.then(arr => Buffer.from(arr)).then(async buffer => {
-				await fs.writeFile(p, buffer);
-				delete this.downloads[model];
-				return p;
-			});
-		return this.downloads[model];
-	}
-
-	/**
-	 * Convert image to text using Optical Character Recognition
-	 * @param {string} path Path to image
-	 * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
-	 */
-	ocr(path: string): {abort: () => void, response: Promise<string | null>} {
-		let worker: any;
-		return {
-			abort: () => { worker?.terminate(); },
-			response: new Promise(async res => {
-				worker = await createWorker('eng');
-				const {data} = await worker.recognize(path);
-				await worker.terminate();
-				res(data.text.trim() || null);
-			})
-		}
-	}
-
-	/**
-	 * Compare the difference between two strings using tensor math
-	 * @param target Text that will checked
-	 * @param {string} searchTerms Multiple search terms to check against target
-	 * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
-	 */
-	semanticSimilarity(target: string, ...searchTerms: string[]) {
-		if(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');
-
-		const vector = (text: string, dimensions: number = 10): number[] => {
-			return text.toLowerCase().split('').map((char, index) =>
-				(char.charCodeAt(0) * (index + 1)) % dimensions / dimensions).slice(0, dimensions);
-		}
-
-		const cosineSimilarity = (v1: number[], v2: number[]): number => {
-			if (v1.length !== v2.length) throw new Error('Vectors must be same length');
-			const tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)
-			const dotProduct = tf.dot(tensor1, tensor2)
-			const magnitude1 = tf.norm(tensor1)
-			const magnitude2 = tf.norm(tensor2)
-			if(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0
-			return dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])
-		}
-
-		const v = vector(target);
-		const similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))
-		return {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}
-	}
 }
```
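Taken together, the `src/ai.ts` changes replace the flat `LLMOptions` mixin with a model map plus per-domain facades (`audio`, `language`, `vision`). A minimal wiring sketch, assuming the package root exports `Ai`; the model names and environment variables are placeholders, not values from this diff:

```ts
import {Ai} from '@ztimson/ai-utils';

// Placeholder model names/credentials for illustration only
const ai = new Ai({
	// path is optional: the constructor falls back to os.tmpdir()
	llm: {
		models: {
			// First entry becomes the default model
			'claude-sonnet': {proto: 'anthropic', token: process.env.ANTHROPIC_KEY!},
			'gpt-4o': {proto: 'openai', token: process.env.OPENAI_KEY!},
			'llama3': {proto: 'ollama', host: 'http://localhost:11434'},
		},
	},
	tesseract: {model: 'eng'},
});

// Each concern now lives behind its own facade:
// ai.audio (Whisper ASR), ai.language (LLMs), ai.vision (Tesseract OCR)
```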
src/antrhopic.ts

```diff
@@ -1,8 +1,8 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class Anthropic extends LLMProvider {
 	client!: anthropic;
@@ -19,7 +19,7 @@ export class Anthropic extends LLMProvider {
 			if(history[orgI].role == 'assistant') {
 				history[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {
 					i++;
-					history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});
+					history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input, timestamp: Date.now()});
 				});
 			} else if(history[orgI].role == 'user') {
 				history[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {
@@ -29,6 +29,7 @@ export class Anthropic extends LLMProvider {
 				}
 				history[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
 			}
+			if(!history[orgI].timestamp) history[orgI].timestamp = Date.now();
 		}
 		return history.filter(h => !!h.content);
 	}
@@ -44,20 +45,23 @@ export class Anthropic extends LLMProvider {
 				i++;
 			}
 		}
-		return history;
+		return history.map(({timestamp, ...h}) => h);
 	}
 
 	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
 		const controller = new AbortController();
 		const response = new Promise<any>(async (res, rej) => {
-			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
-			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
+			let history = [...options.history || [], {role: 'user', content: message, timestamp: Date.now()}];
+			if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min, options);
+			history = this.fromStandard(<any>history);
 
+			const tools = options.tools || this.ai.options.llm?.tools || [];
 			const requestParams: any = {
 				model: options.model || this.model,
-				max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
-				system: options.system || this.ai.options.system || '',
-				temperature: options.temperature || this.ai.options.temperature || 0.7,
-				tools: (options.tools || this.ai.options.tools || []).map(t => ({
+				max_tokens: options.max_tokens || this.ai.options.llm?.max_tokens || 4096,
+				system: options.system || this.ai.options.llm?.system || '',
+				temperature: options.temperature || this.ai.options.llm?.temperature || 0.7,
+				tools: tools.map(t => ({
 					name: t.name,
 					description: t.description,
 					input_schema: {
@@ -71,13 +75,18 @@ export class Anthropic extends LLMProvider {
 				stream: !!options.stream,
 			};
 
-			// Run tool changes
-			let resp: any;
+			let resp: any, isFirstMessage = true;
+			const assistantMessages: string[] = [];
 			do {
-				resp = await this.client.messages.create(requestParams);
+				resp = await this.client.messages.create(requestParams).catch(err => {
+					err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+					throw err;
+				});
 
 				// Streaming mode
 				if(options.stream) {
+					if(!isFirstMessage) options.stream({text: '\n\n'});
+					else isFirstMessage = false;
 					resp.content = [];
 					for await (const chunk of resp) {
 						if(controller.signal.aborted) break;
@@ -109,7 +118,8 @@ export class Anthropic extends LLMProvider {
 				if(toolCalls.length && !controller.signal.aborted) {
 					history.push({role: 'assistant', content: resp.content});
 					const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-						const tool = options.tools?.find(findByProp('name', toolCall.name));
+						const tool = tools.find(findByProp('name', toolCall.name));
+						if(options.stream) options.stream({tool: toolCall.name});
 						if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
 						try {
 							const result = await tool.fn(toolCall.input, this.ai);
@@ -122,12 +132,11 @@ export class Anthropic extends LLMProvider {
 					requestParams.messages = history;
 				}
 			} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
 
 			if(options.stream) options.stream({done: true});
-			res(this.toStandard([...history, {
-				role: 'assistant',
-				content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')
-			}]));
+			res(this.toStandard([...history, {role: 'assistant', content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')}]));
 		});
 
 		return Object.assign(response, {abort: () => controller.abort()});
 	}
 }
```
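With these changes `ask()` reports tool invocations through the stream callback and still resolves to the standardized history. A consumption sketch, assuming `ai` was configured as in the earlier example:

```ts
import {Ai, LLMMessage} from '@ztimson/ai-utils';

declare const ai: Ai; // configured elsewhere, as in the earlier sketch

const pending = ai.language.ask('Summarize our last conversation', {
	stream: chunk => {
		if(chunk.text) process.stdout.write(chunk.text);            // incremental tokens
		if(chunk.tool) console.error(`\n[calling: ${chunk.tool}]`); // new tool event
		if(chunk.done) console.error('\n[done]');
	},
});

// AbortablePromise: cancel mid-flight, e.g. on a 30s deadline
const timer = setTimeout(() => pending.abort(), 30_000);
pending.then((history: LLMMessage[]) => {
	clearTimeout(timer);
	console.log(`History now holds ${history.length} messages`);
});
```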
50 src/audio.ts Normal file

```diff
@@ -0,0 +1,50 @@
+import {spawn} from 'node:child_process';
+import fs from 'node:fs/promises';
+import Path from 'node:path';
+import {AbortablePromise, Ai} from './ai.ts';
+
+export class Audio {
+	private downloads: {[key: string]: Promise<string>} = {};
+	private whisperModel!: string;
+
+	constructor(private ai: Ai) {
+		if(ai.options.whisper?.binary) {
+			this.whisperModel = ai.options.whisper?.model.endsWith('.bin') ? ai.options.whisper?.model : ai.options.whisper?.model + '.bin';
+			this.downloadAsrModel();
+		}
+	}
+
+	asr(path: string, model: string = this.whisperModel): AbortablePromise<string | null> {
+		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
+		let abort: any = () => {};
+		const p = new Promise<string | null>(async (resolve, reject) => {
+			const m = await this.downloadAsrModel(model);
+			let output = '';
+			const proc = spawn(<string>this.ai.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});
+			abort = () => proc.kill('SIGTERM');
+			proc.on('error', (err: Error) => reject(err));
+			proc.stdout.on('data', (data: Buffer) => output += data.toString());
+			proc.on('close', (code: number) => {
+				if(code === 0) resolve(output.trim() || null);
+				else reject(new Error(`Exit code ${code}`));
+			});
+		});
+		return Object.assign(p, {abort});
+	}
+
+	async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
+		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
+		if(!model.endsWith('.bin')) model += '.bin';
+		const p = Path.join(<string>this.ai.options.path, model);
+		if(await fs.stat(p).then(() => true).catch(() => false)) return p;
+		if(!!this.downloads[model]) return this.downloads[model];
+		this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)
+			.then(resp => resp.arrayBuffer())
+			.then(arr => Buffer.from(arr)).then(async buffer => {
+				await fs.writeFile(p, buffer);
+				delete this.downloads[model];
+				return p;
+			});
+		return this.downloads[model];
+	}
+}
```
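Because `asr()` now wraps a spawned `whisper.cpp` process, `abort()` maps directly to a `SIGTERM`. A usage sketch; the binary and audio paths are placeholders:

```ts
import {Ai} from '@ztimson/ai-utils';

const ai = new Ai({
	whisper: {
		binary: '/usr/local/bin/whisper-cli', // placeholder path to a whisper.cpp build
		model: 'ggml-base.en',                // fetched into options.path on first use
	},
});

const job = ai.audio.asr('./meeting.wav'); // placeholder audio file
setTimeout(() => job.abort(), 60_000);     // kill the process if it runs too long
job.then(text => console.log(text ?? '<no speech detected>'))
	.catch(err => console.error('ASR failed:', err.message));
```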
11 src/embedder.ts Normal file

```diff
@@ -0,0 +1,11 @@
+import { pipeline } from '@xenova/transformers';
+import { parentPort } from 'worker_threads';
+
+let model: any;
+
+parentPort?.on('message', async ({ id, text }) => {
+	if(!model) model = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
+	const output = await model(text, { pooling: 'mean', normalize: true });
+	const embedding = Array.from(output.data);
+	parentPort?.postMessage({ id, embedding });
+});
```
src/index.ts

```diff
@@ -1,4 +1,9 @@
 export * from './ai';
 export * from './antrhopic';
+export * from './audio';
+export * from './embedder'
 export * from './llm';
+export * from './open-ai';
+export * from './provider';
 export * from './tools';
+export * from './vision';
```
166 src/llm.ts

```diff
@@ -1,16 +1,24 @@
 import {JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {Anthropic} from './antrhopic.ts';
-import {Ollama} from './ollama.ts';
 import {OpenAi} from './open-ai.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {AiTool} from './tools.ts';
+import {Worker} from 'worker_threads';
+import {fileURLToPath} from 'url';
+import {dirname, join} from 'path';
+
+export type AnthropicConfig = {proto: 'anthropic', token: string};
+export type OllamaConfig = {proto: 'ollama', host: string};
+export type OpenAiConfig = {proto: 'openai', host?: string, token: string};
 
 export type LLMMessage = {
 	/** Message originator */
 	role: 'assistant' | 'system' | 'user';
 	/** Message content */
 	content: string | any;
+	/** Timestamp */
+	timestamp?: number;
 } | {
 	/** Tool call */
 	role: 'tool';
@@ -24,34 +32,10 @@ export type LLMMessage = {
 	content: undefined | string;
 	/** Tool error */
 	error: undefined | string;
+	/** Timestamp */
+	timestamp?: number;
 }
 
-export type LLMOptions = {
-	/** Anthropic settings */
-	anthropic?: {
-		/** API Token */
-		token: string;
-		/** Default model */
-		model: string;
-	},
-	/** Ollama settings */
-	ollama?: {
-		/** connection URL */
-		host: string;
-		/** Default model */
-		model: string;
-	},
-	/** Open AI settings */
-	openAi?: {
-		/** API Token */
-		token: string;
-		/** Default model */
-		model: string;
-	},
-	/** Default provider & model */
-	model: string | [string, string];
-} & Omit<LLMRequest, 'model'>;
-
 export type LLMRequest = {
 	/** System prompt */
 	system?: string;
@@ -64,9 +48,9 @@ export type LLMRequest = {
 	/** Available tools */
 	tools?: AiTool[];
 	/** LLM model */
-	model?: string | [string, string];
+	model?: string;
 	/** Stream response */
-	stream?: (chunk: {text?: string, done?: true}) => any;
+	stream?: (chunk: {text?: string, tool?: string, done?: true}) => any;
 	/** Compress old messages in the chat to free up context */
 	compress?: {
 		/** Trigger chat compression once context exceeds the token count */
@@ -77,12 +61,29 @@ export type LLMRequest = {
 	}
 
 export class LLM {
-	private providers: {[key: string]: LLMProvider} = {};
+	private embedWorker: Worker | null = null;
+	private embedQueue = new Map<number, { resolve: (value: number[]) => void; reject: (error: any) => void }>();
+	private embedId = 0;
+	private models: {[model: string]: LLMProvider} = {};
+	private defaultModel!: string;
 
-	constructor(public readonly ai: Ai, public readonly options: LLMOptions) {
-		if(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
-		if(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
-		if(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
+	constructor(public readonly ai: Ai) {
+		this.embedWorker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'embedder.js'));
+		this.embedWorker.on('message', ({ id, embedding }) => {
+			const pending = this.embedQueue.get(id);
+			if (pending) {
+				pending.resolve(embedding);
+				this.embedQueue.delete(id);
+			}
+		});
+
+		if(!ai.options.llm?.models) return;
+		Object.entries(ai.options.llm.models).forEach(([model, config]) => {
+			if(!this.defaultModel) this.defaultModel = model;
+			if(config.proto == 'anthropic') this.models[model] = new Anthropic(this.ai, config.token, model);
+			else if(config.proto == 'ollama') this.models[model] = new OpenAi(this.ai, config.host, 'not-needed', model);
+			else if(config.proto == 'openai') this.models[model] = new OpenAi(this.ai, config.host || null, config.token, model);
+		});
 	}
 
 	/**
@@ -92,17 +93,9 @@ export class LLM {
 	 * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
 	 */
 	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
-		let model: any = [null, null];
-		if(options.model) {
-			if(typeof options.model == 'object') model = options.model;
-			else model = [options.model, (<any>this.options)[options.model]?.model];
-		}
-		if(!options.model || model[1] == null) {
-			if(typeof this.options.model == 'object') model = this.options.model;
-			else model = [this.options.model, (<any>this.options)[this.options.model]?.model];
-		}
-		if(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);
-		return this.providers[model[0]].ask(message, {...options, model: model[1]});
+		const m = options.model || this.defaultModel;
+		if(!this.models[m]) throw new Error(`Model does not exist: ${m}`);
+		return this.models[m].ask(message, options);
 	}
 
 	/**
@@ -113,7 +106,7 @@ export class LLM {
 	 * @param {LLMRequest} options LLM options
 	 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 	 */
-	async compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {
+	async compressHistory(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {
 		if(this.estimateTokens(history) < max) return history;
 		let keep = 0, tokens = 0;
 		for(let m of history.toReversed()) {
@@ -125,7 +118,65 @@ export class LLM {
 		const recent = keep == 0 ? [] : history.slice(-keep),
 			process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
 		const summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), 250, options);
-		return [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];
+		return [{role: 'assistant', content: `Conversation Summary: ${summary}`, timestamp: Date.now()}, ...recent];
+	}
+
+	cosineSimilarity(v1: number[], v2: number[]): number {
+		if (v1.length !== v2.length) throw new Error('Vectors must be same length');
+		let dotProduct = 0, normA = 0, normB = 0;
+		for (let i = 0; i < v1.length; i++) {
+			dotProduct += v1[i] * v2[i];
+			normA += v1[i] * v1[i];
+			normB += v2[i] * v2[i];
+		}
+		const denominator = Math.sqrt(normA) * Math.sqrt(normB);
+		return denominator === 0 ? 0 : dotProduct / denominator;
+	}
+
+	chunk(target: object | string, maxTokens = 500, overlapTokens = 50): string[] {
+		const objString = (obj: any, path = ''): string[] => {
+			if(!obj) return [];
+			return Object.entries(obj).flatMap(([key, value]) => {
+				const p = path ? `${path}${isNaN(+key) ? `.${key}` : `[${key}]`}` : key;
+				if(typeof value === 'object' && !Array.isArray(value)) return objString(value, p);
+				return `${p}: ${Array.isArray(value) ? value.join(', ') : value}`;
+			});
+		};
+
+		const lines = typeof target === 'object' ? objString(target) : target.split('\n');
+		const tokens = lines.flatMap(l => [...l.split(/\s+/).filter(Boolean), '\n']);
+		const chunks: string[] = [];
+		for(let i = 0; i < tokens.length;) {
+			let text = '', j = i;
+			while(j < tokens.length) {
+				const next = text + (text ? ' ' : '') + tokens[j];
+				if(this.estimateTokens(next.replace(/\s*\n\s*/g, '\n')) > maxTokens && text) break;
+				text = next;
+				j++;
+			}
+			const clean = text.replace(/\s*\n\s*/g, '\n').trim();
+			if(clean) chunks.push(clean);
+			i = Math.max(j - overlapTokens, j === i ? i + 1 : j);
+		}
+		return chunks;
+	}
+
+	embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+		const embed = (text: string): Promise<number[]> => {
+			return new Promise((resolve, reject) => {
+				const id = this.embedId++;
+				this.embedQueue.set(id, { resolve, reject });
+				this.embedWorker?.postMessage({ id, text });
+			});
+		};
+
+		const chunks = this.chunk(target, maxTokens, overlapTokens);
+		return Promise.all(chunks.map(async (text, index) => ({
+			index,
+			embedding: await embed(text),
+			text,
+			tokens: this.estimateTokens(text),
+		})));
 	}
 
 	/**
@@ -138,6 +189,23 @@ export class LLM {
 		return Math.ceil((text.length / 4) * 1.2);
 	}
+
+	/**
+	 * Compare the difference between two strings using tensor math
+	 * @param target Text that will checked
+	 * @param {string} searchTerms Multiple search terms to check against target
+	 * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
+	 */
+	fuzzyMatch(target: string, ...searchTerms: string[]) {
+		if(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');
+		const vector = (text: string, dimensions: number = 10): number[] => {
+			return text.toLowerCase().split('').map((char, index) =>
+				(char.charCodeAt(0) * (index + 1)) % dimensions / dimensions).slice(0, dimensions);
+		}
+		const v = vector(target);
+		const similarities = searchTerms.map(t => vector(t)).map(refVector => this.cosineSimilarity(v, refVector))
+		return {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}
+	}
 
 	/**
 	 * Ask a question with JSON response
 	 * @param {string} message Question
```
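The new `chunk()`/`embedding()` pair plus the pure-JS `cosineSimilarity()` is enough for a small in-memory semantic search, with the model work isolated in the worker above. A sketch, assuming the compiled `embedder.js` sits beside the main bundle as the vite config at the bottom of this diff arranges:

```ts
import {Ai} from '@ztimson/ai-utils';

declare const ai: Ai; // configured elsewhere

async function semanticSearch(query: string, documents: string[], topK = 3) {
	// Embed every document; each yields {index, embedding, text, tokens} per chunk
	const chunks = (await Promise.all(documents.map(d => ai.language.embedding(d)))).flat();
	const [q] = await ai.language.embedding(query); // a short query produces a single chunk
	return chunks
		.map(c => ({text: c.text, score: ai.language.cosineSimilarity(q.embedding, c.embedding)}))
		.sort((a, b) => b.score - a.score)
		.slice(0, topK);
}
```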
113 src/ollama.ts

```diff
@@ -1,113 +0,0 @@
-import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
-import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
-import {Ollama as ollama} from 'ollama';
-
-export class Ollama extends LLMProvider {
-	client!: ollama;
-
-	constructor(public readonly ai: Ai, public host: string, public model: string) {
-		super();
-		this.client = new ollama({host});
-	}
-
-	private toStandard(history: any[]): LLMMessage[] {
-		for(let i = 0; i < history.length; i++) {
-			if(history[i].role == 'assistant' && history[i].tool_calls) {
-				if(history[i].content) delete history[i].tool_calls;
-				else {
-					history.splice(i, 1);
-					i--;
-				}
-			} else if(history[i].role == 'tool') {
-				const error = history[i].content.startsWith('{"error":');
-				history[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};
-			}
-		}
-		return history;
-	}
-
-	private fromStandard(history: LLMMessage[]): any[] {
-		return history.map((h: any) => {
-			if(h.role != 'tool') return h;
-			return {role: 'tool', tool_name: h.name, content: h.error || h.content}
-		});
-	}
-
-	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
-		const controller = new AbortController();
-		const response = new Promise<any>(async (res, rej) => {
-			let system = options.system || this.ai.options.system;
-			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
-			if(history[0].roll == 'system') {
-				if(!system) system = history.shift();
-				else history.shift();
-			}
-			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);
-			if(options.system) history.unshift({role: 'system', content: system})
-
-			const requestParams: any = {
-				model: options.model || this.model,
-				messages: history,
-				stream: !!options.stream,
-				signal: controller.signal,
-				options: {
-					temperature: options.temperature || this.ai.options.temperature || 0.7,
-					num_predict: options.max_tokens || this.ai.options.max_tokens || 4096,
-				},
-				tools: (options.tools || this.ai.options.tools || []).map(t => ({
-					type: 'function',
-					function: {
-						name: t.name,
-						description: t.description,
-						parameters: {
-							type: 'object',
-							properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
-							required: t.args ? Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []
-						}
-					}
-				}))
-			}
-
-			// Run tool chains
-			let resp: any;
-			do {
-				resp = await this.client.chat(requestParams);
-				if(options.stream) {
-					resp.message = {role: 'assistant', content: '', tool_calls: []};
-					for await (const chunk of resp) {
-						if(controller.signal.aborted) break;
-						if(chunk.message?.content) {
-							resp.message.content += chunk.message.content;
-							options.stream({text: chunk.message.content});
-						}
-						if(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
-						if(chunk.done) break;
-					}
-				}
-
-				// Run tools
-				if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
-					history.push(resp.message);
-					const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
-						const tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));
-						if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
-						const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
-						try {
-							const result = await tool.fn(args, this.ai);
-							return {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};
-						} catch (err: any) {
-							return {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};
-						}
-					}));
-					history.push(...results);
-					requestParams.messages = history;
-				}
-			} while (!controller.signal.aborted && resp.message?.tool_calls?.length);
-			if(options.stream) options.stream({done: true});
-			res(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));
-		});
-		return Object.assign(response, {abort: () => controller.abort()});
-	}
-}
```
src/open-ai.ts

```diff
@@ -1,15 +1,18 @@
 import {OpenAI as openAI} from 'openai';
-import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, clean} from '@ztimson/utils';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class OpenAi extends LLMProvider {
 	client!: openAI;
 
-	constructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {
+	constructor(public readonly ai: Ai, public readonly host: string | null, public readonly token: string, public model: string) {
 		super();
-		this.client = new openAI({apiKey: apiToken});
+		this.client = new openAI(clean({
+			baseURL: host,
+			apiKey: token
+		}));
 	}
 
 	private toStandard(history: any[]): LLMMessage[] {
@@ -20,7 +23,8 @@ export class OpenAi extends LLMProvider {
 				role: 'tool',
 				id: tc.id,
 				name: tc.function.name,
-				args: JSONAttemptParse(tc.function.arguments, {})
+				args: JSONAttemptParse(tc.function.arguments, {}),
+				timestamp: h.timestamp
 			}));
 			history.splice(i, 1, ...tools);
 			i += tools.length - 1;
@@ -33,7 +37,7 @@ export class OpenAi extends LLMProvider {
 				history.splice(i, 1);
 				i--;
 			}
+			if(!history[i]?.timestamp) history[i].timestamp = Date.now();
 		}
 		return history;
 	}
@@ -46,14 +50,15 @@ export class OpenAi extends LLMProvider {
 				content: null,
 				tool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],
 				refusal: null,
-				annotations: [],
+				annotations: []
 			}, {
 				role: 'tool',
 				tool_call_id: h.id,
 				content: h.error || h.content
 			});
 		} else {
-			result.push(h);
+			const {timestamp, ...rest} = h;
+			result.push(rest);
 		}
 		return result;
 	}, [] as any[]);
@@ -62,16 +67,18 @@ export class OpenAi extends LLMProvider {
 	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
 		const controller = new AbortController();
 		const response = new Promise<any>(async (res, rej) => {
-			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
-			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
+			let history = [...options.history || [], {role: 'user', content: message, timestamp: Date.now()}];
+			if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min, options);
+			history = this.fromStandard(<any>history);
 
+			const tools = options.tools || this.ai.options.llm?.tools || [];
 			const requestParams: any = {
 				model: options.model || this.model,
 				messages: history,
 				stream: !!options.stream,
-				max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
-				temperature: options.temperature || this.ai.options.temperature || 0.7,
-				tools: (options.tools || this.ai.options.tools || []).map(t => ({
+				max_tokens: options.max_tokens || this.ai.options.llm?.max_tokens || 4096,
+				temperature: options.temperature || this.ai.options.llm?.temperature || 0.7,
+				tools: tools.map(t => ({
 					type: 'function',
 					function: {
 						name: t.name,
@@ -85,28 +92,35 @@ export class OpenAi extends LLMProvider {
 				}))
 			};
 
-			// Tool call and streaming logic similar to other providers
-			let resp: any;
+			let resp: any, isFirstMessage = true;
 			do {
-				resp = await this.client.chat.completions.create(requestParams);
+				resp = await this.client.chat.completions.create(requestParams).catch(err => {
+					err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+					throw err;
+				});
 
-				// Implement streaming and tool call handling
 				if(options.stream) {
-					resp.choices = [];
+					if(!isFirstMessage) options.stream({text: '\n\n'});
+					else isFirstMessage = false;
+					resp.choices = [{message: {content: '', tool_calls: []}}];
 					for await (const chunk of resp) {
 						if(controller.signal.aborted) break;
 						if(chunk.choices[0].delta.content) {
+							resp.choices[0].message.content += chunk.choices[0].delta.content;
 							options.stream({text: chunk.choices[0].delta.content});
 						}
+						if(chunk.choices[0].delta.tool_calls) {
+							resp.choices[0].message.tool_calls = chunk.choices[0].delta.tool_calls;
+						}
 					}
 				}
 
-				// Run tools
 				const toolCalls = resp.choices[0].message.tool_calls || [];
 				if(toolCalls.length && !controller.signal.aborted) {
 					history.push(resp.choices[0].message);
 					const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-						const tool = options.tools?.find(findByProp('name', toolCall.function.name));
+						const tool = tools?.find(findByProp('name', toolCall.function.name));
+						if(options.stream) options.stream({tool: toolCall.function.name});
 						if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
 						try {
 							const args = JSONAttemptParse(toolCall.function.arguments, {});
@@ -124,7 +138,6 @@ export class OpenAi extends LLMProvider {
 			if(options.stream) options.stream({done: true});
 			res(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));
 		});
 
 		return Object.assign(response, {abort: () => controller.abort()});
 	}
 }
```
src/provider.ts

```diff
@@ -1,6 +1,5 @@
-import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';
-
-export type AbortablePromise<T> = Promise<T> & {abort: () => void};
+import {AbortablePromise} from './ai.ts';
+import {LLMMessage, LLMRequest} from './llm.ts';
 
 export abstract class LLMProvider {
 	abstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;
```
41 src/tools.ts

```diff
@@ -1,3 +1,4 @@
+import * as cheerio from 'cheerio';
 import {$, $Sync} from '@ztimson/node-utils';
 import {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';
 import {Ai} from './ai.ts';
@@ -111,9 +112,43 @@ export const PythonTool: AiTool = {
 	fn: async (args: {code: string}) => ({result: $Sync`python -c "${args.code}"`})
 }
 
-export const SearchTool: AiTool = {
-	name: 'search',
-	description: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',
+export const ReadWebpageTool: AiTool = {
+	name: 'read_webpage',
+	description: 'Extract clean, structured content from a webpage. Use after web_search to read specific URLs',
+	args: {
+		url: {type: 'string', description: 'URL to extract content from', required: true},
+		focus: {type: 'string', description: 'Optional: What aspect to focus on (e.g., "pricing", "features", "contact info")'}
+	},
+	fn: async (args: {url: string; focus?: string}) => {
+		const html = await fetch(args.url, {headers: {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"}})
+			.then(r => r.text()).catch(err => {throw new Error(`Failed to fetch: ${err.message}`)});
+
+		const $ = cheerio.load(html);
+		$('script, style, nav, footer, header, aside, iframe, noscript, [role="navigation"], [role="banner"], .ad, .ads, .cookie, .popup').remove();
+		const metadata = {
+			title: $('meta[property="og:title"]').attr('content') || $('title').text() || '',
+			description: $('meta[name="description"]').attr('content') || $('meta[property="og:description"]').attr('content') || '',
+		};
+
+		let content = '';
+		const contentSelectors = ['article', 'main', '[role="main"]', '.content', '.post', '.entry', 'body'];
+		for (const selector of contentSelectors) {
+			const el = $(selector).first();
+			if (el.length && el.text().trim().length > 200) {
+				content = el.text();
+				break;
+			}
+		}
+		if (!content) content = $('body').text();
+		content = content.replace(/\s+/g, ' ').trim().slice(0, 8000);
+
+		return {url: args.url, title: metadata.title.trim(), description: metadata.description.trim(), content, focus: args.focus};
+	}
+}
+
+export const WebSearchTool: AiTool = {
+	name: 'web_search',
+	description: 'Use duckduckgo (anonymous) to find find relevant online resources. Returns a list of URLs that works great with the `read_webpage` tool',
 	args: {
 		query: {type: 'string', description: 'Search string', required: true},
 		length: {type: 'string', description: 'Number of results to return', default: 5},
```
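The two tools are meant to chain: `web_search` surfaces URLs and `read_webpage` scrapes them through cheerio. A registration sketch; the model entry and question are placeholders:

```ts
import {Ai, ReadWebpageTool, WebSearchTool} from '@ztimson/ai-utils';

const ai = new Ai({
	llm: {
		models: {'gpt-4o': {proto: 'openai', token: process.env.OPENAI_KEY!}}, // placeholder
		tools: [WebSearchTool, ReadWebpageTool], // defaults for every ask()
	},
});

// The LLM can now call web_search for URLs, then read_webpage on each hit
ai.language.ask('What changed in the latest Node.js LTS release?')
	.then(history => console.log(history.at(-1)?.content));
```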
23 src/vision.ts Normal file

```diff
@@ -0,0 +1,23 @@
+import {createWorker} from 'tesseract.js';
+import {AbortablePromise, Ai} from './ai.ts';
+
+export class Vision {
+
+	constructor(private ai: Ai) { }
+
+	/**
+	 * Convert image to text using Optical Character Recognition
+	 * @param {string} path Path to image
+	 * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
+	 */
+	ocr(path: string): AbortablePromise<string | null> {
+		let worker: any;
+		const p = new Promise<string | null>(async res => {
+			worker = await createWorker(this.ai.options.tesseract?.model || 'eng', 2, {cachePath: this.ai.options.path});
+			const {data} = await worker.recognize(path);
+			await worker.terminate();
+			res(data.text.trim() || null);
+		});
+		return Object.assign(p, {abort: () => worker?.terminate()});
+	}
+}
```
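`ocr()` follows the same `AbortablePromise` pattern, pulling the Tesseract model and cache path from `AiOptions`. A short sketch with a placeholder image path:

```ts
import {Ai} from '@ztimson/ai-utils';

const ai = new Ai({tesseract: {model: 'eng_best'}}); // cache lands in options.path

const scan = ai.vision.ocr('./invoice.png'); // placeholder image
// scan.abort() terminates the underlying Tesseract worker if needed
scan.then(text => console.log(text ?? '<no text found>'));
```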
vite.config.ts

```diff
@@ -1,12 +1,19 @@
 import {defineConfig} from 'vite';
 import dts from 'vite-plugin-dts';
+import {resolve} from 'path';
 
 export default defineConfig({
 	build: {
 		lib: {
-			entry: './src/index.ts',
+			entry: {
+				index: './src/index.ts',
+				embedder: './src/embedder.ts',
+			},
 			name: 'utils',
-			fileName: (format) => (format === 'es' ? 'index.mjs' : 'index.js'),
+			fileName: (format, entryName) => {
+				if (entryName === 'embedder') return 'embedder.js';
+				return format === 'es' ? 'index.mjs' : 'index.js';
+			},
 		},
 		ssr: true,
 		emptyOutDir: true,
```