Compare commits

8 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 28904cddbe | |
| | d5bf1ec47e | |
| | cb60a0b0c5 | |
| | 1c59379c7d | |
| | 6dce0e8954 | |
| | 98dd0bb323 | |
| | ca5a2334bb | |
| | 3cd7b12f5f | |
**package-lock.json** (generated, 1472 changes): diff suppressed because it is too large.
**package.json**

```diff
@@ -1,6 +1,6 @@
 {
 	"name": "@ztimson/ai-utils",
-	"version": "0.2.1",
+	"version": "0.3.0",
 	"description": "AI Utility library",
 	"author": "Zak Timson",
 	"license": "MIT",
@@ -42,6 +42,7 @@
 		"vite-plugin-dts": "^4.5.3"
 	},
 	"files": [
+		"bin",
 		"dist"
 	]
 }
```
**src/ai.ts** (23 changes)

```diff
@@ -1,22 +1,33 @@
+import * as os from 'node:os';
 import {LLM, LLMOptions} from './llm';
 import { Audio } from './audio.ts';
 import {Vision} from './vision.ts';
 
+export type AbortablePromise<T> = Promise<T> & {abort: () => any};
+
 export type AiOptions = LLMOptions & {
+	/** Path to models */
+	path?: string;
+	/** Piper TTS configuration */
+	piper?: {
+		/** Model URL: `https://huggingface.co/rhasspy/piper-voices/tree/main/.../model.onnx` */
+		model: string;
+	},
+	/** Tesseract OCR configuration */
+	tesseract?: {
+		/** Model: eng, eng_best, eng_fast */
+		model?: string;
+	}
+	/** Whisper ASR configuration */
 	whisper?: {
 		/** Whisper binary location */
 		binary: string;
 		/** Model: `ggml-base.en.bin` */
 		model: string;
-		/** Path to models */
-		path: string;
 	}
 }
 
 export class Ai {
-	private downloads: {[key: string]: Promise<string>} = {};
-	private whisperModel!: string;
-
 	/** Audio processing AI */
 	audio!: Audio;
 	/** Language processing AI */
@@ -25,6 +36,8 @@ export class Ai {
 	vision!: Vision;
 
 	constructor(public readonly options: AiOptions) {
+		if(!options.path) options.path = os.tmpdir();
+		process.env.TRANSFORMERS_CACHE = options.path;
 		this.audio = new Audio(this);
 		this.language = new LLM(this);
 		this.vision = new Vision(this);
```
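For orientation, a minimal construction sketch against the options added above; the paths, binary location, and model URL are hypothetical placeholders, not values from this repository:

```ts
import {Ai} from '@ztimson/ai-utils';

const ai = new Ai({
	path: '/var/models', // optional; defaults to os.tmpdir() per the constructor above
	whisper: {binary: '/usr/local/bin/whisper-cli', model: 'ggml-base.en'}, // hypothetical binary path
	piper: {model: 'https://example.com/voices/en_US-voice.onnx'}, // must be an http(s) URL ending in .onnx
	tesseract: {model: 'eng'},
});
```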
**src/antrhopic.ts**

```diff
@@ -1,8 +1,8 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, deepCopy} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class Anthropic extends LLMProvider {
 	client!: anthropic;
@@ -54,12 +54,14 @@ export class Anthropic extends LLMProvider {
 		let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 		const original = deepCopy(history);
 		if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min, options);
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
 			system: options.system || this.ai.options.system || '',
 			temperature: options.temperature || this.ai.options.temperature || 0.7,
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				name: t.name,
 				description: t.description,
 				input_schema: {
@@ -76,7 +78,10 @@ export class Anthropic extends LLMProvider {
 		let resp: any, isFirstMessage = true;
 		const assistantMessages: string[] = [];
 		do {
-			resp = await this.client.messages.create(requestParams);
+			resp = await this.client.messages.create(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			// Streaming mode
 			if(options.stream) {
@@ -114,7 +119,8 @@ export class Anthropic extends LLMProvider {
 			history.push({role: 'assistant', content: resp.content});
 			original.push({role: 'assistant', content: resp.content});
 			const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-				const tool = options.tools?.find(findByProp('name', toolCall.name));
+				const tool = tools.find(findByProp('name', toolCall.name));
+				if(options.stream) options.stream({tool: toolCall.name});
 				if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
 				try {
 					const result = await tool.fn(toolCall.input, this.ai);
```
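With `tool` now emitted through the stream callback (see the `LLMRequest` change in src/llm.ts below), a consumer can surface tool activity mid-stream. A sketch using the constructor arguments shown in src/llm.ts; the model name and `tools` array are assumed:

```ts
const provider = new Anthropic(ai, process.env.ANTHROPIC_KEY!, 'claude-model-name'); // hypothetical model
const resp = provider.ask('What is the weather in Toronto?', {
	tools, // assumed AiTool[] defined elsewhere
	stream: chunk => {
		if(chunk.text) process.stdout.write(chunk.text);
		if(chunk.tool) console.error(`[tool: ${chunk.tool}]`); // fires once per tool call
	},
});
// resp.abort(); // AbortablePromise: cancel mid-generation if needed
const messages = await resp;
```

The same `tools` hoisting, error-context `catch`, and `stream({tool})` event land in the Ollama and OpenAi providers below.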
**src/audio.ts** (88 changes)

```diff
@@ -1,30 +1,35 @@
 import {spawn} from 'node:child_process';
+import * as os from 'node:os';
+import {platform, arch} from 'node:os';
 import fs from 'node:fs/promises';
 import Path from 'node:path';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 
 export class Audio {
 	private downloads: {[key: string]: Promise<string>} = {};
 	private whisperModel!: string;
+	private piperBinary?: string;
 
 	constructor(private ai: Ai) {
 		if(ai.options.whisper?.binary) {
 			this.whisperModel = ai.options.whisper?.model.endsWith('.bin') ? ai.options.whisper?.model : ai.options.whisper?.model + '.bin';
 			this.downloadAsrModel();
 		}
+		if(ai.options.piper?.model) {
+			if(!ai.options.piper.model.startsWith('http') || !ai.options.piper.model.endsWith('.onnx'))
+				throw new Error('Piper model should be a URL to an onnx file to download');
+			if(platform() != 'linux' || (arch() != 'x64' && arch() != 'arm64'))
+				throw new Error('Piper TTS only supported on Linux x64/arm64');
+			this.piperBinary = Path.join(import.meta.dirname, '../bin/piper');
+			this.downloadTtsModel();
+		}
 	}
 
-	/**
-	 * Convert audio to text using Auditory Speech Recognition
-	 * @param {string} path Path to audio
-	 * @param model Whisper model
-	 * @returns {Promise<any>} Extracted text
-	 */
-	asr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {
+	asr(path: string, model: string = this.whisperModel): AbortablePromise<string | null> {
 		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
 		let abort: any = () => {};
-		const response = new Promise<string | null>((resolve, reject) => {
-			this.downloadAsrModel(model).then(m => {
+		const p = new Promise<string | null>(async (resolve, reject) => {
+			const m = await this.downloadAsrModel(model);
 			let output = '';
 			const proc = spawn(<string>this.ai.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});
 			abort = () => proc.kill('SIGTERM');
@@ -35,20 +40,45 @@ export class Audio {
 				else reject(new Error(`Exit code ${code}`));
 			});
 		});
-			});
-		return {response, abort};
+		return Object.assign(p, {abort});
 	}
 
+	tts(text: string, outputPath?: string, model: string = <string>this.ai.options.piper?.model): AbortablePromise<Buffer | string> {
+		if(!this.piperBinary) throw new Error('Piper not configured');
+		if(!model) throw new Error('Invalid Piper model');
+		let abort: any = () => {};
+		const p = new Promise<Buffer | string>(async (resolve, reject) => {
+			const modelPath = await this.downloadTtsModel(model);
+			const tmpFile = outputPath || Path.join(os.tmpdir(), `piper_${Date.now()}.wav`);
+			const proc = spawn(<string>this.piperBinary, ['--model', modelPath, '--output_file', tmpFile], {
+				stdio: ['pipe', 'ignore', 'ignore'],
+				env: {...process.env, LD_LIBRARY_PATH: Path.dirname(<string>this.piperBinary)}
+			});
+			abort = () => proc.kill('SIGTERM');
+			proc.stdin.write(text);
+			proc.stdin.end();
+			proc.on('error', (err: Error) => reject(err));
+			proc.on('close', async (code: number) => {
+				if(code === 0) {
+					if(outputPath) {
+						resolve(outputPath);
+					} else {
+						const buffer = await fs.readFile(tmpFile);
+						await fs.unlink(tmpFile).catch(() => {});
+						resolve(buffer);
+					}
+				} else {
+					reject(new Error(`Exit code ${code}`));
+				}
+			});
+		});
+		return Object.assign(p, {abort});
+	}
 
-	/**
-	 * Downloads the specified Whisper model if it is not already present locally.
-	 *
-	 * @param {string} model Whisper model that will be downloaded
-	 * @return {Promise<string>} Absolute path to model file, resolves once downloaded
-	 */
 	async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
 		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
 		if(!model.endsWith('.bin')) model += '.bin';
-		const p = Path.join(this.ai.options.whisper.path, model);
+		const p = Path.join(<string>this.ai.options.path, model);
 		if(await fs.stat(p).then(() => true).catch(() => false)) return p;
 		if(!!this.downloads[model]) return this.downloads[model];
 		this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)
@@ -60,4 +90,24 @@ export class Audio {
 		});
 		return this.downloads[model];
 	}
+
+	async downloadTtsModel(model: string = <string>this.ai.options.piper?.model): Promise<string> {
+		if(!model) throw new Error('Invalid Piper model');
+		const m = <string>model.split('/').pop();
+		const p = Path.join(<string>this.ai.options.path, m);
+		const [onnxExists, jsonExists] = await Promise.all([
+			fs.stat(p).then(() => true).catch(() => false),
+			fs.stat(p + '.json').then(() => true).catch(() => false)
+		]);
+		if(onnxExists && jsonExists) return p;
+		if(!!this.downloads[m]) return this.downloads[m];
+		this.downloads[m] = Promise.all([
+			onnxExists ? Promise.resolve() : fetch(model).then(r => r.arrayBuffer()).then(b => fs.writeFile(p, Buffer.from(b))),
+			jsonExists ? Promise.resolve() : fetch(model + '.json').then(r => r.arrayBuffer()).then(b => fs.writeFile(p + '.json', Buffer.from(b)))
+		]).then(() => {
+			delete this.downloads[m];
+			return p;
+		});
+		return this.downloads[m];
+	}
 }
```
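Both methods now return the `AbortablePromise` shape instead of an `{abort, response}` pair. A usage sketch, with hypothetical file paths and the `ai` instance from the earlier sketch:

```ts
const transcript = ai.audio.asr('/tmp/meeting.wav'); // AbortablePromise<string | null>
const timer = setTimeout(() => transcript.abort(), 30_000); // kill whisper if it runs long
console.log(await transcript);
clearTimeout(timer);

const wav = await ai.audio.tts('Hello world');       // resolves a Buffer when no outputPath is given
await ai.audio.tts('Hello world', '/tmp/hello.wav'); // resolves the output path instead
```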
**src/embedder.ts** (new file, 11 lines)

```diff
@@ -0,0 +1,11 @@
+import { pipeline } from '@xenova/transformers';
+import { parentPort } from 'worker_threads';
+
+let model: any;
+
+parentPort?.on('message', async ({ id, text }) => {
+	if(!model) model = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
+	const output = await model(text, { pooling: 'mean', normalize: true });
+	const embedding = Array.from(output.data);
+	parentPort?.postMessage({ id, embedding });
+});
```
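The worker's message protocol is `{id, text}` in and `{id, embedding}` out. A standalone sketch of driving it, mirroring what src/llm.ts does below; the bundle path assumes the vite build at the end of this diff:

```ts
import {Worker} from 'worker_threads';

const worker = new Worker('./dist/embedder.js'); // built by the vite config below
worker.on('message', ({id, embedding}: {id: number, embedding: number[]}) => {
	console.log(`embedding #${id}: ${embedding.length} dims`); // all-MiniLM-L6-v2 produces 384 dims
	worker.terminate();
});
worker.postMessage({id: 0, text: 'hello world'});
```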
**src/llm.ts** (91 changes)

```diff
@@ -1,12 +1,13 @@
-import {pipeline} from '@xenova/transformers';
 import {JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {Anthropic} from './antrhopic.ts';
 import {Ollama} from './ollama.ts';
 import {OpenAi} from './open-ai.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {AiTool} from './tools.ts';
-import * as tf from '@tensorflow/tfjs';
+import {Worker} from 'worker_threads';
+import {fileURLToPath} from 'url';
+import {dirname, join} from 'path';
 
 export type LLMMessage = {
 	/** Message originator */
@@ -72,7 +73,7 @@ export type LLMRequest = {
 	/** LLM model */
 	model?: string | [string, string];
 	/** Stream response */
-	stream?: (chunk: {text?: string, done?: true}) => any;
+	stream?: (chunk: {text?: string, tool?: string, done?: true}) => any;
 	/** Compress old messages in the chat to free up context */
 	compress?: {
 		/** Trigger chat compression once context exceeds the token count */
@@ -83,11 +84,22 @@ export type LLMRequest = {
 }
 
 export class LLM {
-	private embedModel: any;
+	private embedWorker: Worker | null = null;
+	private embedQueue = new Map<number, { resolve: (value: number[]) => void; reject: (error: any) => void }>();
+	private embedId = 0;
 	private providers: {[key: string]: LLMProvider} = {};
 
 	constructor(public readonly ai: Ai) {
-		this.embedModel = pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
+		this.embedWorker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'embedder.js'));
+		this.embedWorker.on('message', ({ id, embedding }) => {
+			const pending = this.embedQueue.get(id);
+			if (pending) {
+				pending.resolve(embedding);
+				this.embedQueue.delete(id);
+			}
+		});
+
 		if(ai.options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, ai.options.anthropic.token, ai.options.anthropic.model);
 		if(ai.options.ollama?.host) this.providers.ollama = new Ollama(this.ai, ai.options.ollama.host, ai.options.ollama.model);
 		if(ai.options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, ai.options.openAi.token, ai.options.openAi.model);
@@ -148,49 +160,44 @@ export class LLM {
 		return denominator === 0 ? 0 : dotProduct / denominator;
 	}
 
-	embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+	chunk(target: object | string, maxTokens = 500, overlapTokens = 50): string[] {
 		const objString = (obj: any, path = ''): string[] => {
-			if(obj === null || obj === undefined) return [];
+			if(!obj) return [];
 			return Object.entries(obj).flatMap(([key, value]) => {
 				const p = path ? `${path}${isNaN(+key) ? `.${key}` : `[${key}]`}` : key;
-				if(typeof value === 'object' && value !== null && !Array.isArray(value)) return objString(value, p);
-				const valueStr = Array.isArray(value) ? value.join(', ') : String(value);
-				return `${p}: ${valueStr}`;
+				if(typeof value === 'object' && !Array.isArray(value)) return objString(value, p);
+				return `${p}: ${Array.isArray(value) ? value.join(', ') : value}`;
 			});
 		};
 
-		const embed = async (text: string): Promise<number[]> => {
-			const model = await this.embedModel;
-			const output = await model(text, {pooling: 'mean', normalize: true});
-			return Array.from(output.data);
-		};
-
-		// Tokenize
-		const lines = typeof target === 'object' ? objString(target) : target.split('\n');
-		const tokens = lines.flatMap(line => [...line.split(/\s+/).filter(w => w.trim()), '\n']);
-
-		// Chunk
-		const chunks: string[] = [];
-		let start = 0;
-		while (start < tokens.length) {
-			let end = start;
-			let text = '';
-			// Build chunk
-			while (end < tokens.length) {
-				const nextToken = tokens[end];
-				const testText = text + (text ? ' ' : '') + nextToken;
-				const testTokens = this.estimateTokens(testText.replace(/\s*\n\s*/g, '\n'));
-				if (testTokens > maxTokens && text) break;
-				text = testText;
-				end++;
-			}
-			// Save chunk
-			const cleanText = text.replace(/\s*\n\s*/g, '\n').trim();
-			if(cleanText) chunks.push(cleanText);
-			start = end - overlapTokens;
-			if (start <= end - tokens.length + end) start = end; // Safety: prevent infinite loop
-		}
-
+		const lines = typeof target === 'object' ? objString(target) : target.split('\n');
+		const tokens = lines.flatMap(l => [...l.split(/\s+/).filter(Boolean), '\n']);
+		const chunks: string[] = [];
+		for(let i = 0; i < tokens.length;) {
+			let text = '', j = i;
+			while(j < tokens.length) {
+				const next = text + (text ? ' ' : '') + tokens[j];
+				if(this.estimateTokens(next.replace(/\s*\n\s*/g, '\n')) > maxTokens && text) break;
+				text = next;
+				j++;
+			}
+			const clean = text.replace(/\s*\n\s*/g, '\n').trim();
+			if(clean) chunks.push(clean);
+			i = Math.max(j - overlapTokens, j === i ? i + 1 : j);
+		}
+		return chunks;
+	}
+
+	embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+		const embed = (text: string): Promise<number[]> => {
+			return new Promise((resolve, reject) => {
+				const id = this.embedId++;
+				this.embedQueue.set(id, { resolve, reject });
+				this.embedWorker?.postMessage({ id, text });
+			});
+		};
+
+		const chunks = this.chunk(target, maxTokens, overlapTokens);
 		return Promise.all(chunks.map(async (text, index) => ({
 			index,
 			embedding: await embed(text),
```
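The old inline logic is now split: `chunk()` exposes the token-overlap chunker on its own, and `embedding()` round-trips each chunk through the worker. A sketch with a hypothetical input:

```ts
const doc = 'long text...\nspanning many lines';
const chunks = ai.language.chunk(doc, 500, 50); // string[] chunks with ~50 tokens of overlap
const vectors = await ai.language.embedding(doc);
// one {index, embedding: number[], ...} entry per chunk (remaining fields truncated in this diff)
```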
**src/ollama.ts**

```diff
@@ -1,7 +1,7 @@
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {Ollama as ollama} from 'ollama';
 
 export class Ollama extends LLMProvider {
@@ -49,6 +49,7 @@ export class Ollama extends LLMProvider {
 		if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min);
 		if(options.system) history.unshift({role: 'system', content: system})
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			messages: history,
@@ -58,7 +59,7 @@ export class Ollama extends LLMProvider {
 				temperature: options.temperature || this.ai.options.temperature || 0.7,
 				num_predict: options.max_tokens || this.ai.options.max_tokens || 4096,
 			},
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				type: 'function',
 				function: {
 					name: t.name,
@@ -74,7 +75,11 @@ export class Ollama extends LLMProvider {
 
 		let resp: any, isFirstMessage = true;
 		do {
-			resp = await this.client.chat(requestParams);
+			resp = await this.client.chat(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			if(options.stream) {
 				if(!isFirstMessage) options.stream({text: '\n\n'});
 				else isFirstMessage = false;
@@ -93,7 +98,8 @@ export class Ollama extends LLMProvider {
 			if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
 				history.push(resp.message);
 				const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
-					const tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));
+					const tool = tools.find(findByProp('name', toolCall.function.name));
+					if(options.stream) options.stream({tool: toolCall.function.name});
 					if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
 					const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
 					try {
```
**src/open-ai.ts**

```diff
@@ -1,8 +1,8 @@
 import {OpenAI as openAI} from 'openai';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class OpenAi extends LLMProvider {
 	client!: openAI;
@@ -67,13 +67,14 @@ export class OpenAi extends LLMProvider {
 		let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
 		if(options.compress) history = await this.ai.language.compressHistory(<any>history, options.compress.max, options.compress.min, options);
 
+		const tools = options.tools || this.ai.options.tools || [];
 		const requestParams: any = {
 			model: options.model || this.model,
 			messages: history,
 			stream: !!options.stream,
 			max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
 			temperature: options.temperature || this.ai.options.temperature || 0.7,
-			tools: (options.tools || this.ai.options.tools || []).map(t => ({
+			tools: tools.map(t => ({
 				type: 'function',
 				function: {
 					name: t.name,
@@ -89,7 +90,11 @@ export class OpenAi extends LLMProvider {
 
 		let resp: any, isFirstMessage = true;
 		do {
-			resp = await this.client.chat.completions.create(requestParams);
+			resp = await this.client.chat.completions.create(requestParams).catch(err => {
+				err.message += `\n\nMessages:\n${JSON.stringify(history, null, 2)}`;
+				throw err;
+			});
+
 			if(options.stream) {
 				if(!isFirstMessage) options.stream({text: '\n\n'});
 				else isFirstMessage = false;
@@ -110,7 +115,8 @@ export class OpenAi extends LLMProvider {
 			if(toolCalls.length && !controller.signal.aborted) {
 				history.push(resp.choices[0].message);
 				const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
-					const tool = options.tools?.find(findByProp('name', toolCall.function.name));
+					const tool = tools?.find(findByProp('name', toolCall.function.name));
+					if(options.stream) options.stream({tool: toolCall.function.name});
 					if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
 					try {
 						const args = JSONAttemptParse(toolCall.function.arguments, {});
```
**src/provider.ts**

```diff
@@ -1,7 +1,6 @@
+import {AbortablePromise} from './ai.ts';
 import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';
 
-export type AbortablePromise<T> = Promise<T> & {abort: () => void};
-
 export abstract class LLMProvider {
 	abstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;
 }
```
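`AbortablePromise` now lives in src/ai.ts, and every method in this PR produces it the same way: build a promise, capture a cancel hook, then graft `abort` onto it with `Object.assign`. A minimal sketch of the pattern with a hypothetical helper (not part of this PR):

```ts
import {AbortablePromise} from './ai.ts';

// Hypothetical generalization of the pattern used by asr(), tts(), ocr(), and ask():
function abortable<T>(run: (signal: AbortSignal) => Promise<T>): AbortablePromise<T> {
	const controller = new AbortController();
	const p = run(controller.signal);
	return Object.assign(p, {abort: () => controller.abort()});
}
```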
**src/vision.ts**

```diff
@@ -1,5 +1,5 @@
 import {createWorker} from 'tesseract.js';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 
 export class Vision {
@@ -8,18 +8,16 @@ export class Vision {
 	/**
 	 * Convert image to text using Optical Character Recognition
	 * @param {string} path Path to image
-	 * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
+	 * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
 	 */
-	ocr(path: string): {abort: () => void, response: Promise<string | null>} {
+	ocr(path: string): AbortablePromise<string | null> {
 		let worker: any;
-		return {
-			abort: () => { worker?.terminate(); },
-			response: new Promise(async res => {
-				worker = await createWorker('eng');
+		const p = new Promise<string | null>(async res => {
+			worker = await createWorker(this.ai.options.tesseract?.model || 'eng', 2, {cachePath: this.ai.options.path});
 			const {data} = await worker.recognize(path);
 			await worker.terminate();
 			res(data.text.trim() || null);
-		})
-	}
+		});
+		return Object.assign(p, {abort: () => worker?.terminate()});
 	}
 }
```
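Callers now get a single promise rather than an `{abort, response}` pair. A sketch with a hypothetical image path:

```ts
const text = ai.vision.ocr('./scan.png'); // AbortablePromise<string | null>
const timer = setTimeout(() => text.abort(), 10_000); // terminate the tesseract worker on timeout
console.log(await text);
clearTimeout(timer);
```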
**vite.config.ts**

```diff
@@ -1,12 +1,19 @@
 import {defineConfig} from 'vite';
 import dts from 'vite-plugin-dts';
+import {resolve} from 'path';
 
 export default defineConfig({
 	build: {
 		lib: {
-			entry: './src/index.ts',
+			entry: {
+				index: './src/index.ts',
+				embedder: './src/embedder.ts',
+			},
 			name: 'utils',
-			fileName: (format) => (format === 'es' ? 'index.mjs' : 'index.js'),
+			fileName: (format, entryName) => {
+				if (entryName === 'embedder') return 'embedder.js';
+				return format === 'es' ? 'index.mjs' : 'index.js';
+			},
 		},
 		ssr: true,
 		emptyOutDir: true,
```