4 Commits
0.2.4 ... 0.3.0

Author  SHA1        Message                                                              Date
        28904cddbe  TTS                                                                  2026-01-30 15:39:29 -05:00
        d5bf1ec47e  Pulled chunking out into its own exported function for easy access  2026-01-30 10:38:51 -05:00
        cb60a0b0c5  Moved embeddings to worker to prevent blocking                       2026-01-28 22:17:39 -05:00
        1c59379c7d  Set tesseract model                                                  2026-01-16 20:33:51 -05:00
12 changed files with 765 additions and 982 deletions

package-lock.json (generated): 1472 lines changed; diff suppressed because it is too large.

package.json

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.2.4",
+  "version": "0.3.0",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",
@@ -42,6 +42,7 @@
     "vite-plugin-dts": "^4.5.3"
   },
   "files": [
+    "bin",
     "dist"
   ]
 }
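The new "bin" entry ships the bundled Piper binary with the package; src/audio.ts below resolves it at runtime via Path.join(import.meta.dirname, '../bin/piper'), which only works if the bin directory is published.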

src/ai.ts

@@ -3,15 +3,28 @@ import {LLM, LLMOptions} from './llm';
 import { Audio } from './audio.ts';
 import {Vision} from './vision.ts';
 
+export type AbortablePromise<T> = Promise<T> & {abort: () => any};
+
 export type AiOptions = LLMOptions & {
+  /** Path to models */
+  path?: string;
+  /** Piper TTS configuration */
+  piper?: {
+    /** Model URL: `https://huggingface.co/rhasspy/piper-voices/tree/main/.../model.onnx` */
+    model: string;
+  },
+  /** Tesseract OCR configuration */
+  tesseract?: {
+    /** Model: eng, eng_best, eng_fast */
+    model?: string;
+  }
+  /** Whisper ASR configuration */
   whisper?: {
     /** Whisper binary location */
     binary: string;
     /** Model: `ggml-base.en.bin` */
     model: string;
   }
-  /** Path to models */
-  path?: string;
 }
 
 export class Ai {
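Taken together, the options now look like this. A configuration sketch (the voice URL, binary path, and model directory are hypothetical; it is assumed the Ai constructor accepts AiOptions, since the sub-modules read them back off ai.options):

import {Ai} from '@ztimson/ai-utils';

const ai = new Ai({
  path: './models', // cache directory for downloaded models
  piper: {
    // Must be an http(s) URL ending in .onnx; Linux x64/arm64 only (see src/audio.ts)
    model: 'https://huggingface.co/rhasspy/piper-voices/resolve/main/en/en_US/amy/medium/en_US-amy-medium.onnx'
  },
  tesseract: {model: 'eng_fast'}, // eng, eng_best or eng_fast
  whisper: {binary: '/usr/local/bin/whisper-cli', model: 'ggml-base.en'} // '.bin' is appended if missing
});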

src/antrhopic.ts

@@ -1,8 +1,8 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, deepCopy} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class Anthropic extends LLMProvider {
   client!: anthropic;
@@ -120,6 +120,7 @@ export class Anthropic extends LLMProvider {
       original.push({role: 'assistant', content: resp.content});
       const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
         const tool = tools.find(findByProp('name', toolCall.name));
+        if(options.stream) options.stream({tool: toolCall.name});
         if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
         try {
           const result = await tool.fn(toolCall.input, this.ai);

src/audio.ts

@@ -1,50 +1,80 @@
 import {spawn} from 'node:child_process';
+import * as os from 'node:os';
+import {platform, arch} from 'node:os';
 import fs from 'node:fs/promises';
 import Path from 'node:path';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 
 export class Audio {
   private downloads: {[key: string]: Promise<string>} = {};
   private whisperModel!: string;
+  private piperBinary?: string;
 
   constructor(private ai: Ai) {
     if(ai.options.whisper?.binary) {
       this.whisperModel = ai.options.whisper?.model.endsWith('.bin') ? ai.options.whisper?.model : ai.options.whisper?.model + '.bin';
       this.downloadAsrModel();
     }
+    if(ai.options.piper?.model) {
+      if(!ai.options.piper.model.startsWith('http') || !ai.options.piper.model.endsWith('.onnx'))
+        throw new Error('Piper model should be a URL to an onnx file to download');
+      if(platform() != 'linux' || (arch() != 'x64' && arch() != 'arm64'))
+        throw new Error('Piper TTS only supported on Linux x64/arm64');
+      this.piperBinary = Path.join(import.meta.dirname, '../bin/piper');
+      this.downloadTtsModel();
+    }
   }
 
-  /**
-   * Convert audio to text using Auditory Speech Recognition
-   * @param {string} path Path to audio
-   * @param model Whisper model
-   * @returns {Promise<any>} Extracted text
-   */
-  asr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {
+  asr(path: string, model: string = this.whisperModel): AbortablePromise<string | null> {
     if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
     let abort: any = () => {};
-    const response = new Promise<string | null>((resolve, reject) => {
-      this.downloadAsrModel(model).then(m => {
-        let output = '';
-        const proc = spawn(<string>this.ai.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});
-        abort = () => proc.kill('SIGTERM');
-        proc.on('error', (err: Error) => reject(err));
-        proc.stdout.on('data', (data: Buffer) => output += data.toString());
-        proc.on('close', (code: number) => {
-          if(code === 0) resolve(output.trim() || null);
-          else reject(new Error(`Exit code ${code}`));
-        });
+    const p = new Promise<string | null>(async (resolve, reject) => {
+      const m = await this.downloadAsrModel(model);
+      let output = '';
+      const proc = spawn(<string>this.ai.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});
+      abort = () => proc.kill('SIGTERM');
+      proc.on('error', (err: Error) => reject(err));
+      proc.stdout.on('data', (data: Buffer) => output += data.toString());
+      proc.on('close', (code: number) => {
+        if(code === 0) resolve(output.trim() || null);
+        else reject(new Error(`Exit code ${code}`));
       });
     });
-    return {response, abort};
+    return Object.assign(p, {abort});
+  }
+
+  tts(text: string, outputPath?: string, model: string = <string>this.ai.options.piper?.model): AbortablePromise<Buffer | string> {
+    if(!this.piperBinary) throw new Error('Piper not configured');
+    if(!model) throw new Error('Invalid Piper model');
+    let abort: any = () => {};
+    const p = new Promise<Buffer | string>(async (resolve, reject) => {
+      const modelPath = await this.downloadTtsModel(model);
+      const tmpFile = outputPath || Path.join(os.tmpdir(), `piper_${Date.now()}.wav`);
+      const proc = spawn(<string>this.piperBinary, ['--model', modelPath, '--output_file', tmpFile], {
+        stdio: ['pipe', 'ignore', 'ignore'],
+        env: {...process.env, LD_LIBRARY_PATH: Path.dirname(<string>this.piperBinary)}
+      });
+      abort = () => proc.kill('SIGTERM');
+      proc.stdin.write(text);
+      proc.stdin.end();
+      proc.on('error', (err: Error) => reject(err));
+      proc.on('close', async (code: number) => {
+        if(code === 0) {
+          if(outputPath) {
+            resolve(outputPath);
+          } else {
+            const buffer = await fs.readFile(tmpFile);
+            await fs.unlink(tmpFile).catch(() => {});
+            resolve(buffer);
+          }
+        } else {
+          reject(new Error(`Exit code ${code}`));
+        }
+      });
+    });
+    return Object.assign(p, {abort});
   }
 
-  /**
-   * Downloads the specified Whisper model if it is not already present locally.
-   *
-   * @param {string} model Whisper model that will be downloaded
-   * @return {Promise<string>} Absolute path to model file, resolves once downloaded
-   */
   async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
     if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
     if(!model.endsWith('.bin')) model += '.bin';
@@ -60,4 +90,24 @@ export class Audio {
     });
     return this.downloads[model];
   }
+
+  async downloadTtsModel(model: string = <string>this.ai.options.piper?.model): Promise<string> {
+    if(!model) throw new Error('Invalid Piper model');
+    const m = <string>model.split('/').pop();
+    const p = Path.join(<string>this.ai.options.path, m);
+    const [onnxExists, jsonExists] = await Promise.all([
+      fs.stat(p).then(() => true).catch(() => false),
+      fs.stat(p + '.json').then(() => true).catch(() => false)
+    ]);
+    if(onnxExists && jsonExists) return p;
+    if(!!this.downloads[m]) return this.downloads[m];
+    this.downloads[m] = Promise.all([
+      onnxExists ? Promise.resolve() : fetch(model).then(r => r.arrayBuffer()).then(b => fs.writeFile(p, Buffer.from(b))),
+      jsonExists ? Promise.resolve() : fetch(model + '.json').then(r => r.arrayBuffer()).then(b => fs.writeFile(p + '.json', Buffer.from(b)))
+    ]).then(() => {
+      delete this.downloads[m];
+      return p;
+    });
+    return this.downloads[m];
+  }
 }
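asr() now returns an AbortablePromise instead of an {abort, response} pair, and the new tts() mirrors it. A usage sketch (file paths are hypothetical; assumes the Audio instance is exposed as ai.audio):

import {Ai} from '@ztimson/ai-utils';
declare const ai: Ai;

// Speech to text; aborting kills the whisper process and rejects the promise
const transcript = ai.audio.asr('./meeting.wav');
setTimeout(() => transcript.abort(), 30_000); // give up after 30s
console.log(await transcript.catch(() => null));

// Text to speech: resolves with a Buffer, or with the path when an output file is given
const wav = await ai.audio.tts('Hello world');                 // Buffer (temp file cleaned up)
const file = await ai.audio.tts('Hello world', './hello.wav'); // './hello.wav'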

src/embedder.ts (new file, 11 lines)

@@ -0,0 +1,11 @@
+import { pipeline } from '@xenova/transformers';
+import { parentPort } from 'worker_threads';
+
+let model: any;
+
+parentPort?.on('message', async ({ id, text }) => {
+  if(!model) model = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
+  const output = await model(text, { pooling: 'mean', normalize: true });
+  const embedding = Array.from(output.data);
+  parentPort?.postMessage({ id, embedding });
+});
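The worker lazily loads the MiniLM pipeline on first use, then answers an id-correlated request/response protocol: post {id, text}, receive {id, embedding}. A sketch of driving it directly (assuming the built worker lands at dist/embedder.js):

import {Worker} from 'worker_threads';

const worker = new Worker('./dist/embedder.js');
worker.on('message', ({id, embedding}: {id: number, embedding: number[]}) => {
  console.log(`#${id}: ${embedding.length} dims`); // all-MiniLM-L6-v2 emits 384-dim vectors
  void worker.terminate();
});
worker.postMessage({id: 0, text: 'The quick brown fox'});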

src/llm.ts

@@ -1,12 +1,13 @@
-import {pipeline} from '@xenova/transformers';
 import {JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {Anthropic} from './antrhopic.ts';
 import {Ollama} from './ollama.ts';
 import {OpenAi} from './open-ai.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {AiTool} from './tools.ts';
-import * as tf from '@tensorflow/tfjs';
+import {Worker} from 'worker_threads';
+import {fileURLToPath} from 'url';
+import {dirname, join} from 'path';
 
 export type LLMMessage = {
   /** Message originator */
@@ -72,7 +73,7 @@
   /** LLM model */
   model?: string | [string, string];
   /** Stream response */
-  stream?: (chunk: {text?: string, done?: true}) => any;
+  stream?: (chunk: {text?: string, tool?: string, done?: true}) => any;
   /** Compress old messages in the chat to free up context */
   compress?: {
     /** Trigger chat compression once context exceeds the token count */
@@ -83,11 +84,22 @@
 }
 
 export class LLM {
-  private embedModel: any;
+  private embedWorker: Worker | null = null;
+  private embedQueue = new Map<number, { resolve: (value: number[]) => void; reject: (error: any) => void }>();
+  private embedId = 0;
   private providers: {[key: string]: LLMProvider} = {};
 
   constructor(public readonly ai: Ai) {
-    this.embedModel = pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
+    this.embedWorker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'embedder.js'));
+    this.embedWorker.on('message', ({ id, embedding }) => {
+      const pending = this.embedQueue.get(id);
+      if (pending) {
+        pending.resolve(embedding);
+        this.embedQueue.delete(id);
+      }
+    });
     if(ai.options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, ai.options.anthropic.token, ai.options.anthropic.model);
     if(ai.options.ollama?.host) this.providers.ollama = new Ollama(this.ai, ai.options.ollama.host, ai.options.ollama.model);
     if(ai.options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, ai.options.openAi.token, ai.options.openAi.model);
@@ -148,49 +160,44 @@ export class LLM {
     return denominator === 0 ? 0 : dotProduct / denominator;
   }
 
-  embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+  chunk(target: object | string, maxTokens = 500, overlapTokens = 50): string[] {
     const objString = (obj: any, path = ''): string[] => {
-      if(obj === null || obj === undefined) return [];
+      if(!obj) return [];
       return Object.entries(obj).flatMap(([key, value]) => {
         const p = path ? `${path}${isNaN(+key) ? `.${key}` : `[${key}]`}` : key;
-        if(typeof value === 'object' && value !== null && !Array.isArray(value)) return objString(value, p);
-        const valueStr = Array.isArray(value) ? value.join(', ') : String(value);
-        return `${p}: ${valueStr}`;
+        if(typeof value === 'object' && !Array.isArray(value)) return objString(value, p);
+        return `${p}: ${Array.isArray(value) ? value.join(', ') : value}`;
       });
     };
-    const embed = async (text: string): Promise<number[]> => {
-      const model = await this.embedModel;
-      const output = await model(text, {pooling: 'mean', normalize: true});
-      return Array.from(output.data);
-    };
-    // Tokenize
-    const lines = typeof target === 'object' ? objString(target) : target.split('\n');
-    const tokens = lines.flatMap(line => [...line.split(/\s+/).filter(w => w.trim()), '\n']);
-    // Chunk
-    const chunks: string[] = [];
-    let start = 0;
-    while (start < tokens.length) {
-      let end = start;
-      let text = '';
-      // Build chunk
-      while (end < tokens.length) {
-        const nextToken = tokens[end];
-        const testText = text + (text ? ' ' : '') + nextToken;
-        const testTokens = this.estimateTokens(testText.replace(/\s*\n\s*/g, '\n'));
-        if (testTokens > maxTokens && text) break;
-        text = testText;
-        end++;
-      }
-      // Save chunk
-      const cleanText = text.replace(/\s*\n\s*/g, '\n').trim();
-      if(cleanText) chunks.push(cleanText);
-      start = end - overlapTokens;
-      if (start <= end - tokens.length + end) start = end; // Safety: prevent infinite loop
-    }
+    const lines = typeof target === 'object' ? objString(target) : target.split('\n');
+    const tokens = lines.flatMap(l => [...l.split(/\s+/).filter(Boolean), '\n']);
+    const chunks: string[] = [];
+    for(let i = 0; i < tokens.length;) {
+      let text = '', j = i;
+      while(j < tokens.length) {
+        const next = text + (text ? ' ' : '') + tokens[j];
+        if(this.estimateTokens(next.replace(/\s*\n\s*/g, '\n')) > maxTokens && text) break;
+        text = next;
+        j++;
+      }
+      const clean = text.replace(/\s*\n\s*/g, '\n').trim();
+      if(clean) chunks.push(clean);
+      i = Math.max(j - overlapTokens, j === i ? i + 1 : j);
+    }
+    return chunks;
+  }
+
+  embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+    const embed = (text: string): Promise<number[]> => {
+      return new Promise((resolve, reject) => {
+        const id = this.embedId++;
+        this.embedQueue.set(id, { resolve, reject });
+        this.embedWorker?.postMessage({ id, text });
+      });
+    };
+    const chunks = this.chunk(target, maxTokens, overlapTokens);
     return Promise.all(chunks.map(async (text, index) => ({
       index,
       embedding: await embed(text),
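The chunking pass is now an exported chunk() method, so callers can split text without computing embeddings, and embedding() delegates to it before shipping each chunk to the worker. A sketch (assumes the LLM instance is reachable as ai.llm):

import {Ai} from '@ztimson/ai-utils';
declare const ai: Ai;
declare const longDocument: string;

// Split into ~500-token chunks with ~50 tokens of overlap (the defaults)
const chunks: string[] = ai.llm.chunk(longDocument, 500, 50);

// Chunk and embed in one call; the vectors are computed off the main thread
const embedded = await ai.llm.embedding(longDocument);
console.log(embedded[0].index, embedded[0].embedding.length);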

src/ollama.ts

@@ -1,7 +1,7 @@
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {Ollama as ollama} from 'ollama';
 
 export class Ollama extends LLMProvider {
@@ -99,6 +99,7 @@ export class Ollama extends LLMProvider {
       history.push(resp.message);
       const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
         const tool = tools.find(findByProp('name', toolCall.function.name));
+        if(options.stream) options.stream({tool: toolCall.function.name});
         if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
         const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
         try {

src/open-ai.ts

@@ -1,8 +1,8 @@
 import {OpenAI as openAI} from 'openai';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class OpenAi extends LLMProvider {
   client!: openAI;
@@ -116,6 +116,7 @@ export class OpenAi extends LLMProvider {
       history.push(resp.choices[0].message);
       const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
         const tool = tools?.find(findByProp('name', toolCall.function.name));
+        if(options.stream) options.stream({tool: toolCall.function.name});
         if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
         try {
           const args = JSONAttemptParse(toolCall.function.arguments, {});
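All three providers now surface tool invocations through the same stream callback that already carried text chunks (the widened chunk type is in src/llm.ts above). A sketch (assumes the LLM facade exposes the same ask(message, options) surface as its providers):

import {Ai} from '@ztimson/ai-utils';
declare const ai: Ai;

await ai.llm.ask('What is the weather in Toronto?', {
  stream: chunk => {
    if(chunk.tool) console.log(`\n[calling tool: ${chunk.tool}]`);
    if(chunk.text) process.stdout.write(chunk.text);
    if(chunk.done) console.log('\n[done]');
  }
});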

src/provider.ts

@@ -1,7 +1,6 @@
+import {AbortablePromise} from './ai.ts';
 import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';
 
-export type AbortablePromise<T> = Promise<T> & {abort: () => void};
-
 export abstract class LLMProvider {
   abstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;
 }
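Moving AbortablePromise from provider.ts into ai.ts makes it the library-wide return shape rather than a provider detail: ask(), asr(), tts() and ocr() can all be awaited directly or cancelled through .abort(). A generic sketch of the pattern the refactor standardizes on (abortable is a hypothetical helper, not part of the library; the import assumes ai.ts is re-exported by the package index):

import {AbortablePromise} from '@ztimson/ai-utils';

function abortable<T>(run: (signal: AbortSignal) => Promise<T>): AbortablePromise<T> {
  const ctrl = new AbortController();
  // Attach the kill switch directly to the promise, as the library does with Object.assign
  return Object.assign(run(ctrl.signal), {abort: () => ctrl.abort()});
}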

src/vision.ts

@@ -1,5 +1,5 @@
 import {createWorker} from 'tesseract.js';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 
 export class Vision {
@@ -8,18 +8,16 @@ export class Vision {
   /**
    * Convert image to text using Optical Character Recognition
    * @param {string} path Path to image
-   * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
+   * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
    */
-  ocr(path: string): {abort: () => void, response: Promise<string | null>} {
+  ocr(path: string): AbortablePromise<string | null> {
     let worker: any;
-    return {
-      abort: () => { worker?.terminate(); },
-      response: new Promise(async res => {
-        worker = await createWorker('eng', 1, {cachePath: this.ai.options.path});
-        const {data} = await worker.recognize(path);
-        await worker.terminate();
-        res(data.text.trim() || null);
-      })
-    }
+    const p = new Promise<string | null>(async res => {
+      worker = await createWorker(this.ai.options.tesseract?.model || 'eng', 2, {cachePath: this.ai.options.path});
+      const {data} = await worker.recognize(path);
+      await worker.terminate();
+      res(data.text.trim() || null);
+    });
+    return Object.assign(p, {abort: () => worker?.terminate()});
   }
 }
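ocr() adopts the same AbortablePromise shape and now honors the configured Tesseract model (falling back to 'eng'). A sketch (hypothetical image path; assumes Vision is exposed as ai.vision):

import {Ai} from '@ztimson/ai-utils';
declare const ai: Ai;

const job = ai.vision.ocr('./scan.png');
// job.abort() tears down the tesseract worker early if needed
const text = await job; // string | null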

vite.config.ts

@@ -1,12 +1,19 @@
 import {defineConfig} from 'vite';
 import dts from 'vite-plugin-dts';
+import {resolve} from 'path';
 
 export default defineConfig({
   build: {
     lib: {
-      entry: './src/index.ts',
+      entry: {
+        index: './src/index.ts',
+        embedder: './src/embedder.ts',
+      },
       name: 'utils',
-      fileName: (format) => (format === 'es' ? 'index.mjs' : 'index.js'),
+      fileName: (format, entryName) => {
+        if (entryName === 'embedder') return 'embedder.js';
+        return format === 'es' ? 'index.mjs' : 'index.js';
+      },
     },
     ssr: true,
     emptyOutDir: true,
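The second entry exists because src/llm.ts spawns the worker from a file path (join(dirname(...), 'embedder.js')): the embedder must be emitted as a standalone embedder.js beside index.mjs rather than bundled into the main entry, or the Worker constructor would have nothing to load.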