TTS
All checks were successful
Publish Library / Build NPM Project (push) Successful in 49s
Publish Library / Tag Version (push) Successful in 16s

commit 28904cddbe (parent d5bf1ec47e)
2026-01-30 15:39:20 -05:00
10 changed files with 696 additions and 944 deletions

package-lock.json (generated): diff suppressed because it is too large

package.json

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.2.7",
+  "version": "0.3.0",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",
@@ -42,6 +42,7 @@
     "vite-plugin-dts": "^4.5.3"
   },
   "files": [
+    "bin",
     "dist"
   ]
 }

ai.ts

@@ -3,9 +3,21 @@ import {LLM, LLMOptions} from './llm';
 import { Audio } from './audio.ts';
 import {Vision} from './vision.ts';
 
+export type AbortablePromise<T> = Promise<T> & {abort: () => any};
+
 export type AiOptions = LLMOptions & {
 	/** Path to models */
 	path?: string;
+	/** Piper TTS configuration */
+	piper?: {
+		/** Model URL: `https://huggingface.co/rhasspy/piper-voices/tree/main/.../model.onnx` */
+		model: string;
+	},
+	/** Tesseract OCR configuration */
+	tesseract?: {
+		/** Model: eng, eng_best, eng_fast */
+		model?: string;
+	}
 	/** Whisper ASR configuration */
 	whisper?: {
 		/** Whisper binary location */
@@ -13,11 +25,6 @@ export type AiOptions = LLMOptions & {
 		/** Model: `ggml-base.en.bin` */
 		model: string;
 	}
-	/** Tesseract OCR configuration */
-	tesseract?: {
-		/** Model: eng, eng_best, eng_fast */
-		model?: string;
-	}
 }
 
 export class Ai {
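For reference, a minimal configuration sketch for the reworked AiOptions, assuming Ai is exported from the package root and its constructor accepts these options directly (neither is shown in this diff); the model URL and binary path are placeholders:

import {Ai} from '@ztimson/ai-utils';

// Sketch only: field names follow the AiOptions type above, values are placeholders.
const ai = new Ai({
	path: './models',                                        // cache directory for downloaded models
	piper: {model: 'https://example.com/en_US-voice.onnx'},  // Piper TTS: URL to a piper-voices .onnx file
	tesseract: {model: 'eng'},                               // Tesseract OCR language model
	whisper: {binary: '/usr/bin/whisper-cli', model: 'ggml-base.en.bin'} // Whisper ASR binary + model
});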

antrhopic.ts

@@ -1,8 +1,8 @@
 import {Anthropic as anthropic} from '@anthropic-ai/sdk';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse, deepCopy} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class Anthropic extends LLMProvider {
 	client!: anthropic;
@@ -120,6 +120,7 @@ export class Anthropic extends LLMProvider {
 		original.push({role: 'assistant', content: resp.content});
 		const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
 			const tool = tools.find(findByProp('name', toolCall.name));
+			if(options.stream) options.stream({tool: toolCall.name});
 			if(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
 			try {
 				const result = await tool.fn(toolCall.input, this.ai);

audio.ts

@@ -1,30 +1,35 @@
 import {spawn} from 'node:child_process';
+import * as os from 'node:os';
+import {platform, arch} from 'node:os';
 import fs from 'node:fs/promises';
 import Path from 'node:path';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 
 export class Audio {
 	private downloads: {[key: string]: Promise<string>} = {};
 	private whisperModel!: string;
+	private piperBinary?: string;
 
 	constructor(private ai: Ai) {
 		if(ai.options.whisper?.binary) {
 			this.whisperModel = ai.options.whisper?.model.endsWith('.bin') ? ai.options.whisper?.model : ai.options.whisper?.model + '.bin';
 			this.downloadAsrModel();
 		}
+		if(ai.options.piper?.model) {
+			if(!ai.options.piper.model.startsWith('http') || !ai.options.piper.model.endsWith('.onnx'))
+				throw new Error('Piper model should be a URL to an onnx file to download');
+			if(platform() != 'linux' || (arch() != 'x64' && arch() != 'arm64'))
+				throw new Error('Piper TTS only supported on Linux x64/arm64');
+			this.piperBinary = Path.join(import.meta.dirname, '../bin/piper');
+			this.downloadTtsModel();
+		}
 	}
 
-	/**
-	 * Convert audio to text using Auditory Speech Recognition
-	 * @param {string} path Path to audio
-	 * @param model Whisper model
-	 * @returns {Promise<any>} Extracted text
-	 */
-	asr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {
+	asr(path: string, model: string = this.whisperModel): AbortablePromise<string | null> {
 		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
 		let abort: any = () => {};
-		const response = new Promise<string | null>((resolve, reject) => {
-			this.downloadAsrModel(model).then(m => {
+		const p = new Promise<string | null>(async (resolve, reject) => {
+			const m = await this.downloadAsrModel(model);
 			let output = '';
 			const proc = spawn(<string>this.ai.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});
 			abort = () => proc.kill('SIGTERM');
@@ -35,16 +40,41 @@ export class Audio {
 				else reject(new Error(`Exit code ${code}`));
 			});
 		});
-		});
-		return {response, abort};
+		return Object.assign(p, {abort});
 	}
 
+	tts(text: string, outputPath?: string, model: string = <string>this.ai.options.piper?.model): AbortablePromise<Buffer | string> {
+		if(!this.piperBinary) throw new Error('Piper not configured');
+		if(!model) throw new Error('Invalid Piper model');
+		let abort: any = () => {};
+		const p = new Promise<Buffer | string>(async (resolve, reject) => {
+			const modelPath = await this.downloadTtsModel(model);
+			const tmpFile = outputPath || Path.join(os.tmpdir(), `piper_${Date.now()}.wav`);
+			const proc = spawn(<string>this.piperBinary, ['--model', modelPath, '--output_file', tmpFile], {
+				stdio: ['pipe', 'ignore', 'ignore'],
+				env: {...process.env, LD_LIBRARY_PATH: Path.dirname(<string>this.piperBinary)}
+			});
+			abort = () => proc.kill('SIGTERM');
+			proc.stdin.write(text);
+			proc.stdin.end();
+			proc.on('error', (err: Error) => reject(err));
+			proc.on('close', async (code: number) => {
+				if(code === 0) {
+					if(outputPath) {
+						resolve(outputPath);
+					} else {
+						const buffer = await fs.readFile(tmpFile);
+						await fs.unlink(tmpFile).catch(() => {});
+						resolve(buffer);
+					}
+				} else {
+					reject(new Error(`Exit code ${code}`));
+				}
+			});
+		});
+		return Object.assign(p, {abort});
+	}
 
-	/**
-	 * Downloads the specified Whisper model if it is not already present locally.
-	 *
-	 * @param {string} model Whisper model that will be downloaded
-	 * @return {Promise<string>} Absolute path to model file, resolves once downloaded
-	 */
 	async downloadAsrModel(model: string = this.whisperModel): Promise<string> {
 		if(!this.ai.options.whisper?.binary) throw new Error('Whisper not configured');
 		if(!model.endsWith('.bin')) model += '.bin';
@@ -60,4 +90,24 @@ export class Audio {
 		});
 		return this.downloads[model];
 	}
+
+	async downloadTtsModel(model: string = <string>this.ai.options.piper?.model): Promise<string> {
+		if(!model) throw new Error('Invalid Piper model');
+		const m = <string>model.split('/').pop();
+		const p = Path.join(<string>this.ai.options.path, m);
+		const [onnxExists, jsonExists] = await Promise.all([
+			fs.stat(p).then(() => true).catch(() => false),
+			fs.stat(p + '.json').then(() => true).catch(() => false)
+		]);
+		if(onnxExists && jsonExists) return p;
+		if(!!this.downloads[m]) return this.downloads[m];
+		this.downloads[m] = Promise.all([
+			onnxExists ? Promise.resolve() : fetch(model).then(r => r.arrayBuffer()).then(b => fs.writeFile(p, Buffer.from(b))),
+			jsonExists ? Promise.resolve() : fetch(model + '.json').then(r => r.arrayBuffer()).then(b => fs.writeFile(p + '.json', Buffer.from(b)))
+		]).then(() => {
+			delete this.downloads[m];
+			return p;
+		});
+		return this.downloads[m];
+	}
 }
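A rough usage sketch of the new tts() method, assuming the Audio helper is reachable as ai.audio (that wiring is outside this diff): passing an output path resolves with that path, omitting it resolves with a WAV Buffer, and the returned AbortablePromise can kill the Piper process early.

// Hypothetical usage: ai.audio is assumed to be the Audio instance.
const speech = ai.audio.tts('Hello from Piper', 'hello.wav'); // resolves with 'hello.wav'
// speech.abort();                                            // would SIGTERM the piper process and reject
const file = await speech;

const buffer = await ai.audio.tts('Buffered speech');         // no outputPath: resolves with a WAV Buffer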

llm.ts

@@ -1,9 +1,9 @@
 import {JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {Anthropic} from './antrhopic.ts';
 import {Ollama} from './ollama.ts';
 import {OpenAi} from './open-ai.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {AiTool} from './tools.ts';
 import {Worker} from 'worker_threads';
 import {fileURLToPath} from 'url';
@@ -73,7 +73,7 @@ export type LLMRequest = {
 	/** LLM model */
 	model?: string | [string, string];
 	/** Stream response */
-	stream?: (chunk: {text?: string, done?: true}) => any;
+	stream?: (chunk: {text?: string, tool?: string, done?: true}) => any;
 	/** Compress old messages in the chat to free up context */
 	compress?: {
 		/** Trigger chat compression once context exceeds the token count */
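With the widened chunk type, a stream callback can now observe tool invocations as well as text. A sketch, assuming an ask() entry point reachable as ai.llm.ask (not part of this diff):

// Hypothetical call: the chunk shape matches the LLMRequest stream type above.
await ai.llm.ask('What is the weather in Toronto?', {
	stream: chunk => {
		if(chunk.tool) console.log(`[calling tool: ${chunk.tool}]`); // new in this release
		if(chunk.text) process.stdout.write(chunk.text);
		if(chunk.done) process.stdout.write('\n');
	}
});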

ollama.ts

@@ -1,7 +1,7 @@
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 import {Ollama as ollama} from 'ollama';
 
 export class Ollama extends LLMProvider {
@@ -99,6 +99,7 @@ export class Ollama extends LLMProvider {
 		history.push(resp.message);
 		const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
 			const tool = tools.find(findByProp('name', toolCall.function.name));
+			if(options.stream) options.stream({tool: toolCall.function.name});
 			if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
 			const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
 			try {

open-ai.ts

@@ -1,8 +1,8 @@
 import {OpenAI as openAI} from 'openai';
 import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 import {LLMMessage, LLMRequest} from './llm.ts';
-import {AbortablePromise, LLMProvider} from './provider.ts';
+import {LLMProvider} from './provider.ts';
 
 export class OpenAi extends LLMProvider {
 	client!: openAI;
@@ -116,6 +116,7 @@ export class OpenAi extends LLMProvider {
 		history.push(resp.choices[0].message);
 		const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
 			const tool = tools?.find(findByProp('name', toolCall.function.name));
+			if(options.stream) options.stream({tool: toolCall.function.name});
 			if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
 			try {
 				const args = JSONAttemptParse(toolCall.function.arguments, {});

provider.ts

@@ -1,7 +1,6 @@
+import {AbortablePromise} from './ai.ts';
 import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';
 
-export type AbortablePromise<T> = Promise<T> & {abort: () => void};
-
 export abstract class LLMProvider {
 	abstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;
 }
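AbortablePromise now lives in ai.ts, and Audio.asr/tts and Vision.ocr all build one the same way: capture an abort hook, then graft it onto the promise with Object.assign. A minimal standalone sketch of that pattern (illustrative, not taken from any provider):

import {AbortablePromise} from './ai.ts';

// Illustrative only: same shape as the library's Object.assign(p, {abort}) pattern.
function delayed(text: string): AbortablePromise<string> {
	let abort: () => void = () => {};
	const p = new Promise<string>((resolve, reject) => {
		const timer = setTimeout(() => resolve(text), 1_000);
		abort = () => { clearTimeout(timer); reject(new Error('Aborted')); };
	});
	return Object.assign(p, {abort: () => abort()});
}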

vision.ts

@@ -1,5 +1,5 @@
 import {createWorker} from 'tesseract.js';
-import {Ai} from './ai.ts';
+import {AbortablePromise, Ai} from './ai.ts';
 
 export class Vision {
@@ -8,18 +8,16 @@ export class Vision {
 	/**
 	 * Convert image to text using Optical Character Recognition
 	 * @param {string} path Path to image
-	 * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
+	 * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
 	 */
-	ocr(path: string): {abort: () => void, response: Promise<string | null>} {
+	ocr(path: string): AbortablePromise<string | null> {
 		let worker: any;
-		return {
-			abort: () => { worker?.terminate(); },
-			response: new Promise(async res => {
+		const p = new Promise<string | null>(async res => {
 			worker = await createWorker(this.ai.options.tesseract?.model || 'eng', 2, {cachePath: this.ai.options.path});
 			const {data} = await worker.recognize(path);
 			await worker.terminate();
 			res(data.text.trim() || null);
-			})
-		}
+		});
+		return Object.assign(p, {abort: () => worker?.terminate()});
 	}
 }
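A quick sketch of the updated ocr() call shape, assuming Vision is exposed as ai.vision (not shown here): the result is awaited directly and abort() terminates the Tesseract worker.

// Hypothetical usage: ai.vision is assumed to be the Vision instance.
const job = ai.vision.ocr('./receipt.png');
// job.abort();                     // would terminate the tesseract.js worker mid-run
const text = await job;             // string, or null if nothing was recognized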