Compare commits: 0.7.1 ... 0.7.7

7 commits

SHA1        Message                                        Date
790608f020  Queue OCR & ASR work                           2026-02-20 19:05:19 -05:00
473424ae23  segfault fix                                   2026-02-20 17:31:49 -05:00
9b831f7d95  Better ASR IDing                               2026-02-20 16:55:25 -05:00
498b326e45  Bump 0.7.4                                     2026-02-20 14:19:17 -05:00
56e4efec94  Use either python or python3 for diarization   2026-02-20 14:14:30 -05:00
a07f069ad0  One embedding at a time                        2026-02-19 22:58:53 -05:00
da15d299e6  parallel embedding cap                         2026-02-19 21:37:58 -05:00

All checks were successful for each push (Publish Library: Build NPM Project, Tag Version).
7 changed files with 254 additions and 959 deletions

package-lock.json (generated): 1038 changed lines
File diff suppressed because it is too large.

package.json

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.7.1",
+  "version": "0.7.7",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",
@@ -25,14 +25,14 @@
     "watch": "npx vite build --watch"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.67.0",
+    "@anthropic-ai/sdk": "^0.78.0",
     "@tensorflow/tfjs": "^4.22.0",
     "@xenova/transformers": "^2.17.2",
-    "@ztimson/node-utils": "^1.0.4",
-    "@ztimson/utils": "^0.27.9",
+    "@ztimson/node-utils": "^1.0.7",
+    "@ztimson/utils": "^0.28.13",
     "cheerio": "^1.2.0",
-    "openai": "^6.6.0",
-    "tesseract.js": "^6.0.1",
+    "openai": "^6.22.0",
+    "tesseract.js": "^7.0.0",
     "wavefile": "^11.0.0"
   },
   "devDependencies": {

asr.ts

@@ -9,15 +9,20 @@ import wavefile from 'wavefile';
 let whisperPipeline: any;
-export async function canDiarization(): Promise<boolean> {
-	return new Promise((resolve) => {
-		const proc = spawn('python', ['-c', 'import pyannote.audio']);
-		proc.on('close', (code: number) => resolve(code === 0));
-		proc.on('error', () => resolve(false));
-	});
-}
+export async function canDiarization(): Promise<string | null> {
+	const checkPython = (cmd: string) => {
+		return new Promise<boolean>((resolve) => {
+			const proc = spawn(cmd, ['-c', 'import pyannote.audio']);
+			proc.on('close', (code: number) => resolve(code === 0));
+			proc.on('error', () => resolve(false));
+		});
+	};
+	if(await checkPython('python3')) return 'python3';
+	if(await checkPython('python')) return 'python';
+	return null;
+}
-async function runDiarization(audioPath: string, dir: string, token: string): Promise<any[]> {
+async function runDiarization(binary: string, audioPath: string, dir: string, token: string): Promise<any[]> {
 	const script = `
 import sys
 import json
@@ -37,7 +42,7 @@ print(json.dumps(segments))
 	return new Promise((resolve, reject) => {
 		let output = '';
-		const proc = spawn('python', ['-c', script, audioPath]);
+		const proc = spawn(binary, ['-c', script, audioPath]);
 		proc.stdout.on('data', (data: Buffer) => output += data.toString());
 		proc.stderr.on('data', (data: Buffer) => console.error(data.toString()));
 		proc.on('close', (code: number) => {
@@ -105,30 +110,28 @@ function prepareAudioBuffer(file: string): [string, Float32Array] {
 }
 parentPort?.on('message', async ({ file, speaker, model, modelDir, token }) => {
+	let tempFile = null;
 	try {
 		if(!whisperPipeline) whisperPipeline = await pipeline('automatic-speech-recognition', `Xenova/${model}`, {cache_dir: modelDir, quantized: true});
-		// Prepare audio file
 		const [f, buffer] = prepareAudioBuffer(file);
+		tempFile = f !== file ? f : null;
-		// Fetch transcript and speakers
-		const hasDiarization = speaker && await canDiarization();
+		const hasDiarization = await canDiarization();
 		const [transcript, speakers] = await Promise.all([
 			whisperPipeline(buffer, {return_timestamps: speaker ? 'word' : false}),
-			(!speaker || !token || !hasDiarization) ? Promise.resolve(): runDiarization(f, modelDir, token),
+			(!speaker || !token || !hasDiarization) ? Promise.resolve(): runDiarization(hasDiarization, f, modelDir, token),
 		]);
-		if(file != f) rmSync(f, { recursive: true, force: true });
-		// Return any results / errors if no more processing required
 		const text = transcript.text?.trim() || null;
 		if(!speaker) return parentPort?.postMessage({ text });
 		if(!token) return parentPort?.postMessage({ text, error: 'HuggingFace token required' });
 		if(!hasDiarization) return parentPort?.postMessage({ text, error: 'Speaker diarization unavailable' });
-		// Combine transcript and speakers
 		const combined = combineSpeakerTranscript(transcript.chunks || [], speakers || []);
 		parentPort?.postMessage({ text: combined });
 	} catch (err: any) {
 		parentPort?.postMessage({ error: err.stack || err.message });
+	} finally {
+		if(tempFile) rmSync(tempFile, { recursive: true, force: true });
 	}
 });
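
canDiarization() now resolves to the usable interpreter name ('python3' or 'python') instead of a boolean, and runDiarization() spawns whichever binary was found. The same probe-with-fallback pattern as a self-contained sketch (the names below are illustrative, not the library's API):

import {spawn} from 'child_process';

// Resolve whether an interpreter on PATH can import the given module.
function canImport(cmd: string, module: string): Promise<boolean> {
	return new Promise(resolve => {
		const proc = spawn(cmd, ['-c', `import ${module}`]);
		proc.on('close', code => resolve(code === 0)); // non-zero exit: import failed
		proc.on('error', () => resolve(false));        // ENOENT: binary not installed
	});
}

// Try python3 first, then python; null means the caller degrades gracefully.
async function findInterpreter(module: string): Promise<string | null> {
	for(const cmd of ['python3', 'python'])
		if(await canImport(cmd, module)) return cmd;
	return null;
}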

audio.ts (filename assumed)

@@ -5,45 +5,67 @@ import {canDiarization} from './asr.ts';
 import {dirname, join} from 'path';
 export class Audio {
+	private busy = false;
+	private currentJob: any;
+	private queue: Array<{file: string, model: string, speaker: boolean | 'id', modelDir: string, token: string, resolve: any, reject: any}> = [];
+	private worker: Worker | null = null;
 	constructor(private ai: Ai) {}
+	private processQueue() {
+		if(this.busy || !this.queue.length) return;
+		this.busy = true;
+		const job = this.queue.shift()!;
+		if(!this.worker) {
+			this.worker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'asr.js'));
+			this.worker.on('message', this.handleMessage.bind(this));
+			this.worker.on('error', this.handleError.bind(this));
+		}
+		this.currentJob = job;
+		this.worker.postMessage({file: job.file, model: job.model, speaker: job.speaker, modelDir: job.modelDir, token: job.token});
+	}
+	private handleMessage({text, warning, error}: any) {
+		const job = this.currentJob!;
+		this.busy = false;
+		if(error) job.reject(new Error(error));
+		else {
+			if(warning) console.warn(warning);
+			job.resolve(text);
+		}
+		this.processQueue();
+	}
+	private handleError(err: Error) {
+		if(this.currentJob) {
+			this.currentJob.reject(err);
+			this.busy = false;
+			this.processQueue();
+		}
+	}
 	asr(file: string, options: { model?: string; speaker?: boolean | 'id' } = {}): AbortablePromise<string | null> {
 		const { model = this.ai.options.asr || 'whisper-base', speaker = false } = options;
 		let aborted = false;
 		const abort = () => { aborted = true; };
 		let p = new Promise<string | null>((resolve, reject) => {
-			const worker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'asr.js'));
-			const handleMessage = ({ text, warning, error }: any) => {
-				worker.terminate();
-				if(aborted) return;
-				if(error) reject(new Error(error));
-				else {
-					if(warning) console.warn(warning);
-					resolve(text);
-				}
-			};
-			const handleError = (err: Error) => {
-				worker.terminate();
-				if(!aborted) reject(err);
-			};
-			worker.on('message', handleMessage);
-			worker.on('error', handleError);
-			worker.on('exit', (code) => {
-				if(code !== 0 && !aborted) reject(new Error(`Worker exited with code ${code}`));
-			});
-			worker.postMessage({file, model, speaker, modelDir: this.ai.options.path, token: this.ai.options.hfToken});
+			this.queue.push({file, model, speaker, modelDir: <string>this.ai.options.path, token: <string>this.ai.options.hfToken,
+				resolve: (text: string | null) => !aborted && resolve(text),
+				reject: (err: Error) => !aborted && reject(err)
+			});
+			this.processQueue();
 		});
-		// Name speakers using AI
 		if(options.speaker == 'id') {
 			if(!this.ai.language.defaultModel) throw new Error('Configure an LLM for advanced ASR speaker detection');
 			p = p.then(async transcript => {
 				if(!transcript) return transcript;
 				let chunks = this.ai.language.chunk(transcript, 500, 0);
 				if(chunks.length > 4) chunks = [...chunks.slice(0, 3), <string>chunks.at(-1)];
-				const names = await this.ai.language.json(chunks.join('\n'), '{1: "Detected Name"}', {
-					system: 'Use this following transcript to identify speakers. Only identify speakers you are sure about',
+				const names = await this.ai.language.json(chunks.join('\n'), '{1: "Detected Name", 2: "Second Name"}', {
+					system: 'Use the following transcript to identify speakers. Only identify speakers you are positive about, dont mention speakers you are unsure about in your response',
 					temperature: 0.1,
 				});
 				Object.entries(names).forEach(([speaker, name]) => {
@@ -56,5 +78,5 @@ export class Audio {
 		return Object.assign(p, { abort });
 	}
-	canDiarization = canDiarization;
+	canDiarization = () => canDiarization().then(resp => !!resp);
 }
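
Audio.asr() previously spawned a fresh worker per call and terminated it afterwards; it now enqueues jobs that a single persistent worker drains one at a time. Reduced to a generic sketch (JobQueue and its members are illustrative, not the library's API):

// Single-consumer job queue: callers enqueue and get a promise back;
// one job runs at a time, and each completion pulls the next job.
class JobQueue<T, R> {
	private busy = false;
	private queue: Array<{job: T, resolve: (r: R) => void, reject: (e: Error) => void}> = [];
	constructor(private run: (job: T) => Promise<R>) {}

	enqueue(job: T): Promise<R> {
		return new Promise((resolve, reject) => {
			this.queue.push({job, resolve, reject});
			this.pump();
		});
	}

	private pump() {
		if(this.busy || !this.queue.length) return;
		this.busy = true;
		const next = this.queue.shift()!;
		this.run(next.job)
			.then(next.resolve, next.reject)
			.finally(() => { this.busy = false; this.pump(); });
	}
}

// Usage: const q = new JobQueue((n: number) => Promise.resolve(n * 2));
// await q.enqueue(21); // 42; concurrent enqueues still run strictly in order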

llm.ts (filename assumed)

@@ -255,11 +255,11 @@ class LLM {
 	/**
 	 * Create a vector representation of a string
 	 * @param {object | string} target Item that will be embedded (objects get converted)
-	 * @param {number} maxTokens Chunking size. More = better context, less = more specific (Search by paragraphs or lines)
-	 * @param {number} overlapTokens Includes previous X tokens to provide continuity to AI (In addition to max tokens)
+	 * @param {maxTokens?: number, overlapTokens?: number} opts Options for embedding such as chunk sizes
 	 * @returns {Promise<Awaited<{index: number, embedding: number[], text: string, tokens: number}>[]>} Chunked embeddings
 	 */
-	embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+	async embedding(target: object | string, opts: {maxTokens?: number, overlapTokens?: number} = {}) {
+		let {maxTokens = 500, overlapTokens = 50} = opts;
 		const embed = (text: string): Promise<number[]> => {
 			return new Promise((resolve, reject) => {
 				const worker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'embedder.js'));
@@ -279,13 +279,13 @@
 				worker.postMessage({text, model: this.ai.options?.embedder || 'bge-small-en-v1.5', modelDir: this.ai.options.path});
 			});
 		};
-		const chunks = this.chunk(target, maxTokens, overlapTokens);
-		return Promise.all(chunks.map(async (text, index) => ({
-			index,
-			embedding: await embed(text),
-			text,
-			tokens: this.estimateTokens(text),
-		})));
+		const chunks = this.chunk(target, maxTokens, overlapTokens), results: any[] = [];
+		for(let i = 0; i < chunks.length; i++) {
+			const text = chunks[i];
+			const embedding = await embed(text);
+			results.push({index: i, embedding, text, tokens: this.estimateTokens(text)});
+		}
+		return results;
 	}
 	/**
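
Two changes land in embedding(): the chunk sizes move into an options object, and chunks are embedded sequentially rather than all at once through Promise.all, so only one embedder worker is alive at a time (the "One embedding at a time" commit). A usage sketch against the new signature (the ai instance and longDocument input are assumed):

// New options-object call shape; defaults remain maxTokens 500 / overlapTokens 50.
const chunks = await ai.language.embedding(longDocument, {maxTokens: 500, overlapTokens: 50});
for(const {index, embedding, text, tokens} of chunks)
	console.log(`#${index}: ${tokens} tokens -> ${embedding.length}-dim vector (${text.slice(0, 40)}...)`);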

vision.ts (filename assumed)

@@ -2,8 +2,26 @@ import {createWorker} from 'tesseract.js';
 import {AbortablePromise, Ai} from './ai.ts';
 export class Vision {
+	private worker: any = null;
+	private queue: Array<{ path: string, resolve: any, reject: any }> = [];
+	private busy = false;
-	constructor(private ai: Ai) { }
+	constructor(private ai: Ai) {}
+	private async processQueue() {
+		if(this.busy || !this.queue.length) return;
+		this.busy = true;
+		const job = this.queue.shift()!;
+		if(!this.worker) this.worker = await createWorker(this.ai.options.ocr || 'eng', 2, {cachePath: this.ai.options.path});
+		try {
+			const {data} = await this.worker.recognize(job.path);
+			job.resolve(data.text.trim() || null);
+		} catch(err) {
+			job.reject(err);
+		}
+		this.busy = false;
+		this.processQueue();
+	}
 	/**
 	 * Convert image to text using Optical Character Recognition
@@ -11,13 +29,16 @@ export class Vision {
 	 * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
 	 */
 	ocr(path: string): AbortablePromise<string | null> {
-		let worker: any;
-		const p = new Promise<string | null>(async res => {
-			worker = await createWorker(this.ai.options.ocr || 'eng', 2, {cachePath: this.ai.options.path});
-			const {data} = await worker.recognize(path);
-			await worker.terminate();
-			res(data.text.trim() || null);
-		});
-		return Object.assign(p, {abort: () => worker?.terminate()});
+		let aborted = false;
+		const abort = () => { aborted = true; };
+		const p = new Promise<string | null>((resolve, reject) => {
+			this.queue.push({
+				path,
+				resolve: (text: string | null) => !aborted && resolve(text),
+				reject: (err: Error) => !aborted && reject(err)
+			});
+			this.processQueue();
+		});
+		return Object.assign(p, {abort});
 	}
 }
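
Because the Tesseract worker is now shared across jobs, abort() no longer terminates it; it only detaches the caller, and the guarded resolve/reject means an aborted promise stays pending. A usage sketch that accounts for that (the ai instance and image path are assumed):

// Race the queued OCR against a timeout; abort() discards the result while
// the shared worker stays alive for the next job in the queue.
const job = ai.vision.ocr('/tmp/scan.png');
const text = await Promise.race([
	job,
	new Promise<string | null>(res => setTimeout(() => { job.abort(); res(null); }, 5_000)),
]);

Racing against a timeout matters here: directly awaiting a job after calling abort() would hang, since neither resolve nor reject fires once aborted is set.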

vite.config.ts (filename assumed)

@@ -1,6 +1,5 @@
 import {defineConfig} from 'vite';
 import dts from 'vite-plugin-dts';
-import {resolve} from 'path';
 export default defineConfig({
 	build: {