parallel embedding cap
All checks were successful
Publish Library / Build NPM Project (push) Successful in 31s
Publish Library / Tag Version (push) Successful in 5s

2026-02-19 21:37:58 -05:00
parent 7ef7c3f676
commit da15d299e6
2 changed files with 18 additions and 11 deletions


@@ -1,6 +1,6 @@
 {
 	"name": "@ztimson/ai-utils",
-	"version": "0.7.1",
+	"version": "0.7.2",
 	"description": "AI Utility library",
 	"author": "Zak Timson",
 	"license": "MIT",


@@ -255,11 +255,12 @@ class LLM {
 	/**
 	 * Create a vector representation of a string
 	 * @param {object | string} target Item that will be embedded (objects get converted)
-	 * @param {number} maxTokens Chunking size. More = better context, less = more specific (Search by paragraphs or lines)
-	 * @param {number} overlapTokens Includes previous X tokens to provide continuity to AI (In addition to max tokens)
+	 * @param {{maxTokens?: number, overlapTokens?: number, parallel?: number}} opts Embedding options: chunk size, token overlap and parallel worker count
 	 * @returns {Promise<Awaited<{index: number, embedding: number[], text: string, tokens: number}>[]>} Chunked embeddings
 	 */
-	embedding(target: object | string, maxTokens = 500, overlapTokens = 50) {
+	async embedding(target: object | string, opts: {maxTokens?: number, overlapTokens?: number, parallel?: number} = {}) {
+		let {maxTokens = 500, overlapTokens = 50, parallel = 1} = opts;
 		const embed = (text: string): Promise<number[]> => {
 			return new Promise((resolve, reject) => {
 				const worker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'embedder.js'));
@@ -279,13 +280,19 @@ class LLM {
 				worker.postMessage({text, model: this.ai.options?.embedder || 'bge-small-en-v1.5', modelDir: this.ai.options.path});
 			});
 		};
-		const chunks = this.chunk(target, maxTokens, overlapTokens);
-		return Promise.all(chunks.map(async (text, index) => ({
-			index,
-			embedding: await embed(text),
-			text,
-			tokens: this.estimateTokens(text),
-		})));
+		let i = 0, chunks = this.chunk(target, maxTokens, overlapTokens), results: any[] = [];
+		const next: Function = () => {
+			const index = i++;
+			if(index >= chunks.length) return;
+			const text = chunks[index];
+			return embed(text).then(embedding => {
+				results.push({index, embedding, text, tokens: this.estimateTokens(text)});
+				return next();
+			});
+		};
+		await Promise.all(Array(parallel).fill(null).map(() => next()));
+		return results.toSorted((a, b) => a.index - b.index);
 	}
 
 	/**
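
The change swaps 0.7.1's unbounded Promise.all, which launched one embedder worker per chunk all at once, for a pool of `parallel` promise chains: each chain claims the next chunk index, embeds it, records the result, and recurses until the chunks run out. Results arrive in completion order, so they are re-sorted by index before returning. A self-contained sketch of the same pattern, with illustrative names (mapWithConcurrency and task are not part of this library):

	// Sketch of the concurrency-cap pattern introduced above, generalized to
	// any async task. At most `limit` tasks are in flight at once; each
	// finished task immediately claims the next unprocessed item.
	async function mapWithConcurrency<T, R>(items: T[], limit: number, task: (item: T) => Promise<R>): Promise<R[]> {
		let i = 0;
		const results: {index: number, value: R}[] = [];
		const next = async (): Promise<void> => {
			const index = i++;                 // claim the next unprocessed item
			if(index >= items.length) return;  // nothing left; this chain ends
			const value = await task(items[index]);
			results.push({index, value});      // stored in completion order
			return next();                     // keep this chain busy
		};
		// Spin up `limit` chains; Promise.all resolves once every chain drains
		await Promise.all(Array(Math.max(1, limit)).fill(null).map(() => next()));
		// Restore input order (completion order is nondeterministic)
		return results.toSorted((a, b) => a.index - b.index).map(r => r.value);
	}

Two things worth noting about the committed code. Array.prototype.toSorted is an ES2023 addition (Node 20+); on older runtimes, [...results].sort((a, b) => a.index - b.index) is the drop-in equivalent. And since parallel defaults to 1, 0.7.2 embeds chunks sequentially unless callers opt into a higher cap, e.g. embedding(doc, {parallel: 4}); that is slower by default than 0.7.1's all-at-once behavior, but it no longer spawns one embedder worker per chunk simultaneously.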