Compare commits

..

1 Commit
0.7.2 ... 0.7.3

Author SHA1 Message Date
a07f069ad0 One embedding at a time
All checks were successful
Publish Library / Build NPM Project (push) Successful in 27s
Publish Library / Tag Version (push) Successful in 7s
2026-02-19 22:58:53 -05:00
2 changed files with 10 additions and 17 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "@ztimson/ai-utils",
"version": "0.7.2",
"version": "0.7.3",
"description": "AI Utility library",
"author": "Zak Timson",
"license": "MIT",

View File

@@ -255,12 +255,11 @@ class LLM {
/**
* Create a vector representation of a string
* @param {object | string} target Item that will be embedded (objects get converted)
* @param {maxTokens?: number, overlapTokens?: number, parellel?: number} opts Options for embedding such as chunk sizes and parallel processing
* @param {maxTokens?: number, overlapTokens?: number} opts Options for embedding such as chunk sizes
* @returns {Promise<Awaited<{index: number, embedding: number[], text: string, tokens: number}>[]>} Chunked embeddings
*/
async embedding(target: object | string, opts: {maxTokens?: number, overlapTokens?: number, parallel?: number} = {}) {
let {maxTokens = 500, overlapTokens = 50, parallel = 1} = opts;
async embedding(target: object | string, opts: {maxTokens?: number, overlapTokens?: number} = {}) {
let {maxTokens = 500, overlapTokens = 50} = opts;
const embed = (text: string): Promise<number[]> => {
return new Promise((resolve, reject) => {
const worker = new Worker(join(dirname(fileURLToPath(import.meta.url)), 'embedder.js'));
@@ -280,19 +279,13 @@ class LLM {
worker.postMessage({text, model: this.ai.options?.embedder || 'bge-small-en-v1.5', modelDir: this.ai.options.path});
});
};
let i = 0, chunks = this.chunk(target, maxTokens, overlapTokens), results: any[] = [];
const next: Function = () => {
const index = i++;
if(index >= chunks.length) return;
const text = chunks[index];
return embed(text).then(embedding => {
results.push({index, embedding, text, tokens: this.estimateTokens(text)});
return next();
})
const chunks = this.chunk(target, maxTokens, overlapTokens), results: any[] = [];
for(let i = 0; i < chunks.length; i++) {
const text= chunks[i];
const embedding = await embed(text);
results.push({index: i, embedding, text, tokens: this.estimateTokens(text)});
}
await Promise.all(Array(parallel).fill(null).map(() => next()));
return results.toSorted((a, b) => a.index - b.index);
return results;
}
/**