Compare commits


4 Commits
0.8.1 ... 0.8.5

SHA1  Message  Date

936317f2f2  Better memory de-duplication  2026-03-01 00:11:17 -05:00
  All checks were successful:
  Publish Library / Build NPM Project (push): Successful in 37s
  Publish Library / Tag Version (push): Successful in 10s

cfde2ac4d3  Fixed OpenAI tool call streaming!  2026-02-27 13:11:41 -05:00
  All checks were successful:
  Publish Library / Build NPM Project (push): Successful in 42s
  Publish Library / Tag Version (push): Successful in 8s

e4ba89d3db  OpenAI tool call history fix?  2026-02-27 13:00:49 -05:00
  All checks were successful:
  Publish Library / Build NPM Project (push): Successful in 35s
  Publish Library / Tag Version (push): Successful in 29s

71a7e2a904  Better RAG memory  2026-02-27 12:32:27 -05:00
  All checks were successful:
  Publish Library / Build NPM Project (push): Successful in 50s
  Publish Library / Tag Version (push): Successful in 9s
3 changed files with 92 additions and 63 deletions

View File: package.json

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.8.1",
+  "version": "0.8.5",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",

View File

@@ -44,8 +44,6 @@ export type LLMMemory = {
   fact: string;
   /** Owner and fact embedding vector */
   embeddings: [number[], number[]];
-  /** Creation time */
-  timestamp: Date;
 }
 
 export type LLMRequest = {
@@ -95,71 +93,85 @@ class LLM {
   * @returns {{abort: () => void, response: Promise<string>}} Function to abort response and chat history
   */
  ask(message: string, options: LLMRequest = {}): AbortablePromise<string> {
+   options = <any>{
+     system: '',
+     temperature: 0.8,
+     ...this.ai.options.llm,
+     models: undefined,
+     history: [],
+     ...options,
+   }
    const m = options.model || this.defaultModel;
    if(!this.models[m]) throw new Error(`Model does not exist: ${m}`);
    let abort = () => {};
    return Object.assign(new Promise<string>(async res => {
      if(!options.history) options.history = [];
-     // If memories were passed, find any relivant ones and add a tool for ADHOC lookups
+     // If memories were passed, find any relevant ones and add a tool for ADHOC lookups
      if(options.memory) {
-       options.system = (options.system || '') + '\nYou have passive persistent memory never make any mention of your memory capabilities and what you can/cannot remember\n';
-       const search = async (query?: string | null, subject?: string | null, limit = 50) => {
+       const search = async (query?: string | null, subject?: string | null, limit = 10) => {
          const [o, q] = await Promise.all([
            subject ? this.embedding(subject) : Promise.resolve(null),
            query ? this.embedding(query) : Promise.resolve(null),
          ]);
-         return (options.memory || [])
-           .map(m => ({...m, score: o ? this.cosineSimilarity(m.embeddings[0], o[0].embedding) : 1}))
-           .filter((m: any) => m.score >= 0.8)
-           .map((m: any) => ({...m, score: q ? this.cosineSimilarity(m.embeddings[1], q[0].embedding) : m.score}))
-           .filter((m: any) => m.score >= 0.2)
-           .toSorted((a: any, b: any) => a.score - b.score)
-           .slice(0, limit);
+         return (options.memory || []).map(m => {
+           const score = (o ? this.cosineSimilarity(m.embeddings[0], o[0].embedding) : 0)
+             + (q ? this.cosineSimilarity(m.embeddings[1], q[0].embedding) : 0);
+           return {...m, score};
+         }).toSorted((a: any, b: any) => a.score - b.score).slice(0, limit);
        }
+       options.system += '\nYou have RAG memory and will be given the top_k closest memories regarding the users query. Save anything new you have learned worth remembering from the user message using the remember tool and feel free to recall memories manually.\n';
        const relevant = await search(message);
-       if(relevant.length) options.history.push({role: 'assistant', content: 'Things I remembered:\n' + relevant.map(m => `${m.owner}: ${m.fact}`).join('\n')});
-       options.tools = [...options.tools || [], {
-         name: 'read_memory',
-         description: 'Check your long-term memory for more information',
+       if(relevant.length) options.history.push({role: 'tool', name: 'recall', id: 'auto_recall_' + Math.random().toString(), args: {}, content: 'Things I remembered:\n' + relevant.map(m => `${m.owner}: ${m.fact}`).join('\n')});
+       options.tools = [{
+         name: 'recall',
+         description: 'Recall the closest memories you have regarding a query using RAG',
          args: {
            subject: {type: 'string', description: 'Find information by a subject topic, can be used with or without query argument'},
            query: {type: 'string', description: 'Search memory based on a query, can be used with or without subject argument'},
-           limit: {type: 'number', description: 'Result limit, default 5'},
+           topK: {type: 'number', description: 'Result limit, default 5'},
          },
          fn: (args) => {
            if(!args.subject && !args.query) throw new Error('Either a subject or query argument is required');
-           return search(args.query, args.subject, args.limit || 5);
+           return search(args.query, args.subject, args.topK);
          }
-       }];
+       }, {
+         name: 'remember',
+         description: 'Store important facts user shares for future recall',
+         args: {
+           owner: {type: 'string', description: 'Subject/person this fact is about'},
+           fact: {type: 'string', description: 'The information to remember'}
+         },
+         fn: async (args) => {
+           if(!options.memory) return;
+           const e = await Promise.all([
+             this.embedding(args.owner),
+             this.embedding(`${args.owner}: ${args.fact}`)
+           ]);
+           const newMem = {owner: args.owner, fact: args.fact, embeddings: <any>[e[0][0].embedding, e[1][0].embedding]};
+           options.memory.splice(0, options.memory.length, ...[
+             ...options.memory.filter(m => {
+               return !(this.cosineSimilarity(newMem.embeddings[0], m.embeddings[0]) >= 0.9 && this.cosineSimilarity(newMem.embeddings[1], m.embeddings[1]) >= 0.8);
+             }),
+             newMem
+           ]);
+           return 'Remembered!';
+         }
+       }, ...options.tools || []];
      }
      // Ask
      const resp = await this.models[m].ask(message, options);
-     // Remove any memory calls
-     if(options.memory) {
-       const i = options.history?.findIndex((h: any) => h.role == 'assistant' && h.content.startsWith('Things I remembered:'));
-       if(i != null && i >= 0) options.history?.splice(i, 1);
+     // Remove any memory calls from history
+     if(options.memory) options.history.splice(0, options.history.length, ...options.history.filter(h => h.role != 'tool' || (h.name != 'recall' && h.name != 'remember')));
+     // Compress message history
+     if(options.compress) {
+       const compressed = await this.ai.language.compressHistory(options.history, options.compress.max, options.compress.min, options);
+       options.history.splice(0, options.history.length, ...compressed);
      }
-     // Handle compression and memory extraction
-     if(options.compress || options.memory) {
-       let compressed: any = null;
-       if(options.compress) {
-         compressed = await this.ai.language.compressHistory(options.history, options.compress.max, options.compress.min, options);
-         options.history.splice(0, options.history.length, ...compressed.history);
-       } else {
-         const i = options.history?.findLastIndex(m => m.role == 'user') ?? -1;
-         compressed = await this.ai.language.compressHistory(i != -1 ? options.history.slice(i) : options.history, 0, 0, options);
-       }
-       if(options.memory) {
-         const updated = options.memory
-           .filter(m => !compressed.memory.some(m2 => this.cosineSimilarity(m.embeddings[1], m2.embeddings[1]) > 0.8))
-           .concat(compressed.memory);
-         options.memory.splice(0, options.memory.length, ...updated);
-       }
-     }
      return res(resp);
    }), {abort});
  }
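
For orientation, a minimal usage sketch of the reworked memory flow. The AI construction and the ai.language accessor are assumptions inferred from the internal this.ai.language calls above, not part of this diff:

  import {AI, LLMMemory} from '@ztimson/ai-utils'; // exports assumed for illustration

  const ai = new AI({llm: {/* provider + model config, hypothetical setup */}});
  const memory: LLMMemory[] = [];

  // The model can call the new `remember` tool, which embeds each fact twice
  // (owner alone, then "owner: fact") and drops near-duplicates by cosine similarity.
  await ai.language.ask('My dog Max is a golden retriever', {memory});

  // Later asks inject the top-k closest memories as an auto_recall tool message,
  // which is stripped back out of the history after the response.
  await ai.language.ask('What breed is my dog?', {memory});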
@@ -181,32 +193,24 @@ class LLM {
   * @param {LLMRequest} options LLM options
   * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
   */
- async compressHistory(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<{history: LLMMessage[], memory: LLMMemory[]}> {
-   if(this.estimateTokens(history) < max) return {history, memory: []};
+ async compressHistory(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {
+   if(this.estimateTokens(history) < max) return history;
    let keep = 0, tokens = 0;
    for(let m of history.toReversed()) {
      tokens += this.estimateTokens(m.content);
      if(tokens < min) keep++;
      else break;
    }
-   if(history.length <= keep) return {history, memory: []};
+   if(history.length <= keep) return history;
    const system = history[0].role == 'system' ? history[0] : null,
      recent = keep == 0 ? [] : history.slice(-keep),
      process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
-   const summary: any = await this.json(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), '{summary: string, facts: [[subject, fact]]}', {
-     system: 'Create the smallest summary possible, no more than 500 tokens. Create a list of NEW facts (split by subject [pro]noun and fact) about what you learned from this conversation that you didn\'t already know or get from a tool call or system prompt. Focus only on new information about people, topics, or facts. Avoid generating facts about the AI.',
-     model: options?.model,
-     temperature: options?.temperature || 0.3
-   });
-   const timestamp = new Date();
-   const memory = await Promise.all((summary?.facts || [])?.map(async ([owner, fact]: [string, string]) => {
-     const e = await Promise.all([this.embedding(owner), this.embedding(`${owner}: ${fact}`)]);
-     return {owner, fact, embeddings: [e[0][0].embedding, e[1][0].embedding], timestamp};
-   }));
-   const h = [{role: 'assistant', content: `Conversation Summary: ${summary?.summary}`, timestamp: Date.now()}, ...recent];
+   const summary: any = await this.summarize(process.map(m => `[${m.role}]: ${m.content}`).join('\n\n'), 500, options);
+   const d = Date.now();
+   const h = [{role: <any>'tool', name: 'summary', id: `summary_` + d, args: {}, content: `Conversation Summary: ${summary?.summary}`, timestamp: d}, ...recent];
    if(system) h.splice(0, 0, system);
-   return {history: <any>h, memory};
+   return h;
  }
 
  /**
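
A sketch of calling the simplified compressHistory, mirroring the in-place splice pattern ask() now uses above; the threshold values here are illustrative, not library defaults:

  // Summarize once the history exceeds ~4000 estimated tokens, keeping roughly
  // the most recent ~1000 tokens of messages verbatim; older turns collapse into
  // a single `summary` tool message placed after the system prompt, if one exists.
  const compressed = await ai.language.compressHistory(history, 4000, 1000);
  history.splice(0, history.length, ...compressed);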
@@ -243,7 +247,7 @@ class LLM {
        return `${p}: ${Array.isArray(value) ? value.join(', ') : value}`;
      });
    };
-   const lines = typeof target === 'object' ? objString(target) : target.split('\n');
+   const lines = typeof target === 'object' ? objString(target) : target.toString().split('\n');
    const tokens = lines.flatMap(l => [...l.split(/\s+/).filter(Boolean), '\n']);
    const chunks: string[] = [];
    for(let i = 0; i < tokens.length;) {
@@ -366,8 +370,8 @@ class LLM {
   * @param options LLM request options
   * @returns {Promise<string>} Summary
   */
- summarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {
-   return this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options});
+ summarize(text: string, tokens: number = 500, options?: LLMRequest): Promise<string | null> {
+   return this.ask(text, {system: `Generate the shortest summary possible <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options});
  }
 }
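
With the new default budget the second argument becomes optional; a usage sketch (same assumed ai.language accessor as above):

  const brief = await ai.language.summarize(longText);      // new 500-token default
  const tight = await ai.language.summarize(longText, 100); // explicit budget still works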

View File

@@ -11,7 +11,7 @@ export class OpenAi extends LLMProvider {
    super();
    this.client = new openAI(clean({
      baseURL: host,
-     apiKey: token
+     apiKey: token || host ? 'ignored' : undefined
    }));
  }
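
One subtlety in the apiKey change: || binds tighter than the conditional operator in TypeScript, so the committed expression parses as the first line below. The second line is a hypothetical alternative shown only for contrast, not what was committed:

  const committed = (token || host) ? 'ignored' : undefined;    // 'ignored' whenever either value is set
  const alternative = token || (host ? 'ignored' : undefined);  // a real token would take precedence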
@@ -67,7 +67,10 @@ export class OpenAi extends LLMProvider {
  ask(message: string, options: LLMRequest = {}): AbortablePromise<string> {
    const controller = new AbortController();
    return Object.assign(new Promise<any>(async (res, rej) => {
-     if(options.system && options.history?.[0]?.role != 'system') options.history?.splice(0, 0, {role: 'system', content: options.system, timestamp: Date.now()});
+     if(options.system) {
+       if(options.history?.[0]?.role != 'system') options.history?.splice(0, 0, {role: 'system', content: options.system, timestamp: Date.now()});
+       else options.history[0].content = options.system;
+     }
      let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
      const tools = options.tools || this.ai.options.llm?.tools || [];
      const requestParams: any = {
@@ -100,15 +103,37 @@ export class OpenAi extends LLMProvider {
      if(options.stream) {
        if(!isFirstMessage) options.stream({text: '\n\n'});
        else isFirstMessage = false;
-       resp.choices = [{message: {content: '', tool_calls: []}}];
+       resp.choices = [{message: {role: 'assistant', content: '', tool_calls: []}}];
        for await (const chunk of resp) {
          if(controller.signal.aborted) break;
          if(chunk.choices[0].delta.content) {
            resp.choices[0].message.content += chunk.choices[0].delta.content;
            options.stream({text: chunk.choices[0].delta.content});
          }
          if(chunk.choices[0].delta.tool_calls) {
-           resp.choices[0].message.tool_calls = chunk.choices[0].delta.tool_calls;
+           for(const deltaTC of chunk.choices[0].delta.tool_calls) {
+             const existing = resp.choices[0].message.tool_calls.find(tc => tc.index === deltaTC.index);
+             if(existing) {
+               if(deltaTC.id) existing.id = deltaTC.id;
+               if(deltaTC.type) existing.type = deltaTC.type;
+               if(deltaTC.function) {
+                 if(!existing.function) existing.function = {};
+                 if(deltaTC.function.name) existing.function.name = deltaTC.function.name;
+                 if(deltaTC.function.arguments) existing.function.arguments = (existing.function.arguments || '') + deltaTC.function.arguments;
+               }
+             } else {
+               resp.choices[0].message.tool_calls.push({
+                 index: deltaTC.index,
+                 id: deltaTC.id || '',
+                 type: deltaTC.type || 'function',
+                 function: {
+                   name: deltaTC.function?.name || '',
+                   arguments: deltaTC.function?.arguments || ''
+                 }
+               });
+             }
+           }
          }
        }
      }
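
The streaming fix above matters because OpenAI-compatible APIs deliver each tool call in fragments: the first chunk for a given index carries the id and function name, and later chunks append to function.arguments, so overwriting tool_calls on every chunk keeps only the last fragment. A self-contained sketch of the same accumulation idea (simplified types, not the library's API):

  type ToolCallDelta = {index: number, id?: string, type?: string, function?: {name?: string, arguments?: string}};
  type ToolCall = {index: number, id: string, type: string, function: {name: string, arguments: string}};

  // Merge streamed fragments into complete tool calls, keyed by index.
  function mergeToolCalls(deltas: ToolCallDelta[]): ToolCall[] {
    const calls: ToolCall[] = [];
    for(const d of deltas) {
      let tc = calls.find(c => c.index === d.index);
      if(!tc) calls.push(tc = {index: d.index, id: '', type: 'function', function: {name: '', arguments: ''}});
      if(d.id) tc.id = d.id;
      if(d.type) tc.type = d.type;
      if(d.function?.name) tc.function.name = d.function.name;
      if(d.function?.arguments) tc.function.arguments += d.function.arguments;
    }
    return calls;
  }

  // Example: fragments {name: 'recall'}, {arguments: '{"query":'}, {arguments: '"dog"}'}
  // for index 0 merge into one complete call recall({"query":"dog"}).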