Compare commits

...

8 Commits

SHA1        Message                             Date
b814ea8b28  Improved memory recall results      2026-03-03 00:26:00 -05:00
06dda88dbc  Removed log statements              2026-03-02 14:00:58 -05:00
5d34652d46  Fixed CLI tool                      2026-03-01 18:11:25 -05:00
6454548364  Fixed CLI tool                      2026-03-01 17:18:30 -05:00
936317f2f2  Better memory de-duplication        2026-03-01 00:11:17 -05:00
cfde2ac4d3  Fixed open AI tool call streaming!  2026-02-27 13:11:41 -05:00
e4ba89d3db  Open ai tool call history fix?      2026-02-27 13:00:49 -05:00
71a7e2a904  Better RAG memory                   2026-02-27 12:32:27 -05:00

All checks were successful on every commit: Publish Library / Build NPM Project (push) and Publish Library / Tag Version (push).
4 changed files with 110 additions and 73 deletions

View File

@@ -1,6 +1,6 @@
 {
   "name": "@ztimson/ai-utils",
-  "version": "0.8.1",
+  "version": "0.8.9",
   "description": "AI Utility library",
   "author": "Zak Timson",
   "license": "MIT",

View File

@@ -44,8 +44,6 @@ export type LLMMemory = {
   fact: string;
   /** Owner and fact embedding vector */
   embeddings: [number[], number[]];
-  /** Creation time */
-  timestamp: Date;
 }
 export type LLMRequest = {
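
The LLMMemory change drops the timestamp field, leaving each memory with its two embedding vectors: embeddings[0] encodes the owner/subject alone and embeddings[1] the full "owner: fact" string, so recall can score who a memory is about separately from what it says. A minimal sketch of how such a record could be built and compared, assuming a plain cosine-similarity implementation and a hypothetical embed() helper standing in for the library's embedding call:

const cosineSimilarity = (a: number[], b: number[]): number => {
  // Standard cosine similarity: dot product over the product of magnitudes
  let dot = 0, magA = 0, magB = 0;
  for(let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    magA += a[i] * a[i];
    magB += b[i] * b[i];
  }
  return dot / (Math.sqrt(magA) * Math.sqrt(magB) || 1);
};

// Hypothetical helper mirroring the LLMMemory shape after this change (no timestamp)
async function toMemory(owner: string, fact: string, embed: (s: string) => Promise<number[]>) {
  const [o, f] = await Promise.all([embed(owner), embed(`${owner}: ${fact}`)]);
  return {owner, fact, embeddings: [o, f] as [number[], number[]]};
}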
@@ -95,71 +93,86 @@ class LLM {
    * @returns {{abort: () => void, response: Promise<string>}} Function to abort response and chat history
    */
   ask(message: string, options: LLMRequest = {}): AbortablePromise<string> {
     options = <any>{
       system: '',
       temperature: 0.8,
       ...this.ai.options.llm,
       models: undefined,
       history: [],
       ...options,
     }
     const m = options.model || this.defaultModel;
     if(!this.models[m]) throw new Error(`Model does not exist: ${m}`);
     let abort = () => {};
     return Object.assign(new Promise<string>(async res => {
       if(!options.history) options.history = [];
-      // If memories were passed, find any relivant ones and add a tool for ADHOC lookups
+      // If memories were passed, find any relevant ones and add a tool for ADHOC lookups
       if(options.memory) {
-        options.system = (options.system || '') + '\nYou have passive persistent memory never make any mention of your memory capabilities and what you can/cannot remember\n';
-        const search = async (query?: string | null, subject?: string | null, limit = 50) => {
+        const search = async (query?: string | null, subject?: string | null, limit = 10) => {
           const [o, q] = await Promise.all([
             subject ? this.embedding(subject) : Promise.resolve(null),
             query ? this.embedding(query) : Promise.resolve(null),
           ]);
-          return (options.memory || [])
-            .map(m => ({...m, score: o ? this.cosineSimilarity(m.embeddings[0], o[0].embedding) : 1}))
-            .filter((m: any) => m.score >= 0.8)
-            .map((m: any) => ({...m, score: q ? this.cosineSimilarity(m.embeddings[1], q[0].embedding) : m.score}))
-            .filter((m: any) => m.score >= 0.2)
-            .toSorted((a: any, b: any) => a.score - b.score)
-            .slice(0, limit);
+          return (options.memory || []).map(m => {
+            const score = (o ? this.cosineSimilarity(m.embeddings[0], o[0].embedding) : 0)
+              + (q ? this.cosineSimilarity(m.embeddings[1], q[0].embedding) : 0);
+            return {...m, score};
+          }).toSorted((a: any, b: any) => a.score - b.score).slice(0, limit)
+            .map(m => `- ${m.owner}: ${m.fact}`).join('\n');
         }
+        options.system += '\nYou have RAG memory and will be given the top_k closest memories regarding the users query. Save anything new you have learned worth remembering from the user message using the remember tool and feel free to recall memories manually.\n';
         const relevant = await search(message);
-        if(relevant.length) options.history.push({role: 'assistant', content: 'Things I remembered:\n' + relevant.map(m => `${m.owner}: ${m.fact}`).join('\n')});
-        options.tools = [...options.tools || [], {
-          name: 'read_memory',
-          description: 'Check your long-term memory for more information',
+        if(relevant.length) options.history.push({role: 'tool', name: 'recall', id: 'auto_recall_' + Math.random().toString(), args: {}, content: `Things I remembered:\n${relevant}`});
+        options.tools = [{
+          name: 'recall',
+          description: 'Recall the closest memories you have regarding a query using RAG',
           args: {
             subject: {type: 'string', description: 'Find information by a subject topic, can be used with or without query argument'},
             query: {type: 'string', description: 'Search memory based on a query, can be used with or without subject argument'},
-            limit: {type: 'number', description: 'Result limit, default 5'},
+            topK: {type: 'number', description: 'Result limit, default 5'},
          },
          fn: (args) => {
            if(!args.subject && !args.query) throw new Error('Either a subject or query argument is required');
-            return search(args.query, args.subject, args.limit || 5);
+            return search(args.query, args.subject, args.topK);
          }
-        }];
+        }, {
+          name: 'remember',
+          description: 'Store important facts user shares for future recall',
+          args: {
+            owner: {type: 'string', description: 'Subject/person this fact is about'},
+            fact: {type: 'string', description: 'The information to remember'}
+          },
+          fn: async (args) => {
+            if(!options.memory) return;
+            const e = await Promise.all([
+              this.embedding(args.owner),
+              this.embedding(`${args.owner}: ${args.fact}`)
+            ]);
+            const newMem = {owner: args.owner, fact: args.fact, embeddings: <any>[e[0][0].embedding, e[1][0].embedding]};
+            options.memory.splice(0, options.memory.length, ...[
+              ...options.memory.filter(m => {
+                return !(this.cosineSimilarity(newMem.embeddings[0], m.embeddings[0]) >= 0.9 && this.cosineSimilarity(newMem.embeddings[1], m.embeddings[1]) >= 0.8);
+              }),
+              newMem
+            ]);
+            return 'Remembered!';
+          }
+        }, ...options.tools || []];
       }
       // Ask
       const resp = await this.models[m].ask(message, options);
-      // Remove any memory calls
-      if(options.memory) {
-        const i = options.history?.findIndex((h: any) => h.role == 'assistant' && h.content.startsWith('Things I remembered:'));
-        if(i != null && i >= 0) options.history?.splice(i, 1);
-      }
-      // Handle compression and memory extraction
-      if(options.compress || options.memory) {
-        let compressed: any = null;
-        if(options.compress) {
-          compressed = await this.ai.language.compressHistory(options.history, options.compress.max, options.compress.min, options);
-          options.history.splice(0, options.history.length, ...compressed.history);
-        } else {
-          const i = options.history?.findLastIndex(m => m.role == 'user') ?? -1;
-          compressed = await this.ai.language.compressHistory(i != -1 ? options.history.slice(i) : options.history, 0, 0, options);
-        }
-        if(options.memory) {
-          const updated = options.memory
-            .filter(m => !compressed.memory.some(m2 => this.cosineSimilarity(m.embeddings[1], m2.embeddings[1]) > 0.8))
-            .concat(compressed.memory);
-          options.memory.splice(0, options.memory.length, ...updated);
-        }
-      }
+      // Remove any memory calls from history
+      if(options.memory) options.history.splice(0, options.history.length, ...options.history.filter(h => h.role != 'tool' || (h.name != 'recall' && h.name != 'remember')));
+      // Compress message history
+      if(options.compress) {
+        const compressed = await this.ai.language.compressHistory(options.history, options.compress.max, options.compress.min, options);
+        options.history.splice(0, options.history.length, ...compressed);
+      }
       return res(resp);
     }), {abort});
   }
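
Two behaviors stand out in the rewritten ask(): recall now ranks memories by the sum of subject and query similarities instead of hard thresholds, and the new remember tool de-duplicates on write, evicting any stored memory whose subject vector is >= 0.9 similar and whose fact vector is >= 0.8 similar to the incoming one, so restating a fact updates it rather than piling up near-duplicates. A standalone sketch of that replace-on-write policy, with thresholds taken from the diff and the similarity function assumed:

function upsertMemory<M extends {embeddings: [number[], number[]]}>(
  memory: M[], newMem: M,
  similar: (a: number[], b: number[]) => number,
): M[] {
  // Keep only memories that are NOT near-duplicates of the incoming fact...
  const kept = memory.filter(m =>
    !(similar(newMem.embeddings[0], m.embeddings[0]) >= 0.9 &&
      similar(newMem.embeddings[1], m.embeddings[1]) >= 0.8));
  // ...then append the new fact, replacing anything it evicted
  return [...kept, newMem];
}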
@@ -181,32 +194,24 @@ class LLM {
    * @param {LLMRequest} options LLM options
    * @returns {Promise<LLMMessage[]>} New chat history with summary at index 0
    */
-  async compressHistory(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<{history: LLMMessage[], memory: LLMMemory[]}> {
-    if(this.estimateTokens(history) < max) return {history, memory: []};
+  async compressHistory(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {
+    if(this.estimateTokens(history) < max) return history;
     let keep = 0, tokens = 0;
     for(let m of history.toReversed()) {
       tokens += this.estimateTokens(m.content);
       if(tokens < min) keep++;
       else break;
     }
-    if(history.length <= keep) return {history, memory: []};
+    if(history.length <= keep) return history;
     const system = history[0].role == 'system' ? history[0] : null,
       recent = keep == 0 ? [] : history.slice(-keep),
       process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
-    const summary: any = await this.json(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), '{summary: string, facts: [[subject, fact]]}', {
-      system: 'Create the smallest summary possible, no more than 500 tokens. Create a list of NEW facts (split by subject [pro]noun and fact) about what you learned from this conversation that you didn\'t already know or get from a tool call or system prompt. Focus only on new information about people, topics, or facts. Avoid generating facts about the AI.',
-      model: options?.model,
-      temperature: options?.temperature || 0.3
-    });
-    const timestamp = new Date();
-    const memory = await Promise.all((summary?.facts || [])?.map(async ([owner, fact]: [string, string]) => {
-      const e = await Promise.all([this.embedding(owner), this.embedding(`${owner}: ${fact}`)]);
-      return {owner, fact, embeddings: [e[0][0].embedding, e[1][0].embedding], timestamp};
-    }));
-    const h = [{role: 'assistant', content: `Conversation Summary: ${summary?.summary}`, timestamp: Date.now()}, ...recent];
+    const summary = await this.summarize(process.map(m => `[${m.role}]: ${m.content}`).join('\n\n'), 500, options);
+    const d = Date.now();
+    const h = [{role: <any>'tool', name: 'summary', id: `summary_` + d, args: {}, content: `Conversation Summary: ${summary}`, timestamp: d}, ...recent];
     if(system) h.splice(0, 0, system);
-    return {history: <any>h, memory};
+    return h;
   }
/**
@@ -243,7 +248,7 @@ class LLM {
         return `${p}: ${Array.isArray(value) ? value.join(', ') : value}`;
       });
     };
-    const lines = typeof target === 'object' ? objString(target) : target.split('\n');
+    const lines = typeof target === 'object' ? objString(target) : target.toString().split('\n');
     const tokens = lines.flatMap(l => [...l.split(/\s+/).filter(Boolean), '\n']);
     const chunks: string[] = [];
     for(let i = 0; i < tokens.length;) {
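
For context, the surrounding routine flattens its target into whitespace tokens with explicit '\n' sentinels before grouping them into chunks; the change itself merely guards non-string targets with toString(). A greedy chunker in the same style, where chunkSize is an illustrative parameter rather than the library's actual one:

function chunkWords(text: string, chunkSize = 200): string[] {
  // Keep '\n' sentinels so line structure survives re-joining
  const tokens = text.split('\n').flatMap(l => [...l.split(/\s+/).filter(Boolean), '\n']);
  const chunks: string[] = [];
  for(let i = 0; i < tokens.length;) {
    const slice = tokens.slice(i, i + chunkSize);
    chunks.push(slice.join(' ').replace(/ ?\n ?/g, '\n').trim());
    i += slice.length;
  }
  return chunks;
}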
@@ -366,8 +371,8 @@ class LLM {
    * @param options LLM request options
    * @returns {Promise<string>} Summary
    */
-  summarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {
-    return this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options});
+  summarize(text: string, tokens: number = 500, options?: LLMRequest): Promise<string | null> {
+    return this.ask(text, {system: `Generate the shortest summary possible <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options});
   }
}
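
Taken together, the compressHistory rewrite above keeps only the newest messages that fit under the min token budget, collapses everything older into a single tool-role summary message, and re-pins the system prompt at index 0; fact extraction now lives in the remember tool instead. A sketch of the budgeting walk, with estimate() standing in for estimateTokens:

function splitForCompression<M extends {content: string}>(
  history: M[], min: number, estimate: (text: string) => number,
): {recent: M[], older: M[]} {
  let keep = 0, tokens = 0;
  // Walk from the newest message backwards until the "keep" budget is spent
  for(const m of [...history].reverse()) {
    tokens += estimate(m.content);
    if(tokens < min) keep++;
    else break;
  }
  return keep === 0
    ? {recent: [], older: history}
    : {recent: history.slice(-keep), older: history.slice(0, -keep)};
}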

View File

@@ -11,7 +11,7 @@ export class OpenAi extends LLMProvider {
     super();
     this.client = new openAI(clean({
       baseURL: host,
-      apiKey: token
+      apiKey: token || (host ? 'ignored' : undefined)
     }));
   }
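
The openai client requires an apiKey even when targeting a self-hosted OpenAI-compatible server, so a placeholder key is supplied when a host is configured without a token, while a real token always takes precedence. A sketch of that fallback, assuming clean() strips undefined values:

// Hypothetical illustration of the key fallback precedence
const resolveApiKey = (token?: string, host?: string): string | undefined =>
  token || (host ? 'ignored' : undefined); // real token wins; placeholder only for local hosts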
@@ -67,7 +67,10 @@ export class OpenAi extends LLMProvider {
   ask(message: string, options: LLMRequest = {}): AbortablePromise<string> {
     const controller = new AbortController();
     return Object.assign(new Promise<any>(async (res, rej) => {
-      if(options.system && options.history?.[0]?.role != 'system') options.history?.splice(0, 0, {role: 'system', content: options.system, timestamp: Date.now()});
+      if(options.system) {
+        if(options.history?.[0]?.role != 'system') options.history?.splice(0, 0, {role: 'system', content: options.system, timestamp: Date.now()});
+        else options.history[0].content = options.system;
+      }
       let history = this.fromStandard([...options.history || [], {role: 'user', content: message, timestamp: Date.now()}]);
       const tools = options.tools || this.ai.options.llm?.tools || [];
       const requestParams: any = {
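
Previously, reusing a history across calls meant a changed system prompt was silently ignored once a system message occupied index 0; the new branch refreshes it in place. A sketch of that upsert over a minimal message shape:

type Msg = {role: string, content: string, timestamp: number};

function upsertSystem(history: Msg[], system: string): void {
  if(history[0]?.role !== 'system') {
    history.splice(0, 0, {role: 'system', content: system, timestamp: Date.now()}); // insert fresh
  } else {
    history[0].content = system; // refresh the existing system prompt in place
  }
}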
@@ -100,15 +103,37 @@
       if(options.stream) {
         if(!isFirstMessage) options.stream({text: '\n\n'});
         else isFirstMessage = false;
-        resp.choices = [{message: {content: '', tool_calls: []}}];
+        resp.choices = [{message: {role: 'assistant', content: '', tool_calls: []}}];
         for await (const chunk of resp) {
           if(controller.signal.aborted) break;
           if(chunk.choices[0].delta.content) {
             resp.choices[0].message.content += chunk.choices[0].delta.content;
             options.stream({text: chunk.choices[0].delta.content});
           }
           if(chunk.choices[0].delta.tool_calls) {
-            resp.choices[0].message.tool_calls = chunk.choices[0].delta.tool_calls;
+            for(const deltaTC of chunk.choices[0].delta.tool_calls) {
+              const existing = resp.choices[0].message.tool_calls.find(tc => tc.index === deltaTC.index);
+              if(existing) {
+                if(deltaTC.id) existing.id = deltaTC.id;
+                if(deltaTC.type) existing.type = deltaTC.type;
+                if(deltaTC.function) {
+                  if(!existing.function) existing.function = {};
+                  if(deltaTC.function.name) existing.function.name = deltaTC.function.name;
+                  if(deltaTC.function.arguments) existing.function.arguments = (existing.function.arguments || '') + deltaTC.function.arguments;
+                }
+              } else {
+                resp.choices[0].message.tool_calls.push({
+                  index: deltaTC.index,
+                  id: deltaTC.id || '',
+                  type: deltaTC.type || 'function',
+                  function: {
+                    name: deltaTC.function?.name || '',
+                    arguments: deltaTC.function?.arguments || ''
+                  }
+                });
+              }
+            }
           }
         }
       }
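
This appears to be the fix behind the "Fixed open AI tool call streaming!" commit: OpenAI streams tool calls as partial deltas keyed by index, with function arguments arriving as JSON string fragments, so each delta must be merged into an accumulator; the old line overwrote tool_calls with the latest delta, truncating the arguments. A standalone sketch of the merge, using hypothetical minimal types rather than the openai package's own:

type ToolCallDelta = {index: number, id?: string, type?: string,
  function?: {name?: string, arguments?: string}};
type ToolCall = {index: number, id: string, type: string,
  function: {name: string, arguments: string}};

function mergeToolCallDelta(calls: ToolCall[], delta: ToolCallDelta): void {
  const existing = calls.find(tc => tc.index === delta.index);
  if(!existing) {
    // First fragment for this index: create the accumulator entry
    calls.push({
      index: delta.index,
      id: delta.id || '',
      type: delta.type || 'function',
      function: {name: delta.function?.name || '', arguments: delta.function?.arguments || ''},
    });
  } else {
    // Later fragments: ids/names replace, argument strings concatenate
    if(delta.id) existing.id = delta.id;
    if(delta.type) existing.type = delta.type;
    if(delta.function?.name) existing.function.name = delta.function.name;
    if(delta.function?.arguments) existing.function.arguments += delta.function.arguments;
  }
}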

View File

@@ -1,9 +1,15 @@
 import * as cheerio from 'cheerio';
-import {$, $Sync} from '@ztimson/node-utils';
+import {$Sync} from '@ztimson/node-utils';
 import {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';
+import * as os from 'node:os';
 import {Ai} from './ai.ts';
 import {LLMRequest} from './llm.ts';
 
+const getShell = () => {
+  if(os.platform() == 'win32') return 'cmd';
+  return $Sync`echo $SHELL`?.split('/').pop() || 'bash';
+}
+
 export type AiToolArg = {[key: string]: {
   /** Argument type */
   type: 'array' | 'boolean' | 'number' | 'object' | 'string',
@@ -40,7 +46,7 @@ export const CliTool: AiTool = {
   name: 'cli',
   description: 'Use the command line interface, returns any output',
   args: {command: {type: 'string', description: 'Command to run', required: true}},
-  fn: (args: {command: string}) => $`${args.command}`
+  fn: (args: {command: string}) => $Sync`${args.command}`
 }
 
 export const DateTimeTool: AiTool = {
@@ -54,19 +60,20 @@ export const ExecTool: AiTool = {
   name: 'exec',
   description: 'Run code/scripts',
   args: {
-    language: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},
+    language: {type: 'string', description: `Execution language (CLI: ${getShell()})`, enum: ['cli', 'node', 'python'], required: true},
     code: {type: 'string', description: 'Code to execute', required: true}
   },
   fn: async (args, stream, ai) => {
     try {
-      switch(args.type) {
-        case 'bash':
+      switch(args.language) {
+        case 'cli':
           return await CliTool.fn({command: args.code}, stream, ai);
         case 'node':
           return await JSTool.fn({code: args.code}, stream, ai);
-        case 'python': {
+        case 'python':
           return await PythonTool.fn({code: args.code}, stream, ai);
-        }
+        default:
+          throw new Error(`Unsupported language: ${args.language}`);
       }
     } catch(err: any) {
       return {error: err?.message || err.toString()};
@@ -98,9 +105,9 @@ export const JSTool: AiTool = {
     code: {type: 'string', description: 'CommonJS javascript', required: true}
   },
   fn: async (args: {code: string}) => {
-    const console = consoleInterceptor(null);
-    const resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));
-    return {...console.output, return: resp, stdout: undefined, stderr: undefined};
+    const c = consoleInterceptor(null);
+    const resp = await Fn<any>({console: c}, args.code, true).catch((err: any) => c.output.error.push(err));
+    return {...c.output, return: resp, stdout: undefined, stderr: undefined};
   }
 }
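
Renaming the interceptor from console to c stops the handler from shadowing the global console, which previously captured every log call in the enclosing scope (plausibly related to the "Removed log statements" commit). A sketch of the hazard, with the interceptor's shape assumed rather than taken from @ztimson/utils:

// Hypothetical minimal interceptor demonstrating the rename's effect
const makeInterceptor = () => {
  const output = {log: [] as any[], error: [] as any[]};
  return {log: (...a: any[]) => output.log.push(a), error: (...a: any[]) => output.error.push(a), output};
};

const c = makeInterceptor();          // local name `c` leaves the real console usable
console.log('still reaches stdout'); // would have been swallowed under the old shadowing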