init
15
.editorconfig
Normal file
@@ -0,0 +1,15 @@
root = true

[*]
charset = utf-8
indent_style = tab
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true

[*.ts]
quote_type = single

[*.md]
max_line_length = off
trim_trailing_whitespace = false
48
.github/workflows/build.yaml
vendored
Normal file
@@ -0,0 +1,48 @@
name: Publish Library
run-name: Publish Library

on:
  push:

jobs:
  build:
    name: Build NPM Project
    runs-on: ubuntu-latest
    container: node:alpine
    steps:
      - name: Clone Repository
        uses: ztimson/actions/clone@develop

      - name: Install & Build
        run: npm i && npm run build

      - name: Publish Library
        run: |
          if [ "${{github.ref_name}}" = "master" ]; then
            REGISTRY="${{github.server_url}}/api/packages/${{github.repository_owner}}/npm/"
            npm set registry "$REGISTRY"
            npm set $(echo $REGISTRY | sed s%http:%% | sed s%https:%% ):_authToken "${{secrets.DEPLOY_TOKEN}}"
            npm publish || echo "Failed to publish"

            REGISTRY="https://registry.npmjs.org/"
            npm set registry "$REGISTRY"
            npm set $(echo $REGISTRY | sed s%http:%% | sed s%https:%% ):_authToken "${{secrets.NPM_TOKEN}}"
            npm publish || echo "Failed to publish"
          fi
  tag:
    name: Tag Version
    needs: build
    if: github.ref_name == 'release'
    runs-on: ubuntu-latest
    container: node:alpine
    steps:
      - name: Clone Repository
        uses: ztimson/actions/clone@develop

      - name: Get Version Number
        run: echo "VERSION=$(cat package.json | grep version | grep -Eo ':.+' | grep -Eo '[[:alnum:]\.\/\-]+')" >> $GITHUB_ENV

      - name: Tag Version
        uses: ztimson/actions/tag@develop
        with:
          tag: ${{env.VERSION}}
24
.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
# IDEs
.idea
.vscode

# Artifacts
coverage
dist
node_modules
uploads
public/momentum*js
junit.xml

# Logs
*.log
*.tsbuildinfo

# Databases
*.db
*.db3
*.sqlite
*.sqlite3

# Environment files
*.local
5
.gitmodules
vendored
Normal file
@@ -0,0 +1,5 @@
[submodule "docs"]
	path = docs
	url = ../utils.wiki.git
	branch = master
	ignore = all
12
.npmignore
Normal file
@@ -0,0 +1,12 @@
src
tests
.editorconfig
.gitignore
.gitmodules
.npmignore
CODEOWNERS
Dockerfile
index.html
jest.config.js
tsconfig.json
vite.config.js
7
LICENSE
Normal file
@@ -0,0 +1,7 @@
Copyright (c) 2023 Zakary Timson

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
106
README.md
Normal file
@@ -0,0 +1,106 @@
<!-- Header -->
<div id="top" align="center">
<br />

<!-- Logo -->
<img src="https://git.zakscode.com/repo-avatars/a90851ca730480ec37a5c0c2c4f1b4609eee5eadf806eaf16c83ac4cb7493aa9" alt="Logo" width="200" height="200">

<!-- Title -->
### @ztimson/ai-utils

<!-- Description -->
AI Utility Library - Unified interface for multiple AI providers

<!-- Repo badges -->
[](https://git.zakscode.com/ztimson/ai-utils/tags)
[](https://git.zakscode.com/ztimson/ai-utils/pulls)
[](https://git.zakscode.com/ztimson/ai-utils/issues)

<!-- Links -->

---
<div>
<a href="https://ai-utils.docs.zakscode.com" target="_blank">Documentation</a>
• <a href="https://git.zakscode.com/ztimson/ai-utils/releases" target="_blank">Release Notes</a>
• <a href="https://git.zakscode.com/ztimson/ai-utils/issues/new?template=.github%2fissue_template%2fbug.md" target="_blank">Report a Bug</a>
• <a href="https://git.zakscode.com/ztimson/ai-utils/issues/new?template=.github%2fissue_template%2fenhancement.md" target="_blank">Request a Feature</a>
</div>

---
</div>

## Table of Contents
- [@ztimson/ai-utils](#top)
  - [About](#about)
    - [Features](#features)
    - [Built With](#built-with)
  - [Setup](#setup)
    - [Production](#production)
    - [Development](#development)
  - [Documentation](https://ai-utils.docs.zakscode.com/)
  - [License](#license)

## About

A TypeScript library that provides a unified interface for working with multiple AI providers, making it easy to integrate various AI capabilities into your applications.

### Features

- **Multi-Provider LLM Support**: Seamlessly work with OpenAI, Anthropic (Claude), and self-hosted (Ollama) models
- **Automatic Speech Recognition (ASR)**: Convert audio to text using Whisper models
- **Optical Character Recognition (OCR)**: Extract text from images using Tesseract
- **Semantic Similarity**: Compare text similarity using tensor-based cosine similarity
- **Provider Abstraction**: Switch between AI providers without changing your code

### Built With
[](https://anthropic.com/)
[](https://openai.com/)
[](https://ollama.com/)
[](https://tensorflow.org/)
[](https://tesseract-ocr.github.io/)
[](https://typescriptlang.org/)
[](https://github.com/ggerganov/whisper.cpp)

## Setup

<details>
<summary>
<h3 id="production" style="display: inline">
Production
</h3>
</summary>

#### Prerequisites
- [Node.js](https://nodejs.org/en/download)

#### Instructions
1. Install the package: `npm i @ztimson/ai-utils`

</details>
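
A minimal usage sketch based on the types added in this commit (`src/ai.ts`, `src/llm.ts`); the API key variable and model names below are placeholders, not values shipped with the library:

```ts
import {Ai} from '@ztimson/ai-utils';

// Configure one or more providers; `model` selects the default provider
const ai = new Ai({
	anthropic: {token: process.env.ANTHROPIC_KEY!, model: 'claude-sonnet-4-5'}, // placeholder model name
	ollama: {host: 'http://localhost:11434', model: 'llama3'},                  // placeholder model name
	model: 'anthropic'
});

// ask() resolves to the updated chat history; the last entry is the reply
const history = await ai.llm.ask('What is the capital of France?');
console.log(history.at(-1)?.content);
```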

<details>
<summary>
<h3 id="development" style="display: inline">
Development
</h3>
</summary>

#### Prerequisites
- [Node.js](https://nodejs.org/en/download)

#### Instructions
1. Install the dependencies: `npm i`
2. Build library: `npm run build`
3. Run unit tests: `npm test`

</details>

## Documentation

[Available Here](https://ai-utils.docs.zakscode.com/)

## License

Copyright © 2023 Zakary Timson | Available under the MIT License

See the [license](_media/LICENSE) for more information.
9
ai-utils.iml
Normal file
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
	<component name="NewModuleRootManager" inherit-compiler-output="true">
		<exclude-output />
		<content url="file://$MODULE_DIR$" />
		<orderEntry type="inheritedJdk" />
		<orderEntry type="sourceFolder" forTests="false" />
	</component>
</module>
3599
package-lock.json
generated
Normal file
File diff suppressed because it is too large
46
package.json
Normal file
@@ -0,0 +1,46 @@
{
	"name": "@ztimson/ai-utils",
	"version": "0.1.4",
	"description": "AI Utility library",
	"author": "Zak Timson",
	"license": "MIT",
	"private": false,
	"repository": {
		"type": "git",
		"url": "https://git.zakscode.com/ztimson/ai-utilities"
	},
	"main": "./dist/index.cjs",
	"module": "./dist/index.mjs",
	"types": "./dist/index.d.ts",
	"exports": {
		".": {
			"types": "./dist/index.d.ts",
			"import": "./dist/index.mjs",
			"require": "./dist/index.cjs"
		}
	},
	"scripts": {
		"build": "npx tsc && npx vite build",
		"docs": "typedoc --cleanOutputDir false --out ./docs --entryPoints src/**/*.ts --readme none",
		"watch": "npx vite build --watch"
	},
	"dependencies": {
		"@anthropic-ai/sdk": "^0.67.0",
		"@tensorflow/tfjs": "^4.22.0",
		"@ztimson/node-utils": "^1.0.4",
		"@ztimson/utils": "^0.27.9",
		"ollama": "^0.6.0",
		"openai": "^6.6.0",
		"tesseract.js": "^6.0.1"
	},
	"devDependencies": {
		"@types/node": "^24.8.1",
		"typedoc": "^0.26.7",
		"typescript": "^5.3.3",
		"vite": "^5.0.12",
		"vite-plugin-dts": "^4.5.3"
	},
	"files": [
		"dist"
	]
}
115
src/ai.ts
Normal file
@@ -0,0 +1,115 @@
import {$} from '@ztimson/node-utils';
import {createWorker} from 'tesseract.js';
import {LLM, LLMOptions} from './llm';
import fs from 'node:fs/promises';
import Path from 'node:path';
import * as tf from '@tensorflow/tfjs';

export type AiOptions = LLMOptions & {
	whisper?: {
		/** Whisper binary location */
		binary: string;
		/** Model */
		model: WhisperModel;
		/** Working directory for models and temporary files */
		path: string;
	}
}

export type WhisperModel = 'tiny' | 'base' | 'small' | 'medium' | 'large';

export class Ai {
	private downloads: {[key: string]: Promise<void>} = {};
	private whisperModel!: string;

	/** Large Language Models */
	llm!: LLM;

	constructor(public readonly options: AiOptions) {
		this.llm = new LLM(this, options);
		if(this.options.whisper?.binary) this.downloadAsrModel(this.options.whisper.model);
	}

	/**
	 * Convert audio to text using Automatic Speech Recognition
	 * @param {string} path Path to audio
	 * @param model Whisper model
	 * @returns {Promise<any>} Extracted text
	 */
	async asr(path: string, model?: WhisperModel): Promise<string | null> {
		if(!this.options.whisper?.binary) throw new Error('Whisper not configured');
		if(!model) model = this.options.whisper.model;
		await this.downloadAsrModel(<string>model);
		const name = Math.random().toString(36).substring(2, 10) + '-' + path.split('/').pop();
		const output = Path.join(this.options.whisper.path || '/tmp', name);
		await $`rm -f /tmp/${name}.txt && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${path} -otxt -of ${output}`;
		return fs.readFile(output, 'utf-8').then(text => text?.trim() || null)
			.finally(() => fs.rm(output, {force: true}).catch(() => {}));
	}

	/**
	 * Downloads the specified Whisper model if it is not already present locally.
	 *
	 * @param {string} model Whisper model that will be downloaded
	 * @return {Promise<void>} A promise that resolves once the model is downloaded and saved locally.
	 */
	async downloadAsrModel(model: string): Promise<void> {
		if(!this.options.whisper?.binary) throw new Error('Whisper not configured');
		this.whisperModel = Path.join(<string>this.options.whisper?.path, model + '.bin');
		if(await fs.stat(this.whisperModel).then(() => true).catch(() => false)) return;
		if(!!this.downloads[model]) return this.downloads[model];
		this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}.bin`)
			.then(resp => resp.arrayBuffer()).then(arr => Buffer.from(arr)).then(async buffer => {
				await fs.writeFile(this.whisperModel, buffer);
				delete this.downloads[model];
			});
		return this.downloads[model];
	}

	/**
	 * Convert image to text using Optical Character Recognition
	 * @param {string} path Path to image
	 * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
	 */
	ocr(path: string): {abort: () => void, response: Promise<string | null>} {
		let worker: any;
		return {
			abort: () => { worker?.terminate(); },
			response: new Promise(async res => {
				worker = await createWorker('eng');
				const {data} = await worker.recognize(path);
				await worker.terminate();
				res(data.text.trim() || null);
			})
		}
	}

	/**
	 * Compare the difference between two strings using tensor math
	 * @param target Text that will be checked
	 * @param {string} searchTerms Multiple search terms to check against target
	 * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
	 */
	semanticSimilarity(target: string, ...searchTerms: string[]) {
		if(searchTerms.length < 1) throw new Error('Requires at least 2 strings to compare');

		const vector = (text: string, dimensions: number = 10): number[] => {
			return text.toLowerCase().split('').map((char, index) =>
				(char.charCodeAt(0) * (index + 1)) % dimensions / dimensions).slice(0, dimensions);
		}

		const cosineSimilarity = (v1: number[], v2: number[]): number => {
			if(v1.length !== v2.length) throw new Error('Vectors must be same length');
			const tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2);
			const dotProduct = tf.dot(tensor1, tensor2);
			const magnitude1 = tf.norm(tensor1);
			const magnitude2 = tf.norm(tensor2);
			if(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0;
			return dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0]);
		}

		const v = vector(target);
		const similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector));
		return {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities};
	}
}
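
A rough usage sketch for the helpers above; the whisper binary path, model files, and input file names are illustrative assumptions, not part of this commit:

```ts
import {Ai} from './ai';

const ai = new Ai({
	model: 'ollama',
	ollama: {host: 'http://localhost:11434', model: 'llama3'}, // placeholder model name
	// Optional; asr() throws "Whisper not configured" without it
	whisper: {binary: '/usr/local/bin/whisper-cli', model: 'base', path: '/tmp/whisper'}
});

// Speech to text via the whisper.cpp binary
const transcript = await ai.asr('./meeting.wav');

// Image to text; the returned object exposes abort() alongside the promise
const {abort, response} = ai.ocr('./receipt.png');
const text = await response;

// Character-level cosine similarity between a target and one or more search terms
const {avg, max, similarities} = ai.semanticSimilarity('hello world', 'hello', 'goodbye');
```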
133
src/antrhopic.ts
Normal file
@@ -0,0 +1,133 @@
import {Anthropic as anthropic} from '@anthropic-ai/sdk';
import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
import {Ai} from './ai.ts';
import {LLMMessage, LLMRequest} from './llm.ts';
import {AbortablePromise, LLMProvider} from './provider.ts';

export class Anthropic extends LLMProvider {
	client!: anthropic;

	constructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {
		super();
		this.client = new anthropic({apiKey: apiToken});
	}

	private toStandard(history: any[]): LLMMessage[] {
		for(let i = 0; i < history.length; i++) {
			const orgI = i;
			if(typeof history[orgI].content != 'string') {
				if(history[orgI].role == 'assistant') {
					history[orgI].content.filter((c: any) => c.type == 'tool_use').forEach((c: any) => {
						i++;
						history.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});
					});
				} else if(history[orgI].role == 'user') {
					history[orgI].content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {
						const h = history.find((h: any) => h.id == c.tool_use_id);
						h[c.is_error ? 'error' : 'content'] = c.content;
					});
				}
				history[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n');
			}
		}
		return history.filter(h => !!h.content);
	}

	private fromStandard(history: LLMMessage[]): any[] {
		for(let i = 0; i < history.length; i++) {
			if(history[i].role == 'tool') {
				const h: any = history[i];
				history.splice(i, 1,
					{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},
					{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}
				)
				i++;
			}
		}
		return history;
	}

	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
		const controller = new AbortController();
		const response = new Promise<any>(async (res, rej) => {
			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);
			const requestParams: any = {
				model: options.model || this.model,
				max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
				system: options.system || this.ai.options.system || '',
				temperature: options.temperature || this.ai.options.temperature || 0.7,
				tools: (options.tools || this.ai.options.tools || []).map(t => ({
					name: t.name,
					description: t.description,
					input_schema: {
						type: 'object',
						properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
						required: t.args ? Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []
					},
					fn: undefined
				})),
				messages: history,
				stream: !!options.stream,
			};

			// Run tool chains
			let resp: any;
			do {
				resp = await this.client.messages.create(requestParams);

				// Streaming mode
				if(options.stream) {
					resp.content = [];
					for await (const chunk of resp) {
						if(controller.signal.aborted) break;
						if(chunk.type === 'content_block_start') {
							if(chunk.content_block.type === 'text') {
								resp.content.push({type: 'text', text: ''});
							} else if(chunk.content_block.type === 'tool_use') {
								resp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});
							}
						} else if(chunk.type === 'content_block_delta') {
							if(chunk.delta.type === 'text_delta') {
								const text = chunk.delta.text;
								resp.content.at(-1).text += text;
								options.stream({text});
							} else if(chunk.delta.type === 'input_json_delta') {
								resp.content.at(-1).input += chunk.delta.partial_json;
							}
						} else if(chunk.type === 'content_block_stop') {
							const last = resp.content.at(-1);
							if(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};
						} else if(chunk.type === 'message_stop') {
							break;
						}
					}
				}

				// Run tools
				const toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');
				if(toolCalls.length && !controller.signal.aborted) {
					history.push({role: 'assistant', content: resp.content});
					const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
						const tool = options.tools?.find(findByProp('name', toolCall.name));
						if(!tool) return {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};
						try {
							const result = await tool.fn(toolCall.input, this.ai);
							return {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};
						} catch (err: any) {
							return {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};
						}
					}));
					history.push({role: 'user', content: results});
					requestParams.messages = history;
				}
			} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));
			if(options.stream) options.stream({done: true});
			res(this.toStandard([...history, {
				role: 'assistant',
				content: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\n\n')
			}]));
		});
		return Object.assign(response, {abort: () => controller.abort()});
	}
}
4
src/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from './ai';
export * from './antrhopic';
export * from './llm';
export * from './tools';
161
src/llm.ts
Normal file
@@ -0,0 +1,161 @@
import {JSONAttemptParse} from '@ztimson/utils';
import {Ai} from './ai.ts';
import {Anthropic} from './antrhopic.ts';
import {Ollama} from './ollama.ts';
import {OpenAi} from './open-ai.ts';
import {AbortablePromise, LLMProvider} from './provider.ts';
import {AiTool} from './tools.ts';

export type LLMMessage = {
	/** Message originator */
	role: 'assistant' | 'system' | 'user';
	/** Message content */
	content: string | any;
} | {
	/** Tool call */
	role: 'tool';
	/** Unique ID for call */
	id: string;
	/** Tool that was run */
	name: string;
	/** Tool arguments */
	args: any;
	/** Tool result */
	content: undefined | string;
	/** Tool error */
	error: undefined | string;
}

export type LLMOptions = {
	/** Anthropic settings */
	anthropic?: {
		/** API Token */
		token: string;
		/** Default model */
		model: string;
	},
	/** Ollama settings */
	ollama?: {
		/** Connection URL */
		host: string;
		/** Default model */
		model: string;
	},
	/** Open AI settings */
	openAi?: {
		/** API Token */
		token: string;
		/** Default model */
		model: string;
	},
	/** Default provider & model */
	model: string | [string, string];
} & Omit<LLMRequest, 'model'>;

export type LLMRequest = {
	/** System prompt */
	system?: string;
	/** Message history */
	history?: LLMMessage[];
	/** Max tokens for request */
	max_tokens?: number;
	/** 0 = Rigid Logic, 1 = Balanced, 2 = Hyper Creative */
	temperature?: number;
	/** Available tools */
	tools?: AiTool[];
	/** LLM model */
	model?: string | [string, string];
	/** Stream response */
	stream?: (chunk: {text?: string, done?: true}) => any;
	/** Compress old messages in the chat to free up context */
	compress?: {
		/** Trigger chat compression once context exceeds the token count */
		max: number;
		/** Compress chat until context size is smaller than this */
		min: number;
	}
}

export class LLM {
	private providers: {[key: string]: LLMProvider} = {};

	constructor(public readonly ai: Ai, public readonly options: LLMOptions) {
		if(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
		if(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
		if(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
	}

	/**
	 * Chat with LLM
	 * @param {string} message Question
	 * @param {LLMRequest} options Configuration options and chat history
	 * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
	 */
	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
		let model: any = [null, null];
		if(options.model) {
			if(typeof options.model == 'object') model = options.model;
			else model = [options.model, (<any>this.options)[options.model]?.model];
		}
		if(!options.model || model[1] == null) {
			if(typeof this.options.model == 'object') model = this.options.model;
			else model = [this.options.model, (<any>this.options)[this.options.model]?.model];
		}
		if(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);
		return this.providers[model[0]].ask(message, {...options, model: model[1]});
	}

	/**
	 * Compress chat history to reduce context size
	 * @param {LLMMessage[]} history Chat log that will be compressed
	 * @param max Trigger compression once context is larger than max
	 * @param min Summarize until context size is less than min
	 * @param {LLMRequest} options LLM options
	 * @returns {Promise<LLMMessage[]>} New chat history with summary at index 0
	 */
	async compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {
		if(this.estimateTokens(history) < max) return history;
		let keep = 0, tokens = 0;
		for(let m of history.toReversed()) {
			tokens += this.estimateTokens(m.content);
			if(tokens < min) keep++;
			else break;
		}
		if(history.length <= keep) return history;
		const recent = keep == 0 ? [] : history.slice(-keep),
			process = (keep == 0 ? history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');
		const summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\n\n'), 250, options);
		return [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];
	}

	/**
	 * Estimate variable as tokens
	 * @param history Object to size
	 * @returns {number} Rough token count
	 */
	estimateTokens(history: any): number {
		const text = JSON.stringify(history);
		return Math.ceil((text.length / 4) * 1.2);
	}

	/**
	 * Ask the LLM a question & attempt to parse the reply as JSON
	 * @param {string} message Question
	 * @param {LLMRequest} options Configuration options and chat history
	 * @returns {Promise<any>} Parsed object, or an empty object on failure
	 */
	async json(message: string, options: LLMRequest) {
		let resp = await this.ask(message, {
			system: '',
			...options
		});
		const content = resp?.at(-1)?.content;
		if(!content) return {};
		return JSONAttemptParse(/\{[\s\S]*\}/.exec(<string>content)?.[0] || '', {});
	}

	/**
	 * Create a summary of some text
	 * @param {string} text Text to summarize
	 * @param {number} tokens Max number of tokens
	 * @param options LLM request options
	 * @returns {Promise<string>} Summary
	 */
	summarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {
		return this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})
			.then(history => <string>history.pop()?.content || null);
	}
}
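
A sketch of the richer `LLMRequest` options defined above; the OpenAI key variable, model name, and token thresholds are illustrative assumptions:

```ts
import {Ai} from './ai';
import {DateTimeTool} from './tools';

const ai = new Ai({model: 'openAi', openAi: {token: process.env.OPENAI_KEY!, model: 'gpt-4o-mini'}}); // placeholder model name

const request = ai.llm.ask('What time is it right now?', {
	system: 'You are a terse assistant',
	tools: [DateTimeTool],           // Tools the model may call mid-conversation
	stream: chunk => {               // Called per text chunk, then once with {done: true}
		if(chunk.text) process.stdout.write(chunk.text);
	},
	compress: {max: 8000, min: 4000} // Summarize old history once it grows past ~8k estimated tokens
});

// The returned promise is abortable
setTimeout(() => request.abort(), 30_000);
const history = await request;
```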
113
src/ollama.ts
Normal file
@@ -0,0 +1,113 @@
import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
import {Ai} from './ai.ts';
import {LLMMessage, LLMRequest} from './llm.ts';
import {AbortablePromise, LLMProvider} from './provider.ts';
import {Ollama as ollama} from 'ollama';

export class Ollama extends LLMProvider {
	client!: ollama;

	constructor(public readonly ai: Ai, public host: string, public model: string) {
		super();
		this.client = new ollama({host});
	}

	private toStandard(history: any[]): LLMMessage[] {
		for(let i = 0; i < history.length; i++) {
			if(history[i].role == 'assistant' && history[i].tool_calls) {
				if(history[i].content) delete history[i].tool_calls;
				else {
					history.splice(i, 1);
					i--;
				}
			} else if(history[i].role == 'tool') {
				const error = history[i].content.startsWith('{"error":');
				history[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};
			}
		}
		return history;
	}

	private fromStandard(history: LLMMessage[]): any[] {
		return history.map((h: any) => {
			if(h.role != 'tool') return h;
			return {role: 'tool', tool_name: h.name, content: h.error || h.content}
		});
	}

	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
		const controller = new AbortController();
		const response = new Promise<any>(async (res, rej) => {
			let system = options.system || this.ai.options.system;
			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
			if(history[0].role == 'system') {
				if(!system) system = history.shift().content;
				else history.shift();
			}
			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);
			if(system) history.unshift({role: 'system', content: system})

			const requestParams: any = {
				model: options.model || this.model,
				messages: history,
				stream: !!options.stream,
				signal: controller.signal,
				options: {
					temperature: options.temperature || this.ai.options.temperature || 0.7,
					num_predict: options.max_tokens || this.ai.options.max_tokens || 4096,
				},
				tools: (options.tools || this.ai.options.tools || []).map(t => ({
					type: 'function',
					function: {
						name: t.name,
						description: t.description,
						parameters: {
							type: 'object',
							properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
							required: t.args ? Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []
						}
					}
				}))
			}

			// Run tool chains
			let resp: any;
			do {
				resp = await this.client.chat(requestParams);
				if(options.stream) {
					resp.message = {role: 'assistant', content: '', tool_calls: []};
					for await (const chunk of resp) {
						if(controller.signal.aborted) break;
						if(chunk.message?.content) {
							resp.message.content += chunk.message.content;
							options.stream({text: chunk.message.content});
						}
						if(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
						if(chunk.done) break;
					}
				}

				// Run tools
				if(resp.message?.tool_calls?.length && !controller.signal.aborted) {
					history.push(resp.message);
					const results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {
						const tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));
						if(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{"error": "Tool not found"}'};
						const args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
						try {
							const result = await tool.fn(args, this.ai);
							return {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};
						} catch (err: any) {
							return {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};
						}
					}));
					history.push(...results);
					requestParams.messages = history;
				}
			} while (!controller.signal.aborted && resp.message?.tool_calls?.length);
			if(options.stream) options.stream({done: true});
			res(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));
		});
		return Object.assign(response, {abort: () => controller.abort()});
	}
}
130
src/open-ai.ts
Normal file
@@ -0,0 +1,130 @@
import {OpenAI as openAI} from 'openai';
import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';
import {Ai} from './ai.ts';
import {LLMMessage, LLMRequest} from './llm.ts';
import {AbortablePromise, LLMProvider} from './provider.ts';

export class OpenAi extends LLMProvider {
	client!: openAI;

	constructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {
		super();
		this.client = new openAI({apiKey: apiToken});
	}

	private toStandard(history: any[]): LLMMessage[] {
		for(let i = 0; i < history.length; i++) {
			const h = history[i];
			if(h.role === 'assistant' && h.tool_calls) {
				const tools = h.tool_calls.map((tc: any) => ({
					role: 'tool',
					id: tc.id,
					name: tc.function.name,
					args: JSONAttemptParse(tc.function.arguments, {})
				}));
				history.splice(i, 1, ...tools);
				i += tools.length - 1;
			} else if(h.role === 'tool' && h.content) {
				const record = history.find(h2 => h.tool_call_id == h2.id);
				if(record) {
					if(h.content.includes('"error":')) record.error = h.content;
					else record.content = h.content;
				}
				history.splice(i, 1);
				i--;
			}
		}
		return history;
	}

	private fromStandard(history: LLMMessage[]): any[] {
		return history.reduce((result, h) => {
			if(h.role === 'tool') {
				result.push({
					role: 'assistant',
					content: null,
					tool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],
					refusal: null,
					annotations: [],
				}, {
					role: 'tool',
					tool_call_id: h.id,
					content: h.error || h.content
				});
			} else {
				result.push(h);
			}
			return result;
		}, [] as any[]);
	}

	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
		const controller = new AbortController();
		const response = new Promise<any>(async (res, rej) => {
			let history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);
			if(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);

			const requestParams: any = {
				model: options.model || this.model,
				messages: history,
				stream: !!options.stream,
				max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
				temperature: options.temperature || this.ai.options.temperature || 0.7,
				tools: (options.tools || this.ai.options.tools || []).map(t => ({
					type: 'function',
					function: {
						name: t.name,
						description: t.description,
						parameters: {
							type: 'object',
							properties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},
							required: t.args ? Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []
						}
					}
				}))
			};

			// Run tool chains
			let resp: any;
			do {
				resp = await this.client.chat.completions.create(requestParams);

				// Streaming mode: accumulate the deltas into a single message
				if(options.stream) {
					const message: any = {role: 'assistant', content: '', tool_calls: []};
					for await (const chunk of resp) {
						if(controller.signal.aborted) break;
						const delta = chunk.choices[0]?.delta || {};
						if(delta.content) {
							message.content += delta.content;
							options.stream({text: delta.content});
						}
						(delta.tool_calls || []).forEach((tc: any) => {
							if(message.tool_calls[tc.index]) message.tool_calls[tc.index].function.arguments += tc.function?.arguments || '';
							else message.tool_calls[tc.index] = {id: tc.id, type: 'function', function: {name: tc.function?.name, arguments: tc.function?.arguments || ''}};
						});
					}
					resp.choices = [{message}];
				}

				// Run tools
				const toolCalls = resp.choices[0].message.tool_calls || [];
				if(toolCalls.length && !controller.signal.aborted) {
					history.push(resp.choices[0].message);
					const results = await Promise.all(toolCalls.map(async (toolCall: any) => {
						const tool = options.tools?.find(findByProp('name', toolCall.function.name));
						if(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{"error": "Tool not found"}'};
						try {
							const args = JSONAttemptParse(toolCall.function.arguments, {});
							const result = await tool.fn(args, this.ai);
							return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};
						} catch (err: any) {
							return {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};
						}
					}));
					history.push(...results);
					requestParams.messages = history;
				}
			} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);

			if(options.stream) options.stream({done: true});
			res(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));
		});

		return Object.assign(response, {abort: () => controller.abort()});
	}
}
7
src/provider.ts
Normal file
@@ -0,0 +1,7 @@
import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';

export type AbortablePromise<T> = Promise<T> & {abort: () => void};

export abstract class LLMProvider {
	abstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;
}
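
To illustrate the contract, a hypothetical provider (not part of this commit) only has to implement `ask` and return an abortable promise:

```ts
import {LLMMessage, LLMRequest} from './llm.ts';
import {AbortablePromise, LLMProvider} from './provider.ts';

// Toy provider that simply echoes the prompt back as the assistant reply
export class EchoProvider extends LLMProvider {
	ask(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {
		const controller = new AbortController();
		const response = Promise.resolve<LLMMessage[]>([
			...(options.history || []),
			{role: 'user', content: message},
			{role: 'assistant', content: message}
		]);
		return Object.assign(response, {abort: () => controller.abort()});
	}
}
```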
138
src/tools.ts
Normal file
@@ -0,0 +1,138 @@
import {$, $Sync} from '@ztimson/node-utils';
import {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';
import {Ai} from './ai.ts';

export type AiToolArg = {[key: string]: {
	/** Argument type */
	type: 'array' | 'boolean' | 'number' | 'object' | 'string',
	/** Argument description */
	description: string,
	/** Required argument */
	required?: boolean;
	/** Default value */
	default?: any,
	/** Options */
	enum?: string[],
	/** Minimum value or length */
	min?: number,
	/** Maximum value or length */
	max?: number,
	/** Match pattern */
	pattern?: string,
	/** Child arguments */
	items?: {[key: string]: AiToolArg}
}}

export type AiTool = {
	/** Tool ID / Name - Must be snake_case */
	name: string,
	/** Tool description / prompt */
	description: string,
	/** Tool arguments */
	args?: AiToolArg,
	/** Callback function */
	fn: (args: any, ai: Ai) => any | Promise<any>,
};

export const CliTool: AiTool = {
	name: 'cli',
	description: 'Use the command line interface, returns any output',
	args: {command: {type: 'string', description: 'Command to run', required: true}},
	fn: (args: {command: string}) => $`${args.command}`
}

export const DateTimeTool: AiTool = {
	name: 'get_datetime',
	description: 'Get current date and time',
	args: {},
	fn: async () => new Date().toISOString()
}

export const ExecTool: AiTool = {
	name: 'exec',
	description: 'Run code/scripts',
	args: {
		language: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},
		code: {type: 'string', description: 'Code to execute', required: true}
	},
	fn: async (args, ai) => {
		try {
			switch(args.language) {
				case 'cli':
					return await CliTool.fn({command: args.code}, ai);
				case 'node':
					return await JSTool.fn({code: args.code}, ai);
				case 'python': {
					return await PythonTool.fn({code: args.code}, ai);
				}
			}
		} catch(err: any) {
			return {error: err?.message || err.toString()};
		}
	}
}

export const FetchTool: AiTool = {
	name: 'fetch',
	description: 'Make HTTP request to URL',
	args: {
		url: {type: 'string', description: 'URL to fetch', required: true},
		method: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},
		headers: {type: 'object', description: 'HTTP headers to send', default: {}},
		body: {type: 'object', description: 'HTTP body to send'},
	},
	fn: (args: {
		url: string;
		method: 'GET' | 'POST' | 'PUT' | 'DELETE';
		headers: {[key: string]: string};
		body: any;
	}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})
}

export const JSTool: AiTool = {
	name: 'exec_javascript',
	description: 'Execute commonjs javascript',
	args: {
		code: {type: 'string', description: 'CommonJS javascript', required: true}
	},
	fn: async (args: {code: string}) => {
		const console = consoleInterceptor(null);
		const resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));
		return {...console.output, return: resp, stdout: undefined, stderr: undefined};
	}
}

export const PythonTool: AiTool = {
	name: 'exec_python',
	description: 'Execute python code',
	args: {
		code: {type: 'string', description: 'Python code', required: true}
	},
	fn: async (args: {code: string}) => ({result: $Sync`python -c "${args.code}"`})
}

export const SearchTool: AiTool = {
	name: 'search',
	description: 'Use a search engine to find relevant URLs, should be chained with fetch to scrape sources',
	args: {
		query: {type: 'string', description: 'Search string', required: true},
		length: {type: 'number', description: 'Number of results to return', default: 5},
	},
	fn: async (args: {
		query: string;
		length: number;
	}) => {
		const html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {
			headers: {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9"}
		}).then(resp => resp.text());
		let match, regex = /<a .*?href="(.+?)".+?<\/a>/g;
		const results = new ASet<string>();
		while((match = regex.exec(html)) !== null) {
			let url = /uddg=(.+)&?/.exec(decodeURIComponent(match[1]))?.[1];
			if(url) url = decodeURIComponent(url);
			if(url) results.add(url);
			if(results.size >= (args.length || 5)) break;
		}
		return results;
	}
}
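
Custom tools are plain objects matching `AiTool`; a sketch with an invented weather lookup (the endpoint below is a placeholder, not a real API):

```ts
import {AiTool} from './tools.ts';

export const WeatherTool: AiTool = {
	name: 'get_weather',
	description: 'Look up the current weather for a city',
	args: {
		city: {type: 'string', description: 'City name', required: true},
		units: {type: 'string', description: 'Measurement units', enum: ['metric', 'imperial'], default: 'metric'}
	},
	// The second argument gives tools access back into the library (e.g. ai.llm)
	fn: async (args: {city: string, units?: string}, ai) => {
		// Placeholder endpoint; swap in a real weather API
		const resp = await fetch(`https://example.com/weather?city=${encodeURIComponent(args.city)}&units=${args.units || 'metric'}`);
		return await resp.json();
	}
};
```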
20
tsconfig.json
Normal file
@@ -0,0 +1,20 @@
{
	"include": ["src"],
	"compilerOptions": {
		"target": "ESNext",
		"useDefineForClassFields": true,
		"module": "ESNext",
		"lib": ["ESNext"],
		"skipLibCheck": true,

		/* Bundler mode */
		"moduleResolution": "bundler",
		"allowImportingTsExtensions": true,
		"resolveJsonModule": true,
		"isolatedModules": true,
		"noEmit": true,

		/* Linting */
		"strict": true
	}
}
21
vite.config.ts
Normal file
@@ -0,0 +1,21 @@
import {resolve} from 'path';
import {defineConfig} from 'vite';
import dts from 'vite-plugin-dts';

export default defineConfig({
	build: {
		lib: {
			entry: resolve(process.cwd(), 'src/index.ts'),
			name: 'utils',
			fileName: (module, entryName) => {
				if(module == 'es') return 'index.mjs';
				if(module == 'umd') return 'index.cjs';
				return `${entryName}.${module}.js`;
			}
		},
		ssr: true,
		emptyOutDir: true,
		minify: false,
		sourcemap: true
	},
	plugins: [dts()],
});