import * as os from 'node:os';
import LLM, {AnthropicConfig, OllamaConfig, OpenAiConfig, LLMRequest} from './llm';
import {Audio} from './audio.ts';
import {Vision} from './vision.ts';

/** Promise that can be cancelled via `abort()` */
export type AbortablePromise<T = any> = Promise<T> & { abort: () => any };

export type AiOptions = {
    /** Path to models */
    path?: string;
    /** Large language models, first is default */
    llm?: Omit<LLMRequest, 'model'> & {
        models: {[model: string]: AnthropicConfig | OllamaConfig | OpenAiConfig};
    };
    /** Tesseract OCR configuration */
    tesseract?: {
        /** Model: eng, eng_best, eng_fast */
        model?: string;
    };
    /** Whisper ASR configuration */
    whisper?: {
        /** Whisper binary location */
        binary: string;
        /** Model: `ggml-base.en.bin` */
        model: string;
    };
}

export class Ai {
    /** Audio processing AI */
    audio!: Audio;
    /** Language processing AI */
    language!: LLM;
    /** Vision processing AI */
    vision!: Vision;

    constructor(public readonly options: AiOptions) {
        // Default the model cache to the OS temp directory
        if(!options.path) options.path = os.tmpdir();
        process.env.TRANSFORMERS_CACHE = options.path;

        this.audio = new Audio(this);
        this.language = new LLM(this);
        this.vision = new Vision(this);
    }
}
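// Example: construct the Ai facade and use its audio/language/vision helpers.
// This is an illustrative sketch only; the paths and model names below are
// assumptions for the example, not values this module requires.
//
// const ai = new Ai({
//     path: '/var/cache/ai-models',               // assumed model cache location
//     whisper: {
//         binary: '/usr/local/bin/whisper',       // assumed Whisper binary location
//         model: 'ggml-base.en.bin',
//     },
//     tesseract: {model: 'eng'},
// });
// // ai.audio, ai.language and ai.vision are initialized in the constructor.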