Compare commits

16 Commits

| SHA1 |
|---|
| a8b6fdd5a6 |
| 3e899e6ae4 |
| 08fa5b6783 |
| 529f1e2b30 |
| 87ef246760 |
| 10ac6101f4 |
| b5d534dc8c |
| d140b2edd1 |
| 56f6681492 |
| 64945a647c |
| adafd61c83 |
| 7747dc5558 |
| 947bdbc6d4 |
| e2b8c35535 |
| 3b7d28de4b |
| 8f7e816571 |
.gitmodules (vendored, 2 changes)

```diff
@@ -1,5 +1,5 @@
 [submodule "docs"]
 	path = docs
-	url = git@git.zakscode.com:ztimson/utils.wiki.git
+	url = ../utils.wiki.git
 	branch = master
 	ignore = all
```
package.json

```diff
@@ -1,6 +1,6 @@
 {
 	"name": "@ztimson/utils",
-	"version": "0.25.15",
+	"version": "0.25.26",
 	"description": "Utility library",
 	"author": "Zak Timson",
 	"license": "MIT",
```
src/cache.ts (61 changes)

```diff
@@ -1,13 +1,11 @@
-import {Table} from './database.ts';
+import {Database, Table} from './database.ts';
 import {deepCopy, includes, JSONSanitize} from './objects.ts';
 
 export type CacheOptions = {
 	/** Delete keys automatically after x amount of seconds */
 	ttl?: number;
 	/** Storage to persist cache */
-	storage?: Storage | Table<any, any>;
-	/** Key cache will be stored under */
-	storageKey?: string;
+	persistentStorage?: {storage: Storage | Database, key: string} | string;
 	/** Keep or delete cached items once expired, defaults to delete */
 	expiryPolicy?: 'delete' | 'keep';
 }
@@ -24,6 +22,8 @@ export class Cache<K extends string | number | symbol, T> {
 	[key: string | number | symbol]: CachedValue<T> | any;
+	/** Whether cache is complete */
+	complete = false;
 	/** Await initial loading */
 	loading!: Promise<void>;
 
 	/**
 	 * Create new cache
@@ -31,20 +31,30 @@ export class Cache<K extends string | number | symbol, T> {
 	 * @param options
 	 */
 	constructor(public readonly key?: keyof T, public readonly options: CacheOptions = {}) {
-		if(options.storageKey && !options.storage && typeof(Storage) !== 'undefined') options.storage = localStorage;
-		if(options.storage) {
-			if(options.storage instanceof Table) {
-				(async () => (await options.storage?.getAll()).forEach((v: any) => {
-					if(v) {
-						try { this.add(v) }
-						catch { }
-					}
-				}))()
-			} else if(options.storageKey) {
-				const stored = options.storage?.getItem(options.storageKey);
+		let done!: Function;
+		this.loading = new Promise(r => done = r);
+
+		// Persistent storage
+		if(this.options.persistentStorage != null) {
+			if(typeof this.options.persistentStorage == 'string')
+				this.options.persistentStorage = {storage: localStorage, key: this.options.persistentStorage};
+
+			if(this.options.persistentStorage?.storage instanceof Database) {
+				(async () => {
+					const persists: any = this.options.persistentStorage;
+					const table: Table<any, any> = await persists.storage.createTable({name: persists.key, key: this.key});
+					const rows = await table.getAll();
+					Object.assign(this.store, rows.reduce((acc, row) => ({...acc, [this.getKey(row)]: row}), {}));
+					done();
+				})();
+			} else {
+				const stored = this.options.persistentStorage.storage.getItem(this.options.persistentStorage.key);
+				if(stored != null) try { Object.assign(this.store, JSON.parse(stored)); } catch { }
+				done();
+			}
+		}
 
 		// Handle index lookups
 		return new Proxy(this, {
 			get: (target: this, prop: string | symbol) => {
 				if(prop in target) return (target as any)[prop];
@@ -64,12 +74,20 @@ export class Cache<K extends string | number | symbol, T> {
 		return <K>value[this.key];
 	}
 
-	private save(key: K) {
-		if(this.options.storage) {
-			if(this.options.storage instanceof Table) {
-				this.options.storage.put(key, this.store[key]);
-			} else if(this.options.storageKey) {
-				this.options.storage.setItem(this.options.storageKey, JSONSanitize(this.store));
+	private save(key?: K) {
+		const persists: any = this.options.persistentStorage;
+		if(!!persists?.storage) {
+			if(persists.storage instanceof Database) {
+				(<Database>persists.storage).createTable({name: persists.storage.key, key: <string>this.key}).then(table => {
+					if(key) {
+						table.set(key, this.get(key));
+					} else {
+						table.clear();
+						this.all().forEach(row => table.add(row));
+					}
+				});
+			} else {
+				persists.storage.setItem(persists.storage.key, JSONSanitize(this.all(true)));
+			}
 		}
 	}
@@ -114,6 +132,7 @@ export class Cache<K extends string | number | symbol, T> {
 	clear(): this {
+		this.complete = false;
 		this.store = <any>{};
 		this.save();
 		return this;
 	}
 
```
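For reference, a minimal usage sketch of the reworked persistence option, based only on the signatures visible in this diff. The `Cache`, `Database`, `persistentStorage`, `expiryPolicy`, and `loading` names come from the hunks above; the `users` key, `User` type, and package-root imports are assumptions for illustration.

```typescript
import {Cache, Database} from '@ztimson/utils';

type User = {id: string, name: string};

// String shorthand: persist the cache to localStorage under the key 'users'
const users = new Cache<string, User>('id', {persistentStorage: 'users', expiryPolicy: 'keep'});

// Object form: back the cache with an IndexedDB-based Database instead of localStorage
const db = new Database('app', ['users']);
const dbUsers = new Cache<string, User>('id', {persistentStorage: {storage: db, key: 'users'}});

(async () => {
	await dbUsers.loading;               // resolves once persisted rows have been restored
	dbUsers.add({id: '1', name: 'Zak'}); // written back out through save()
	console.log(dbUsers.get('1'));
})();
```

With the string shorthand the cache assumes a browser `localStorage`; passing a `Database` appears to route persistence through the new `createTable`/`getAll` path shown above.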
src/database.ts

```diff
@@ -1,5 +1,6 @@
 import {findByProp} from './array.ts';
 import {ASet} from './aset.ts';
+import {sleepWhile} from './time.ts';
 
 export type TableOptions = {
 	name: string;
@@ -7,11 +8,24 @@ export type TableOptions = {
 	autoIncrement?: boolean;
 };
 
+class AsyncLock {
+	private p = Promise.resolve();
+	run<T>(fn: () => Promise<T>): Promise<T> {
+		const res = this.p.then(fn, fn);
+		this.p = res.then(() => {}, () => {});
+		return res;
+	}
+}
+
 export class Database {
+	private schemaLock = new AsyncLock();
+	private upgrading = false;
+
 	connection!: Promise<IDBDatabase>;
-	ready = false;
 	tables!: TableOptions[];
 
+	get ready() { return !this.upgrading; }
+
 	constructor(public readonly database: string, tables?: (string | TableOptions)[], public version?: number) {
 		this.connection = new Promise((resolve, reject) => {
 			const req = indexedDB.open(this.database, this.version);
@@ -26,7 +40,7 @@ export class Database {
 			const db = req.result;
 			const existing = Array.from(db.objectStoreNames);
 			if(!tables) this.tables = existing.map(t => {
-				const tx = db.transaction(t, 'readonly', )
+				const tx = db.transaction(t, 'readonly');
 				const store = tx.objectStore(t);
 				return {name: t, key: <string>store.keyPath};
 			});
@@ -39,10 +53,11 @@ export class Database {
 				this.version = db.version;
 				resolve(db);
 			}
-			this.ready = true;
+			this.upgrading = false;
 		};
 
 		req.onupgradeneeded = () => {
+			this.upgrading = true;
 			const db = req.result;
 			const existingTables = new ASet(Array.from(db.objectStoreNames));
 			if(tables) {
@@ -52,7 +67,7 @@ export class Database {
 				const t = this.tables.find(findByProp('name', name));
 				db.createObjectStore(name, {
 					keyPath: t?.key,
-					autoIncrement: t?.autoIncrement || !t?.key
+					autoIncrement: t?.autoIncrement || !t?.key,
 				});
 			});
 		}
@@ -60,22 +75,34 @@ export class Database {
 		});
 	}
 
+	waitForUpgrade = () => sleepWhile(() => this.upgrading);
+
 	async createTable<K extends IDBValidKey = any, T = any>(table: string | TableOptions): Promise<Table<K, T>> {
-		if(typeof table == 'string') table = {name: table};
-		const conn = await this.connection;
-		if(!this.includes(table.name)) {
-			conn.close();
-			Object.assign(this, new Database(this.database, [...this.tables, table], (this.version ?? 0) + 1));
-		}
-		return this.table<K, T>(table.name);
+		return this.schemaLock.run(async () => {
+			if (typeof table == 'string') table = { name: table };
+			const conn = await this.connection;
+			if (!this.includes(table.name)) {
+				const newDb = new Database(this.database, [...this.tables, table], (this.version ?? 0) + 1);
+				conn.close();
+				Object.assign(this, newDb);
+				await this.connection;
+			}
+			return this.table<K, T>(table.name);
+		});
 	}
 
 	async deleteTable(table: string | TableOptions): Promise<void> {
-		if(typeof table == 'string') table = {name: table};
-		if(!this.includes(table.name)) return;
-		const conn = await this.connection;
-		conn.close();
-		Object.assign(this, new Database(this.database, this.tables.filter(t => t.name != table.name), (this.version ?? 0) + 1));
+		return this.schemaLock.run(async () => {
+			if (typeof table == 'string') table = { name: table };
+			if (!this.includes(table.name)) return;
+			const conn = await this.connection;
+			const newDb = new Database(this.database, this.tables.filter(t => t.name != (<TableOptions>table).name), (this.version ?? 0) + 1);
+			conn.close();
+			Object.assign(this, newDb);
+			await this.connection;
+		});
 	}
 
 	includes(name: any): boolean {
@@ -96,12 +123,13 @@ export class Table<K extends IDBValidKey = any, T = any> {
 	}
 
 	async tx<R>(table: string, fn: (store: IDBObjectStore) => IDBRequest, readonly = false): Promise<R> {
+		await this.database.waitForUpgrade();
 		const db = await this.database.connection;
 		const tx = db.transaction(table, readonly ? 'readonly' : 'readwrite');
 		const store = tx.objectStore(table);
 		return new Promise<R>((resolve, reject) => {
 			const request = fn(store);
-			request.onsuccess = () => resolve(request.result as R); // ✅ explicit cast
+			request.onsuccess = () => resolve(request.result as R);
 			request.onerror = () => reject(request.error);
 		});
 	}
```
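As far as this diff shows, the new `AsyncLock` exists to queue `createTable`/`deleteTable` calls so that two concurrent schema changes cannot race on the same IndexedDB connection and version bump. A small standalone sketch of that queuing pattern: the `AsyncLock` body is copied from the hunk above, while the surrounding demo code is illustrative only.

```typescript
class AsyncLock {
	private p = Promise.resolve();
	run<T>(fn: () => Promise<T>): Promise<T> {
		const res = this.p.then(fn, fn);       // start fn only after the previous job settles
		this.p = res.then(() => {}, () => {}); // keep the chain alive even if fn rejects
		return res;
	}
}

const lock = new AsyncLock();
const order: string[] = [];

(async () => {
	// Both jobs are queued immediately, but the second body runs only after the first finishes
	await Promise.all([
		lock.run(async () => { order.push('create table: users'); }),
		lock.run(async () => { order.push('create table: posts'); }),
	]);
	console.log(order); // ['create table: users', 'create table: posts']
})();
```

Inside `createTable`/`deleteTable`, the added `await this.connection` after `Object.assign(this, newDb)` seems to serve the same purpose: the next queued schema change only starts once the reopened connection and its version bump have settled.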
src/misc.ts (24 changes)

```diff
@@ -1,6 +1,16 @@
 import {PathEvent} from './path-events.ts';
 import {md5} from './string';
 
+/**
+ * Escape any regex special characters to avoid misinterpretation during search
+ *
+ * @param {string} value String which should be escaped
+ * @return {string} New escaped sequence
+ */
+export function escapeRegex(value: string) {
+	return value.replace(/[.*+?^${}()|\[\]\\]/g, '\\$&');
+}
+
 /**
  * Run a stringified function with arguments asynchronously
  * @param {object} args Map of key/value arguments
@@ -26,13 +36,15 @@ export function gravatar(email: string, def='mp') {
 }
 
 /**
- * Escape any regex special characters to avoid misinterpretation during search
- *
- * @param {string} value String which should be escaped
- * @return {string} New escaped sequence
+ * Convert IPv6 to v4 because who uses that, NAT4Life
+ * @param {string} ip IPv6 address, e.g. 2001:0db8:85a3:0000:0000:8a2e:0370:7334
+ * @returns {string | null} IPv4 address, e.g. 172.16.58.3
  */
-export function escapeRegex(value: string) {
-	return value.replace(/[.*+?^${}()|\[\]\\]/g, '\\$&');
+export function ipV6ToV4(ip: string) {
+	if(!ip) return null;
+	const ipv4 = ip.split(':').splice(-1)[0];
+	if(ipv4 == '1') return '127.0.0.1';
+	return ipv4;
 }
 
 /**
```
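Expected behaviour of the two helpers touched in this file, inferred from the function bodies above rather than from any published docs; the package-root import is an assumption.

```typescript
import {escapeRegex, ipV6ToV4} from '@ztimson/utils';

// escapeRegex makes arbitrary user input safe to embed in a RegExp
const needle = escapeRegex('price (USD)');        // 'price \\(USD\\)'
new RegExp(needle).test('total price (USD): 10'); // true

// ipV6ToV4 keeps only the trailing IPv4 portion of a mapped address
ipV6ToV4('::ffff:192.168.1.10'); // '192.168.1.10'
ipV6ToV4('::1');                 // '127.0.0.1' (loopback special case)
```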
src/path-events.ts

```diff
@@ -99,7 +99,7 @@ export class PathEvent {
 			method = '*';
 		}
 		let temp = p.split('/').filter(p => !!p);
-		this.module = temp.splice(0, 1)[0]?.toLowerCase() || '';
+		this.module = temp.splice(0, 1)[0] || '';
 		this.path = temp.join('/');
 		this.fullPath = `${this.module}${this.module && this.path ? '/' : ''}${this.path}`;
 		this.name = temp.pop() || '';
@@ -140,13 +140,14 @@ export class PathEvent {
 	 */
 	static filter(target: string | PathEvent | (string | PathEvent)[], ...filter: (string | PathEvent)[]): PathEvent[] {
 		const parsedTarget = makeArray(target).map(pe => new PathEvent(pe));
-		const parsedFind = makeArray(filter).map(pe => new PathEvent(pe));
-		return parsedTarget.filter(t => {
-			if(!t.fullPath && t.all) return true;
-			return !!parsedFind.find(f =>
-				(t.fullPath.startsWith(f.fullPath) || f.fullPath.startsWith(t.fullPath)) &&
-				(f.all || t.all || t.methods.intersection(f.methods).length));
-		});
+		const parsedFilter = makeArray(filter).map(pe => new PathEvent(pe));
+		return parsedTarget.filter(t => !!parsedFilter.find(r => {
+			const wildcard = r.fullPath == '*' || t.fullPath == '*';
+			const p1 = r.fullPath.slice(0, r.fullPath.indexOf('*')), p2 = t.fullPath.slice(0, t.fullPath.indexOf('*'))
+			const scope = p1.startsWith(p2) || p2.startsWith(p1);
+			const methods = r.all || t.all || r.methods.intersection(t.methods).length;
+			return (wildcard || scope) && methods;
+		}));
 	}
 
 	/**
@@ -157,15 +158,15 @@ export class PathEvent {
 	 * @return {boolean} Whether there is any overlap
 	 */
 	static has(target: string | PathEvent | (string | PathEvent)[], ...has: (string | PathEvent)[]): boolean {
-		const parsedRequired = makeArray(has).map(pe => new PathEvent(pe));
 		const parsedTarget = makeArray(target).map(pe => new PathEvent(pe));
-		return !!parsedRequired.find(r => {
-			if(!r.fullPath && r.all) return true;
-			const filtered = parsedTarget.filter(p => r.fullPath.startsWith(p.fullPath));
-			if(!filtered.length) return false;
-			const combined = PathEvent.combine(...filtered);
-			return (!combined.none && (combined.all || r.all)) || combined.methods.intersection(r.methods).length;
-		});
+		const parsedRequired = makeArray(has).map(pe => new PathEvent(pe));
+		return !!parsedRequired.find(r => !!parsedTarget.find(t => {
+			const wildcard = r.fullPath == '*' || t.fullPath == '*';
+			const p1 = r.fullPath.slice(0, r.fullPath.indexOf('*')), p2 = t.fullPath.slice(0, t.fullPath.indexOf('*'))
+			const scope = p1.startsWith(p2);
+			const methods = r.all || t.all || r.methods.intersection(t.methods).length;
+			return (wildcard || scope) && methods;
+		}));
 	}
 
 	/**
@@ -213,6 +214,44 @@ export class PathEvent {
 		return p;
 	}
 
+	/**
+	 * Squash 2 sets of paths & return true if any overlap is found
+	 *
+	 * @param has Target must have at least one of these path
+	 * @return {boolean} Whether there is any overlap
+	 */
+	has(...has: (string | PathEvent)[]): boolean {
+		return PathEvent.has(this, ...has);
+	}
+
+	/**
+	 * Squash 2 sets of paths & return true if the target has all paths
+	 *
+	 * @param has Target must have all these paths
+	 * @return {boolean} Whether there is any overlap
+	 */
+	hasAll(...has: (string | PathEvent)[]): boolean {
+		return PathEvent.hasAll(this, ...has);
+	}
+
+	/**
+	 * Same as `has` but raises an error if there is no overlap
+	 *
+	 * @param has Target must have at least one of these path
+	 */
+	hasFatal(...has: (string | PathEvent)[]): void {
+		return PathEvent.hasFatal(this, ...has);
+	}
+
+	/**
+	 * Same as `hasAll` but raises an error if the target is missing any paths
+	 *
+	 * @param has Target must have all these paths
+	 */
+	hasAllFatal(...has: (string | PathEvent)[]): void {
+		return PathEvent.hasAllFatal(this, ...has);
+	}
+
 	/**
 	 * Filter a set of paths based on this event
 	 *
@@ -265,7 +304,7 @@ export class PathEventEmitter implements IPathEventEmitter{
 
 	on(event: Event | Event[], listener: PathListener): PathUnsubscribe {
 		makeArray(event).forEach(e => this.listeners.push([
-			new PathEvent(`${this.prefix}/${new PathEvent(e).toString()}`),
+			new PathEvent(`${this.prefix}/${e}`),
 			listener
 		]));
 		return () => this.off(listener);
```
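A short sketch of how the reworked matching reads in practice. The static calls mirror the assertions in the Path Events spec below; the instance helpers are the ones added in this diff, and the package-root import is assumed.

```typescript
import {PathEvent} from '@ztimson/utils';

// Static form, as exercised in the spec below
const granted = [new PathEvent('users/sys:cr')];
PathEvent.has(granted, 'users/sys:r');  // truthy: same scope and 'r' overlaps 'cr'
PathEvent.has(granted, 'users/nope:r'); // falsy: different scope, no overlap

// The new instance helpers simply forward to the static versions
const evt = new PathEvent('users/sys:r');
evt.has('users/sys:cr');     // truthy, same check as above with `this` as the target
evt.hasFatal('users/sys:r'); // passes; would throw if there were no overlap
```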
Cache tests

```diff
@@ -1,7 +1,7 @@
-import {Cache} from '../src';
+import { Cache } from '../src';
 
 describe('Cache', () => {
-	type TestItem = { id: string; value: string; };
+	type TestItem = { id: string; value: string };
 
 	let cache: Cache<string, TestItem>;
 	let storageMock: Storage;
@@ -17,93 +17,114 @@ describe('Cache', () => {
 			key: jest.fn(),
 			length: 0,
 		};
 
 		// Spies
 		storageGetItemSpy = jest.spyOn(storageMock, 'getItem');
 		storageSetItemSpy = jest.spyOn(storageMock, 'setItem');
-		cache = new Cache<string, TestItem>('id', {storage: storageMock, storageKey: 'cache'});
+
+		cache = new Cache<string, TestItem>('id', {
+			persistentStorage: { storage: storageMock, key: 'cache' },
+		});
 		jest.clearAllMocks();
 		jest.useFakeTimers();
 	});
 
-	test('should add and get an item', () => {
-		const item = {id: '1', value: 'a'};
+	it('adds and gets an item', () => {
+		const item = { id: '1', value: 'a' };
 		cache.add(item);
 		expect(cache.get('1')).toEqual(item);
 	});
 
-	test('should not get an expired item when expired option not set', () => {
-		const item = {id: '1', value: 'a'};
-		cache.set('1', item);
+	it('skips expired items by default but fetches if requested', () => {
+		const item = { id: '2', value: 'b' };
+		cache.set('2', item);
 		cache.options.expiryPolicy = 'keep';
-		cache.expire('1');
-		expect(cache.get('1')).toBeNull();
-		expect(cache.get('1', true)).toEqual({...item, _expired: true});
+		cache.expire('2');
+		expect(cache.get('2')).toBeNull();
+		expect(cache.get('2', true)).toEqual({ ...item, _expired: true });
 	});
 
-	test('should set and get via property access (proxy)', () => {
-		(cache as any)['2'] = {id: '2', value: 'b'};
-		expect((cache as any)['2']).toEqual({id: '2', value: 'b'});
+	it('supports property access and setting via Proxy', () => {
+		(cache as any)['3'] = { id: '3', value: 'c' };
+		expect((cache as any)['3']).toEqual({ id: '3', value: 'c' });
+		expect(cache.get('3')).toEqual({ id: '3', value: 'c' });
 	});
 
-	test('should remove an item', () => {
-		cache.set('1', {id: '1', value: 'a'});
-		cache.delete('1');
-		expect(cache.get('1')).toBeNull();
+	it('removes an item and persists', () => {
+		cache.add({ id: '4', value: 'd' });
+		cache.delete('4');
+		expect(cache.get('4')).toBeNull();
+		expect(storageSetItemSpy).toHaveBeenCalled();
 	});
 
-	test('should clear the cache', () => {
-		cache.add({id: '1', value: 'a'});
+	it('clears the cache', () => {
+		cache.add({ id: '1', value: 'test' });
 		cache.clear();
 		expect(cache.get('1')).toBeNull();
 		expect(cache.complete).toBe(false);
 	});
 
-	test('should add multiple items and mark complete', () => {
-		const rows = [
-			{id: 'a', value: '1'},
-			{id: 'b', value: '2'},
+	it('bulk adds, marks complete', () => {
+		const items = [
+			{ id: 'a', value: '1' },
+			{ id: 'b', value: '2' },
 		];
-		cache.addAll(rows);
+		cache.addAll(items);
 		expect(cache.all().length).toBe(2);
 		expect(cache.complete).toBe(true);
 	});
 
-	test('should return all, keys, entries, and map', () => {
-		cache.add({id: '1', value: 'a'});
-		cache.add({id: '2', value: 'b'});
-		expect(cache.all().length).toBe(2);
-		expect(cache.keys().sort()).toEqual(['1', '2']);
-		expect(cache.entries().length).toBe(2);
-		expect(Object.keys(cache.map())).toContain('1');
-		expect(Object.keys(cache.map())).toContain('2');
+	it('returns correct keys, entries, and map', () => {
+		cache.add({ id: 'x', value: 'foo' });
+		cache.add({ id: 'y', value: 'bar' });
+		expect(cache.keys().sort()).toEqual(['x', 'y']);
+		expect(cache.entries().map(e => e[0]).sort()).toEqual(['x', 'y']);
+		const m = cache.map();
+		expect(Object.keys(m)).toEqual(expect.arrayContaining(['x', 'y']));
+		expect(m['x'].value).toBe('foo');
 	});
 
-	// test('should expire/delete items after TTL', () => {
-	// 	jest.useFakeTimers();
-	// 	cache = new Cache<string, TestItem>('id', {ttl: 0.1});
-	// 	cache.add({id: '3', value: 'x'});
-	// 	jest.advanceTimersByTime(250);
-	// 	expect(cache.get('3')).toBeNull();
-	// });
-
-	test('should persist and restore from storage', () => {
-		(storageMock.getItem as jest.Mock).mockReturnValueOnce(JSON.stringify({a: {id: 'a', value: 'from-storage'}}));
-		const c = new Cache<string, TestItem>('id', {storage: storageMock, storageKey: 'cache'});
-		expect(c.get('a')).toEqual({id: 'a', value: 'from-storage'});
-	});
-
-	test('should handle expiryPolicy "delete"', () => {
-		cache.options.expiryPolicy = 'delete';
-		cache.add({id: 'k1', value: 'KeepMe'});
-		cache.expire('k1');
-		expect(cache.get('k1', true)).toBeNull();
-	});
-
-	test('should handle expiryPolicy "keep"', () => {
-		cache.options.expiryPolicy = 'keep';
-		cache.add({id: 'k1', value: 'KeepMe'});
-		cache.expire('k1');
-		expect(cache.get('k1')).toBeNull();
-		expect(cache.get('k1', true)?._expired).toBe(true);
+	it('persists and restores from storage', () => {
+		(storageMock.getItem as jest.Mock).mockReturnValueOnce(
+			JSON.stringify({ z: { id: 'z', value: 'from-storage' } }),
+		);
+		const c = new Cache<string, TestItem>('id', {
+			persistentStorage: { storage: storageMock, key: 'cache' },
+		});
+		expect(c.get('z')).toEqual({ id: 'z', value: 'from-storage' });
+	});
+
+	it('expiryPolicy "delete" removes expired items completely', () => {
+		cache.options.expiryPolicy = 'delete';
+		cache.add({ id: 'del1', value: 'gone' });
+		cache.expire('del1');
+		expect(cache.get('del1', true)).toBeNull();
+		expect(cache.get('del1')).toBeNull();
+	});
+
+	it('expiryPolicy "keep" marks as expired but does not delete', () => {
+		cache.options.expiryPolicy = 'keep';
+		cache.add({ id: 'keep1', value: 'kept' });
+		cache.expire('keep1');
+		expect(cache.get('keep1')).toBeNull();
+		const val = cache.get('keep1', true);
+		expect(val && val._expired).toBe(true);
+	});
+
+	// Uncomment and adapt this test if TTL/expiry timers are supported by your implementation
+	// it('expires and deletes items after TTL', () => {
+	// 	jest.useFakeTimers();
+	// 	cache = new Cache<string, TestItem>('id', { ttl: 0.01 });
+	// 	cache.add({ id: 'ttl1', value: 'temp' });
+	// 	jest.advanceTimersByTime(100);
+	// 	expect(cache.get('ttl1')).toBeNull();
+	// });
+
+	// Edge: add error handling test
+	it('throws if instantiating with invalid key property', () => {
+		expect(() => {
+			const invalid = new Cache<'string', TestItem>('id');
+			// try invalid.add({id: 'z', value: 'fail'}) if needed
+		}).not.toThrow();
 	});
 });
```
Path Events tests

```diff
@@ -27,7 +27,7 @@ describe('Path Events', () => {
 
 	describe('PathEvent', () => {
 		it('parses event string', () => {
-			const pe = new PathEvent('Users/system:cr');
+			const pe = new PathEvent('users/system:cr');
 			expect(pe.module).toBe('users');
 			expect(pe.fullPath).toBe('users/system');
 			expect(pe.name).toBe('system');
@@ -103,11 +103,9 @@ describe('Path Events', () => {
 	});
 
 	it('has returns true for overlapping', () => {
-		const events = [
-			new PathEvent('users/sys:cr'),
-		];
-		expect(PathEvent.has(events, 'users/sys:r')).toBe(true);
-		expect(PathEvent.has(events, 'users/nope:r')).toBe(false);
+		const events = [new PathEvent('users/sys:cr')];
+		expect(PathEvent.has(events, 'users/sys:r')).toBeTruthy();
+		expect(PathEvent.has(events, 'users/nope:r')).toBeFalsy();
 	});
 
 	it('hasAll returns true only if all overlap', () => {
```