break up library, move bots to their own repositories
23
packages/util/README.md
Normal file
@@ -0,0 +1,23 @@
# tsdown-starter

A starter for creating a TypeScript package.

## Development

- Install dependencies:

```bash
npm install
```

- Run the unit tests:

```bash
npm run test
```

- Build the library:

```bash
npm run build
```
7
packages/util/bunfig.toml
Normal file
@@ -0,0 +1,7 @@
[test]
coverage = true
coverageSkipTestFiles = true
coverageReporter = ["text", "lcov"]

[run]
bun = true
32
packages/util/fixtures/jsonQuery/test-data-array.json
Normal file
@@ -0,0 +1,32 @@
[
  {
    "id": 1,
    "name": "Alice",
    "age": 30,
    "department": "Engineering"
  },
  {
    "id": 2,
    "name": "Bob",
    "age": 25,
    "department": "Marketing"
  },
  {
    "id": 3,
    "name": "Charlie",
    "age": 35,
    "department": "Engineering"
  },
  {
    "id": 4,
    "name": "Diana",
    "age": 28,
    "department": "Sales"
  },
  {
    "id": 5,
    "name": "Eve",
    "age": 32,
    "department": "Engineering"
  }
]
3
packages/util/fixtures/jsonQuery/test-data-invalid.json
Normal file
@@ -0,0 +1,3 @@
{
  "invalid": "json",
  "missing": "closing brace"
32
packages/util/fixtures/jsonQuery/test-data-object.json
Normal file
@@ -0,0 +1,32 @@
{
  "users": {
    "alice": {
      "id": 1,
      "name": "Alice",
      "age": 30,
      "department": "Engineering"
    },
    "bob": {
      "id": 2,
      "name": "Bob",
      "age": 25,
      "department": "Marketing"
    }
  },
  "departments": {
    "engineering": {
      "name": "Engineering",
      "budget": 1000000,
      "headCount": 15
    },
    "marketing": {
      "name": "Marketing",
      "budget": 500000,
      "headCount": 8
    }
  },
  "config": {
    "version": "1.0.0",
    "environment": "test"
  }
}
90
packages/util/package.json
Normal file
@@ -0,0 +1,90 @@
|
||||
{
|
||||
"name": "@star-kitten/util",
|
||||
"version": "0.0.1",
|
||||
"description": "Star Kitten Utility Library.",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"homepage": "https://git.f302.me/jb/star-kitten#readme",
|
||||
"bugs": {
|
||||
"url": "https://git.f302.me/jb/star-kitten/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://git.f302.me/jb/star-kitten.git"
|
||||
},
|
||||
"author": "JB <j-b-3.deviate267@passmail.net>",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.js",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/index.js",
|
||||
"require": "./dist/index.js",
|
||||
"types": "./dist/index*.d.ts"
|
||||
},
|
||||
"./*.js": {
|
||||
"import": "./dist/*.js",
|
||||
"require": "./dist/*.js",
|
||||
"types": "./dist/*.d.ts"
|
||||
},
|
||||
"./oauth": {
|
||||
"import": "./dist/oauth/index.js",
|
||||
"require": "./dist/oauth/index.js",
|
||||
"types": "./dist/types/oauth/index.d.ts"
|
||||
},
|
||||
"./scheduler": {
|
||||
"import": "./dist/scheduler/index.js",
|
||||
"require": "./dist/scheduler/index.js",
|
||||
"types": "./dist/types/scheduler/index.d.ts"
|
||||
}
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.3.5",
|
||||
"@types/jsonwebtoken": "^9.0.10",
|
||||
"@types/jwk-to-pem": "^2.0.3",
|
||||
"@types/lodash": "^4.17.20",
|
||||
"@types/node": "^22.15.17",
|
||||
"@types/node-cache": "^4.2.5",
|
||||
"@types/stream-chain": "^2.1.0",
|
||||
"@types/stream-json": "^1.7.8",
|
||||
"@vitest/coverage-v8": "^3.2.4",
|
||||
"bumpp": "^10.1.0",
|
||||
"drizzle-kit": "^0.31.4",
|
||||
"openapi-fetch": "^0.15.0",
|
||||
"openapi-typescript": "^7.10.1",
|
||||
"prettier-plugin-multiline-arrays": "^4.0.3",
|
||||
"tsdown": "^0.14.2",
|
||||
"typescript": "beta"
|
||||
},
|
||||
"dependencies": {
|
||||
"@orama/orama": "^3.1.13",
|
||||
"@oslojs/encoding": "^1.1.0",
|
||||
"cron-parser": "^5.3.1",
|
||||
"date-fns": "^4.1.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"drizzle-orm": "^0.44.5",
|
||||
"elysia": "^1.4.20",
|
||||
"fp-filters": "^0.5.4",
|
||||
"html-dom-parser": "^5.1.1",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"jwk-to-pem": "^2.0.7",
|
||||
"jwt-decode": "^4.0.0",
|
||||
"lodash": "^4.17.21",
|
||||
"node-cache": "^5.1.2",
|
||||
"stream-chain": "^3.4.0",
|
||||
"stream-json": "^1.9.1",
|
||||
"winston": "^3.17.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsdown",
|
||||
"dev": "tsdown --watch",
|
||||
"test": "bun test",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"release": "bumpp && npm publish"
|
||||
}
|
||||
}
|
||||
10
packages/util/src/index.ts
Normal file
@@ -0,0 +1,10 @@
export * as scheduler from './scheduler';
export * from './json-query';
export * from './kv';
export * from './logger';
export * from './reactive-state';
export * from './text';
export * from './time';

import * as locales from './locales';
export { locales };
262
packages/util/src/json-query.test.ts
Normal file
@@ -0,0 +1,262 @@
|
||||
import { describe, it, expect, beforeEach } from 'bun:test';
|
||||
import { queryJsonArray, queryJsonObject } from './json-query';
|
||||
import * as path from 'path';
|
||||
|
||||
// Test data interfaces
|
||||
interface TestUser {
|
||||
id: number;
|
||||
name: string;
|
||||
age: number;
|
||||
department: string;
|
||||
}
|
||||
|
||||
interface TestKeyValue {
|
||||
key: string;
|
||||
value: any;
|
||||
}
|
||||
|
||||
// Test file paths
|
||||
const basePath = path.join(__dirname, '../fixtures/jsonQuery');
|
||||
const testArrayFile = path.join(basePath, 'test-data-array.json');
|
||||
const testObjectFile = path.join(basePath, 'test-data-object.json');
|
||||
const testInvalidFile = path.join(basePath, 'test-data-invalid.json');
|
||||
const nonExistentFile = path.join(basePath, 'non-existent.json');
|
||||
|
||||
describe('queryJsonArray', () => {
|
||||
beforeEach(() => {
|
||||
// Clear any existing cache before each test
|
||||
const NodeCache = require('node-cache');
|
||||
const cache = new NodeCache();
|
||||
cache.flushAll();
|
||||
});
|
||||
|
||||
it('should find a matching item in JSON array', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.name === 'Alice');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Alice');
|
||||
expect(result?.id).toBe(1);
|
||||
expect(result?.age).toBe(30);
|
||||
expect(result?.department).toBe('Engineering');
|
||||
});
|
||||
|
||||
it('should find the first matching item when multiple matches exist', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.department === 'Engineering');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Alice'); // First engineering employee
|
||||
expect(result?.id).toBe(1);
|
||||
});
|
||||
|
||||
it('should return null when no match is found', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.name === 'NonExistent');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle complex query conditions', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.age > 30 && user.department === 'Engineering');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Charlie');
|
||||
expect(result?.age).toBe(35);
|
||||
});
|
||||
|
||||
it('should cache results when cacheKey is provided', async () => {
|
||||
const cacheKey = 'test-alice-query';
|
||||
|
||||
// First call should hit the file
|
||||
const result1 = await queryJsonArray<TestUser>(testArrayFile, (user) => user.name === 'Alice', cacheKey);
|
||||
|
||||
// Second call should hit the cache (we can't directly verify this without mocking,
|
||||
// but we can verify the result is consistent)
|
||||
const result2 = await queryJsonArray<TestUser>(
|
||||
testArrayFile,
|
||||
(user) => user.name === 'Bob', // Different query, but should return cached Alice
|
||||
cacheKey,
|
||||
);
|
||||
|
||||
expect(result1).toEqual(result2);
|
||||
expect(result1?.name).toBe('Alice');
|
||||
expect(result2?.name).toBe('Alice');
|
||||
});
|
||||
|
||||
it('should respect custom cache expiry', async () => {
|
||||
const cacheKey = 'test-expiry-query';
|
||||
const customExpiry = 1; // 1 second
|
||||
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.name === 'Bob', cacheKey, customExpiry);
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Bob');
|
||||
});
|
||||
|
||||
it('should handle file read errors gracefully', async () => {
|
||||
await expect(queryJsonArray<TestUser>(nonExistentFile, (user) => user.name === 'Alice')).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle invalid JSON gracefully', async () => {
|
||||
await expect(queryJsonArray<TestUser>(testInvalidFile, (user) => user.name === 'Alice')).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should work with numeric queries', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.id === 3);
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Charlie');
|
||||
expect(result?.id).toBe(3);
|
||||
});
|
||||
|
||||
it('should work with range queries', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.age >= 30 && user.age <= 32);
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Alice'); // First match: age 30
|
||||
});
|
||||
|
||||
it('should handle empty query results', async () => {
|
||||
const result = await queryJsonArray<TestUser>(
|
||||
testArrayFile,
|
||||
(user) => user.age > 100, // No one is over 100
|
||||
);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('queryJsonObject', () => {
|
||||
beforeEach(() => {
|
||||
// Clear any existing cache before each test
|
||||
const NodeCache = require('node-cache');
|
||||
const cache = new NodeCache();
|
||||
cache.flushAll();
|
||||
});
|
||||
|
||||
it('should find a matching key-value pair in JSON object', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => item.key === 'users');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(typeof result).toBe('object');
|
||||
expect(result?.alice?.name).toBe('Alice');
|
||||
expect(result?.bob?.name).toBe('Bob');
|
||||
});
|
||||
|
||||
it('should find nested object values', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => item.key === 'departments');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.engineering?.name).toBe('Engineering');
|
||||
expect(result?.marketing?.budget).toBe(500000);
|
||||
});
|
||||
|
||||
it('should find specific configuration values', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => item.key === 'config');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.version).toBe('1.0.0');
|
||||
expect(result?.environment).toBe('test');
|
||||
});
|
||||
|
||||
it('should return null when no match is found', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => item.key === 'nonexistent');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle complex query conditions on values', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => {
|
||||
if (item.key === 'departments' && typeof item.value === 'object') {
|
||||
return Object.values(item.value).some((dept: any) => dept.budget > 800000);
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.engineering?.budget).toBe(1000000);
|
||||
});
|
||||
|
||||
it('should cache results when cacheKey is provided', async () => {
|
||||
const cacheKey = 'test-config-query';
|
||||
|
||||
// First call should hit the file
|
||||
const result1 = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => item.key === 'config', cacheKey);
|
||||
|
||||
// Second call should hit the cache
|
||||
const result2 = await queryJsonObject<any, TestKeyValue>(
|
||||
testObjectFile,
|
||||
(item) => item.key === 'users', // Different query, but should return cached config
|
||||
cacheKey,
|
||||
);
|
||||
|
||||
expect(result1).toEqual(result2);
|
||||
expect(result1?.version).toBe('1.0.0');
|
||||
expect(result2?.version).toBe('1.0.0');
|
||||
});
|
||||
|
||||
it('should respect custom cache expiry', async () => {
|
||||
const cacheKey = 'test-object-expiry-query';
|
||||
const customExpiry = 2; // 2 seconds
|
||||
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => item.key === 'users', cacheKey, customExpiry);
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.alice?.name).toBe('Alice');
|
||||
});
|
||||
|
||||
it('should handle file read errors gracefully', async () => {
|
||||
await expect(queryJsonObject<any, TestKeyValue>(nonExistentFile, (item) => item.key === 'config')).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle invalid JSON gracefully', async () => {
|
||||
await expect(queryJsonObject<any, TestKeyValue>(testInvalidFile, (item) => item.key === 'config')).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should work with value-based queries', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => {
|
||||
return typeof item.value === 'object' && item.value?.version === '1.0.0';
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.version).toBe('1.0.0');
|
||||
expect(result?.environment).toBe('test');
|
||||
});
|
||||
|
||||
it('should handle queries that check both key and value', async () => {
|
||||
const result = await queryJsonObject<any, TestKeyValue>(testObjectFile, (item) => {
|
||||
return item.key.startsWith('dep') && typeof item.value === 'object';
|
||||
});
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.engineering?.name).toBe('Engineering');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases and error handling', () => {
|
||||
it('should handle empty file paths', async () => {
|
||||
await expect(queryJsonArray<any>('', () => true)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle null query functions gracefully', async () => {
|
||||
await expect(queryJsonArray<TestUser>(testArrayFile, null as any)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle undefined query functions gracefully', async () => {
|
||||
await expect(queryJsonArray<TestUser>(testArrayFile, undefined as any)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should work without caching parameters', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.name === 'Diana');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Diana');
|
||||
expect(result?.department).toBe('Sales');
|
||||
});
|
||||
|
||||
it('should work with minimal cache configuration', async () => {
|
||||
const result = await queryJsonArray<TestUser>(testArrayFile, (user) => user.name === 'Eve', 'minimal-cache');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.name).toBe('Eve');
|
||||
expect(result?.age).toBe(32);
|
||||
});
|
||||
});
|
||||
94
packages/util/src/json-query.ts
Normal file
@@ -0,0 +1,94 @@
import fs from 'node:fs';
import { chain } from 'stream-chain';
import { parser } from 'stream-json';
import { streamArray } from 'stream-json/streamers/StreamArray';
import { streamObject } from 'stream-json/streamers/StreamObject';
import NodeCache from 'node-cache';

const cache = new NodeCache({ stdTTL: 3600 });

/**
 * Queries a large JSON array file for an item matching the provided query function.
 * This function streams the file to avoid loading the entire content into memory.
 *
 * @param filePath - The path to the JSON file containing an array of items.
 * @param query - A function that takes an item and returns true if it matches the criteria.
 * @returns A promise that resolves to the first matching item or null if no match is found.
 */
export function queryJsonArray<T>(
  filePath: string,
  query: (item: T) => boolean,
  cacheKey?: string,
  cacheExpiry?: number,
): Promise<T | null> {
  if (cacheKey) {
    const cached = cache.get<T>(cacheKey);
    if (cached) {
      return Promise.resolve(cached);
    }
  }

  return new Promise((resolve, reject) => {
    const pipeline = chain([fs.createReadStream(filePath), parser(), streamArray(), (data) => (query(data.value) ? data.value : null)]);

    pipeline.on('data', (value) => {
      if (value) {
        if (cacheKey) {
          cache.set(cacheKey, value, cacheExpiry || 3600);
        }
        resolve(value);
      }
    });

    pipeline.on('end', () => {
      resolve(null); // No match found
    });

    pipeline.on('error', (err) => {
      reject(err);
    });
  });
}

/**
 * Queries a large JSON object file for a value matching the provided query function.
 * This function streams the file to avoid loading the entire content into memory.
 *
 * @param filePath - The path to the JSON file containing an object of key-value pairs.
 * @param query - A function that takes a key-value pair and returns true if it matches the criteria.
 * @returns A promise that resolves to the first matching value or null if no match is found.
 */
export function queryJsonObject<T, K = { key: string; value: T }>(
  filePath: string,
  query: (item: K) => boolean,
  cacheKey?: string,
  cacheExpiry?: number,
): Promise<T | null> {
  if (cacheKey) {
    const cached = cache.get<T>(cacheKey);
    if (cached) {
      return Promise.resolve(cached);
    }
  }

  return new Promise((resolve, reject) => {
    const pipeline = chain([fs.createReadStream(filePath), parser(), streamObject(), (data) => (query(data) ? data.value : null)]);

    pipeline.on('data', (value) => {
      if (value) {
        if (cacheKey) {
          cache.set(cacheKey, value, cacheExpiry || 3600);
        }
        resolve(value);
      }
    });

    pipeline.on('end', () => {
      resolve(null); // No match found
    });

    pipeline.on('error', (err) => {
      reject(err);
    });
  });
}
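A brief usage sketch of the two helpers above; the file paths and the cache key are illustrative, and the root import assumes the re-exports in `src/index.ts`:

```ts
import { queryJsonArray, queryJsonObject } from '@star-kitten/util';

interface User {
  id: number;
  name: string;
  age: number;
  department: string;
}

// Stream the array fixture and resolve with the first engineer older than 30,
// caching the hit under 'senior-engineer' for the default one-hour TTL.
const senior = await queryJsonArray<User>(
  'fixtures/jsonQuery/test-data-array.json',
  (user) => user.department === 'Engineering' && user.age > 30,
  'senior-engineer',
);

// Stream the object fixture and resolve with the value stored under the 'config' key.
const config = await queryJsonObject<{ version: string; environment: string }>(
  'fixtures/jsonQuery/test-data-object.json',
  (item) => item.key === 'config',
);
```

Because results are memoized by `cacheKey` alone, two different queries that share a key return the same cached value; the tests above rely on exactly that behaviour.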
188
packages/util/src/kv.test.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { connectDB, get, set, del, has, clear, getDB, asyncKV } from './kv';
|
||||
|
||||
beforeEach(() => {
|
||||
// Use in-memory database for tests
|
||||
connectDB(':memory:');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Clear the database after each test
|
||||
clear();
|
||||
});
|
||||
|
||||
describe('KV Store Tests', () => {
|
||||
test('set and get string value', async () => {
|
||||
await set('testKey', 'testValue');
|
||||
const value = await get<string>('testKey');
|
||||
expect(value).toBe('testValue');
|
||||
});
|
||||
|
||||
test('set and get object value', async () => {
|
||||
const obj = { name: 'test', value: 42 };
|
||||
await set('testKey', obj);
|
||||
const value = await get<typeof obj>('testKey');
|
||||
expect(value).toEqual(obj);
|
||||
});
|
||||
|
||||
test('set and get boolean value', async () => {
|
||||
await set('testKey', true);
|
||||
const value = await get<boolean>('testKey');
|
||||
expect(value).toBe(true);
|
||||
});
|
||||
|
||||
test('set and get null value', async () => {
|
||||
await set('testKey', null);
|
||||
const value = await get<null>('testKey');
|
||||
expect(value).toBe(null);
|
||||
});
|
||||
|
||||
test('get non-existent key returns undefined', async () => {
|
||||
const value = await get('nonExistent');
|
||||
expect(value).toBeUndefined();
|
||||
});
|
||||
|
||||
test('set with TTL and get before expiration', async () => {
|
||||
await set('testKey', 'testValue', 1); // 1 second TTL
|
||||
const value = await get<string>('testKey');
|
||||
expect(value).toBe('testValue');
|
||||
});
|
||||
|
||||
test('set with TTL and get after expiration', async () => {
|
||||
await set('testKey', 'testValue', 0.001); // Very short TTL
|
||||
await new Promise((resolve) => setTimeout(resolve, 10)); // Wait for expiration
|
||||
const value = await get<string>('testKey');
|
||||
expect(value).toBeUndefined();
|
||||
});
|
||||
|
||||
test('del existing key', async () => {
|
||||
await set('testKey', 'testValue');
|
||||
const deleted = await del('testKey');
|
||||
expect(deleted).toBe(1); // Number of rows deleted
|
||||
const value = await get('testKey');
|
||||
expect(value).toBeUndefined();
|
||||
});
|
||||
|
||||
test('del non-existent key', async () => {
|
||||
const deleted = await del('nonExistent');
|
||||
expect(deleted).toBe(0); // del returns 0 for non-existent keys
|
||||
});
|
||||
|
||||
test('del multiple keys', async () => {
|
||||
await set('key1', 'value1');
|
||||
await set('key2', 'value2');
|
||||
await set('key3', 'value3');
|
||||
const deleted = await del(['key1', 'key2', 'nonExistent']);
|
||||
expect(deleted).toBe(2); // key1 and key2 deleted, nonExistent not
|
||||
expect(await get('key1')).toBeUndefined();
|
||||
expect(await get('key2')).toBeUndefined();
|
||||
expect(await get('key3')).toBeDefined(); // key3 still exists
|
||||
});
|
||||
|
||||
test('has existing key', async () => {
|
||||
await set('testKey', 'testValue');
|
||||
const exists = await has('testKey');
|
||||
expect(exists).toBe(true);
|
||||
});
|
||||
|
||||
test('has non-existent key', async () => {
|
||||
const exists = await has('nonExistent');
|
||||
expect(exists).toBe(false);
|
||||
});
|
||||
|
||||
test('has key with expired TTL', async () => {
|
||||
await set('testKey', 'testValue', 0.001);
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
const exists = await has('testKey');
|
||||
expect(exists).toBe(false);
|
||||
});
|
||||
|
||||
test('clear all keys', async () => {
|
||||
await set('key1', 'value1');
|
||||
await set('key2', 'value2');
|
||||
await clear();
|
||||
const value1 = await get('key1');
|
||||
const value2 = await get('key2');
|
||||
expect(value1).toBeUndefined();
|
||||
expect(value2).toBeUndefined();
|
||||
});
|
||||
|
||||
test('getDB returns database instance', () => {
|
||||
const db = getDB();
|
||||
expect(db).toBeDefined();
|
||||
expect(db.constructor.name).toBe('Database');
|
||||
});
|
||||
|
||||
test('connectDB with custom path', () => {
|
||||
connectDB(':memory:'); // Already done in beforeEach, but testing again
|
||||
const db = getDB();
|
||||
expect(db).toBeDefined();
|
||||
});
|
||||
|
||||
test('set and get zero value', async () => {
|
||||
await set('testKey', 0);
|
||||
const value = await get<number>('testKey');
|
||||
expect(value).toBe(0);
|
||||
});
|
||||
|
||||
test('set and get empty string', async () => {
|
||||
await set('testKey', '');
|
||||
const value = await get<string>('testKey');
|
||||
expect(value).toBe('');
|
||||
});
|
||||
|
||||
test('set with false value', async () => {
|
||||
await set('testKey', false);
|
||||
const value = await get<boolean>('testKey');
|
||||
expect(value).toBe(false);
|
||||
});
|
||||
|
||||
test('JSON parsing error falls back to string', async () => {
|
||||
// Manually insert invalid JSON to test fallback
|
||||
const db = getDB();
|
||||
db.run(`INSERT OR REPLACE INTO kvstore VALUES (?, ?, ?, ?)`, ['testKey', 'invalid json', null, new Date().toISOString()]);
|
||||
const value = await get<string>('testKey');
|
||||
expect(value).toBe('invalid json');
|
||||
});
|
||||
|
||||
// Tests for asyncKV wrapper functions
|
||||
test('asyncKV set and get', async () => {
|
||||
await asyncKV.set('asyncKey', 'asyncValue');
|
||||
const value = await asyncKV.get<string>('asyncKey');
|
||||
expect(value).toBe('asyncValue');
|
||||
});
|
||||
|
||||
test('asyncKV del', async () => {
|
||||
await asyncKV.set('asyncKey', 'asyncValue');
|
||||
const deleted = await asyncKV.del('asyncKey');
|
||||
expect(deleted).toBe(1);
|
||||
const value = await asyncKV.get('asyncKey');
|
||||
expect(value).toBeUndefined();
|
||||
});
|
||||
|
||||
test('asyncKV has', async () => {
|
||||
await asyncKV.set('asyncKey', 'asyncValue');
|
||||
const exists = await asyncKV.has('asyncKey');
|
||||
expect(exists).toBe(true);
|
||||
await asyncKV.del('asyncKey');
|
||||
const existsAfter = await asyncKV.has('asyncKey');
|
||||
expect(existsAfter).toBe(false);
|
||||
});
|
||||
|
||||
test('asyncKV clear', async () => {
|
||||
await asyncKV.set('key1', 'value1');
|
||||
await asyncKV.set('key2', 'value2');
|
||||
await asyncKV.clear();
|
||||
const value1 = await asyncKV.get('key1');
|
||||
const value2 = await asyncKV.get('key2');
|
||||
expect(value1).toBeUndefined();
|
||||
expect(value2).toBeUndefined();
|
||||
});
|
||||
|
||||
test('asyncKV del multiple keys', async () => {
|
||||
await asyncKV.set('key1', 'value1');
|
||||
await asyncKV.set('key2', 'value2');
|
||||
const deleted = await asyncKV.del(['key1', 'key2']);
|
||||
expect(deleted).toBe(2);
|
||||
});
|
||||
});
|
||||
170
packages/util/src/kv.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
/**
|
||||
* A simple key-value store using Bun's SQLite support.
|
||||
* Supports string, object, boolean, and null values.
|
||||
* Values can have an optional TTL (time-to-live) in seconds.
|
||||
*
|
||||
* The core functions are synchronous; the asyncKV wrapper below exposes the same
* operations as promises, for easy replacement with other storage solutions in the future.
|
||||
*
|
||||
* Usage:
|
||||
* import { get, set, del, has, clear, connectDB } from './kv';
|
||||
*
|
||||
* await connectDB(); // Optional: specify a custom database file path
|
||||
* await set('myKey', 'myValue', 60); // Set a key with a value and TTL
|
||||
* const value = await get('myKey'); // Get the value by key
|
||||
* await del('myKey'); // Delete the key
|
||||
* const exists = await has('myKey'); // Check if the key exists
|
||||
* await clear(); // Clear all keys
|
||||
*/
|
||||
|
||||
import { Database } from 'bun:sqlite';
|
||||
import _ from 'lodash';
|
||||
|
||||
interface KVItem {
|
||||
key: string;
|
||||
value: string;
|
||||
ttl: number | null; // Unix timestamp in milliseconds
|
||||
}
|
||||
|
||||
let db: Database | null = null;
|
||||
|
||||
export function connectDB(dbPath: string = process.env.STAR_KITTEN_KV_DB_PATH || ':memory:') {
|
||||
db = new Database(dbPath, { readwrite: true, create: true });
|
||||
db.run(
|
||||
`CREATE TABLE IF NOT EXISTS kvstore (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT,
|
||||
ttl INTEGER,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(key)
|
||||
)`,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDB() {
|
||||
if (!db) {
|
||||
connectDB();
|
||||
}
|
||||
return db!;
|
||||
}
|
||||
|
||||
export function get<T>(key: string): T | undefined {
|
||||
const query = getDB().prepare(`SELECT value, ttl FROM kvstore WHERE key = ?`);
|
||||
const row = query.get(key) as KVItem | undefined;
|
||||
if (!row) return undefined;
|
||||
|
||||
if (row.ttl && Date.now() > row.ttl) {
|
||||
del(key);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.parse(row.value) as T;
|
||||
} catch (error) {
|
||||
return row.value as unknown as T;
|
||||
}
|
||||
}
|
||||
|
||||
export function del(key: string | string[]): number {
|
||||
try {
|
||||
if (typeof key === 'string') {
|
||||
const result = getDB().run(`DELETE FROM kvstore WHERE key = ?`, [key]);
|
||||
return result.changes;
|
||||
} else {
|
||||
const keys = key.map(() => '?').join(', ');
|
||||
const query = `DELETE FROM kvstore WHERE key IN (${keys})`;
|
||||
const result = getDB().run(query, key);
|
||||
return result.changes;
|
||||
}
|
||||
} catch (error) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
function _set<T = any>(key: string, value: T, ttl: number | null = null): boolean {
|
||||
try {
|
||||
const stmt = getDB().run(`INSERT OR REPLACE INTO kvstore VALUES (?, ?, ?, ?)`, [
|
||||
key,
|
||||
JSON.stringify(value),
|
||||
ttl,
|
||||
new Date().toISOString(),
|
||||
]);
|
||||
return true;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a key with an optional TTL in seconds.
|
||||
* @param key Key
|
||||
* @param value data
|
||||
* @param ttlInSeconds Time to live in seconds
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function set<T = any>(key: string, value: T, ttlInSeconds: number | null = null): boolean {
|
||||
const ttl = ttlInSeconds ? Date.now() + ttlInSeconds * 1000 : null;
|
||||
return _set<T>(key, value, ttl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a key with an exact expiration time in milliseconds.
|
||||
* @param key Key
|
||||
* @param value data
|
||||
* @param expires Exact ms of expiration
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function setExact<T>(key: string, value: T, expires: number): boolean {
|
||||
const ttl = expires;
|
||||
return _set<T>(key, value, ttl);
|
||||
}
|
||||
|
||||
export function has(key: string): boolean {
|
||||
const query = getDB().prepare(`SELECT ttl FROM kvstore WHERE key = ?`);
|
||||
const row = query.get(key) as { ttl?: number } | undefined;
|
||||
if (!row) return false;
|
||||
if (row.ttl && Date.now() > row.ttl) {
|
||||
del(key);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function purgeOlderThan(ageInSeconds: number): number {
|
||||
const cutoff = Date.now() - ageInSeconds * 1000;
|
||||
const result = getDB().run(`DELETE FROM kvstore WHERE created_at < ?`, [new Date(cutoff).toISOString()]);
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
export function purgeExpired(): number {
|
||||
const now = Date.now();
|
||||
const result = getDB().run(`DELETE FROM kvstore WHERE ttl IS NOT NULL AND ttl < ?`, [now]);
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
setInterval(() => {
|
||||
purgeExpired();
|
||||
}, 1000 * 60 * 5); // Purge expired keys every 5 minutes
|
||||
|
||||
export function clear(): void {
|
||||
getDB().run(`DELETE FROM kvstore`);
|
||||
}
|
||||
|
||||
export default {
|
||||
get,
|
||||
set,
|
||||
delete: del,
|
||||
del,
|
||||
has,
|
||||
clear,
|
||||
};
|
||||
|
||||
export const asyncKV = {
|
||||
get: <T>(key: string): Promise<T | undefined> => Promise.resolve(get<T>(key)),
|
||||
set: <T>(key: string, value: T, ttlInSeconds: number | null = null): Promise<boolean> =>
|
||||
Promise.resolve(set<T>(key, value, ttlInSeconds)),
|
||||
delete: (key: string | string[]): Promise<number> => Promise.resolve(del(key)),
|
||||
del: (key: string | string[]): Promise<number> => Promise.resolve(del(key)),
|
||||
has: (key: string): Promise<boolean> => Promise.resolve(has(key)),
|
||||
clear: (): Promise<void> => Promise.resolve(clear()),
|
||||
};
|
||||
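A short sketch of typical use; the database path and keys are illustrative, and the root import assumes the `export * from './kv'` in `src/index.ts`:

```ts
import { connectDB, set, get, has, del, asyncKV } from '@star-kitten/util';

connectDB('./data/kv.sqlite'); // or omit to use STAR_KITTEN_KV_DB_PATH / ':memory:'

set('session:42', { userId: 42 }, 60); // JSON-stringified, expires after 60 seconds
const session = get<{ userId: number }>('session:42');

if (has('session:42')) {
  del('session:42'); // returns the number of rows deleted
}

// The same operations through the promise-based wrapper:
await asyncKV.set('flag', true);
const flag = await asyncKV.get<boolean>('flag');
```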
35
packages/util/src/locales.ts
Normal file
@@ -0,0 +1,35 @@
export type Locales = 'en' | 'ru' | 'de' | 'fr' | 'ja' | 'es' | 'zh' | 'ko';
export const ALL_LOCALES: Locales[] = ['en', 'ru', 'de', 'fr', 'ja', 'es', 'zh', 'ko'];
export const DEFAULT_LOCALE: Locales = 'en';
export const LOCALE_NAMES: { [key in Locales]: string } = {
  en: 'English',
  ru: 'Русский',
  de: 'Deutsch',
  fr: 'Français',
  ja: '日本語',
  es: 'Español',
  zh: '中文',
  ko: '한국어',
};
export function toDiscordLocale(locale: Locales): string {
  switch (locale) {
    case 'en':
      return 'en-US';
    case 'ru':
      return 'ru';
    case 'de':
      return 'de';
    case 'fr':
      return 'fr';
    case 'ja':
      return 'ja';
    case 'es':
      return 'es-ES';
    case 'zh':
      return 'zh-CN';
    case 'ko':
      return 'ko';
    default:
      return 'en-US';
  }
}
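Since `src/index.ts` re-exports this module as a namespace, a couple of illustrative calls:

```ts
import { locales } from '@star-kitten/util';

locales.toDiscordLocale('es'); // 'es-ES'
locales.LOCALE_NAMES['ja'];    // '日本語'
locales.DEFAULT_LOCALE;        // 'en'
```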
106
packages/util/src/logger.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { createLogger, format, Logger, transports } from 'winston';
|
||||
|
||||
const development = 'development';
|
||||
const production = 'production';
|
||||
const NODE_ENV = process.env.NODE_ENV || development;
|
||||
const LOG_LEVEL = process.env.LOG_LEVEL || (NODE_ENV === development ? 'debug' : 'info');
|
||||
const DEBUG = LOG_LEVEL === 'debug';
|
||||
|
||||
export function init(name: string = 'App'): Logger {
|
||||
const jsonFormat = format.combine(format.timestamp(), format.json());
|
||||
const logger = createLogger({
|
||||
level: LOG_LEVEL,
|
||||
format: format.json(),
|
||||
defaultMeta: { service: name },
|
||||
});
|
||||
|
||||
if (NODE_ENV !== development) {
|
||||
logger.add(
|
||||
new transports.Console({
|
||||
format: jsonFormat,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (NODE_ENV !== production) {
|
||||
const simpleFormat = format.printf(({ level, message, label, timestamp, stack }) => {
|
||||
return `${timestamp} [${label || 'App'}] ${level}: ${message}${DEBUG && stack ? `\n${stack}` : ''}`;
|
||||
});
|
||||
logger.add(
|
||||
new transports.Console({
|
||||
format: format.combine(format.colorize(), format.timestamp(), simpleFormat),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
enum LogLevel {
|
||||
DEBUG = 0,
|
||||
INFO = 1,
|
||||
WARN = 2,
|
||||
ERROR = 3,
|
||||
NONE = 4,
|
||||
}
|
||||
|
||||
function logLevelValue(level: string) {
|
||||
switch (level) {
|
||||
case 'debug':
|
||||
return 0;
|
||||
case 'info':
|
||||
return 1;
|
||||
case 'warn':
|
||||
return 2;
|
||||
case 'error':
|
||||
return 3;
|
||||
default:
|
||||
return 4;
|
||||
}
|
||||
}
|
||||
|
||||
const debug = (message: string, ...args: any[]) => {
|
||||
if (logLevelValue(LOG_LEVEL) > LogLevel.DEBUG) return;
|
||||
let e = new Error();
|
||||
let frame = e.stack?.split('\n')[2]; // change to 3 for grandparent func
|
||||
let lineNumber = frame?.split(':').reverse()[1];
|
||||
let functionName = frame?.split(' ')[5];
|
||||
let file = frame?.match(/src\/[a-zA-Z.:0-9]*/)?.[0];
|
||||
logger.child({ label: functionName, lineNumber, file }).debug(message, ...args);
|
||||
};
|
||||
|
||||
const info = (message: string, ...args: any[]) => {
|
||||
if (logLevelValue(LOG_LEVEL) > LogLevel.INFO) return;
|
||||
let e = new Error();
|
||||
let frame = e.stack?.split('\n')[2]; // change to 3 for grandparent func
|
||||
let lineNumber = frame?.split(':').reverse()[1];
|
||||
let functionName = frame?.split(' ')[5];
|
||||
let file = frame?.match(/src\/[a-zA-Z.:0-9]*/)?.[0];
|
||||
logger.child({ label: functionName, lineNumber, file }).info(message, ...args);
|
||||
};
|
||||
|
||||
const warn = (message: string, ...args: any[]) => {
|
||||
if (logLevelValue(LOG_LEVEL) > LogLevel.WARN) return;
|
||||
let e = new Error();
|
||||
let frame = e.stack?.split('\n')[2]; // change to 3 for grandparent func
|
||||
let lineNumber = frame?.split(':').reverse()[1];
|
||||
let functionName = frame?.split(' ')[5];
|
||||
let file = frame?.match(/src\/[a-zA-Z.:0-9]*/)?.[0];
|
||||
logger.child({ label: functionName, lineNumber, file }).warn(message, ...args);
|
||||
};
|
||||
|
||||
const error = (message: string, ...args: any[]) => {
|
||||
if (logLevelValue(LOG_LEVEL) > LogLevel.ERROR) return;
|
||||
let e = new Error();
|
||||
let frame = e.stack?.split('\n')[2]; // change to 3 for grandparent func
|
||||
let lineNumber = frame?.split(':').reverse()[1];
|
||||
let functionName = frame?.split(' ')[5];
|
||||
let file = frame?.match(/src\/[a-zA-Z.:0-9]*/)?.[0];
|
||||
logger.child({ label: functionName, lineNumber, file }).error(message, ...args);
|
||||
};
|
||||
|
||||
console.log = info;
|
||||
console.debug = debug;
|
||||
console.info = info;
|
||||
console.warn = warn;
|
||||
console.error = error;
|
||||
|
||||
return logger;
|
||||
}
|
||||
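A minimal sketch of wiring the logger into a service (the service name is illustrative). Note that `init` also rebinds the global `console` methods to the winston logger:

```ts
import { init } from '@star-kitten/util';

const logger = init('scheduler');

logger.info('scheduler started');
console.warn('low disk space'); // routed through winston after init()
```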
15
packages/util/src/oauth/authorization-url.ts
Normal file
@@ -0,0 +1,15 @@
import { generateState } from './state';

export async function createAuthorizationURL(authUrl: string, callbackUrl: string, clientId: string, scopes: string[] | string) {
  const state = generateState();
  const url = new URL(authUrl);
  url.searchParams.set('response_type', 'code');
  url.searchParams.set('redirect_uri', callbackUrl);
  url.searchParams.set('client_id', clientId);
  url.searchParams.set('state', state);
  url.searchParams.set('scope', Array.isArray(scopes) ? scopes.join(' ') : scopes);
  return {
    url,
    state,
  };
}
5
packages/util/src/oauth/index.ts
Normal file
@@ -0,0 +1,5 @@
export * from './state';
export * from './verify';
export * from './refresh';
export * from './authorization-url';
export * from './validate-code';
42
packages/util/src/oauth/refresh.ts
Normal file
@@ -0,0 +1,42 @@
export interface RefreshOptions {
  clientId: string;
  url: string;
  scope?: string | string[];
  auth?: {
    clientSecret: string;
    type: 'basic' | 'bearer' | 'params';
  };
}

export async function refresh<TOKEN = any>(refresh_token: string, options: RefreshOptions) {
  const params = {
    grant_type: 'refresh_token',
    refresh_token,
  };

  if (options.scope) {
    params['scope'] = Array.isArray(options.scope) ? options.scope.join(' ') : options.scope;
  }

  const headers = {
    'Content-Type': 'application/x-www-form-urlencoded',
  };

  if (options.auth) {
    if (options.auth.type === 'basic') {
      headers['Authorization'] = `Basic ${Buffer.from(`${options.clientId}:${options.auth.clientSecret}`).toString('base64')}`;
    } else if (options.auth.type === 'bearer') {
      headers['Authorization'] = `Bearer ${options.auth.clientSecret}`;
    } else if (options.auth.type === 'params') {
      params['client_id'] = options.clientId;
      params['client_secret'] = options.auth.clientSecret;
    }
  }

  const response = await fetch(options.url, {
    method: 'POST',
    headers: headers,
    body: new URLSearchParams(params),
  });
  return (await response.json()) as TOKEN;
}
13
packages/util/src/oauth/state.ts
Normal file
@@ -0,0 +1,13 @@
export function atobWeb(input: string): string {
  return atob(input.replaceAll(/-/g, '+').replaceAll(/_/g, '/'));
}

export function btoaWeb(input: string): string {
  return btoa(input).replaceAll(/\+/g, '-').replaceAll(/\//g, '_');
}

export function generateState(): string {
  const randomValues = new Uint8Array(32);
  crypto.getRandomValues(randomValues);
  return btoaWeb(String.fromCharCode(...randomValues));
}
36
packages/util/src/oauth/validate-code.ts
Normal file
@@ -0,0 +1,36 @@
export interface ValidateCodeOptions {
  clientId: string;
  url: string;
  auth?: {
    clientSecret: string;
    type: 'basic' | 'bearer' | 'params';
  };
}
export async function validateCode<TOKENS = any>(code: string, options: ValidateCodeOptions) {
  const headers = {
    'Content-Type': 'application/x-www-form-urlencoded',
  };

  const params: any = {
    grant_type: 'authorization_code',
    code,
  };

  if (options.auth) {
    if (options.auth.type === 'basic') {
      headers['Authorization'] = `Basic ${Buffer.from(`${options.clientId}:${options.auth.clientSecret}`).toString('base64')}`;
    } else if (options.auth.type === 'bearer') {
      headers['Authorization'] = `Bearer ${options.auth.clientSecret}`;
    } else if (options.auth.type === 'params') {
      params['client_id'] = options.clientId;
      params['client_secret'] = options.auth.clientSecret;
    }
  }

  const response = await fetch(options.url, {
    method: 'POST',
    headers,
    body: new URLSearchParams(params),
  });
  return (await response.json()) as TOKENS;
}
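A hedged sketch of how the pieces above compose into an authorization-code flow; the endpoint URLs, client credentials, and the `code` value are placeholders, and the subpath import assumes the `./oauth` export in package.json:

```ts
import { createAuthorizationURL, validateCode } from '@star-kitten/util/oauth';

interface Tokens {
  access_token: string;
  refresh_token: string;
  expires_in: number;
}

// 1. Build the redirect URL; persist `state` so the callback can be verified.
const { url, state } = await createAuthorizationURL(
  'https://provider.example/oauth/authorize',
  'https://app.example/oauth/callback',
  'my-client-id',
  ['identify', 'email'],
);

// 2. On the callback, exchange the returned code for tokens.
const code = 'code-from-the-callback-query-string';
const tokens = await validateCode<Tokens>(code, {
  clientId: 'my-client-id',
  url: 'https://provider.example/oauth/token',
  auth: { type: 'basic', clientSecret: 'my-client-secret' },
});
```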
5
packages/util/src/oauth/verify.ts
Normal file
@@ -0,0 +1,5 @@
import jwt from 'jsonwebtoken';

export function verify(token: string, publicKey: string) {
  return jwt.verify(token, publicKey);
}
10
packages/util/src/promise.ts
Normal file
@@ -0,0 +1,10 @@
export function isPromise<T>(value: T | Promise<T>): value is Promise<T> {
  return typeof (value as Promise<T>)?.then === 'function';
}

export async function awaitMaybePromise<T>(value: T | Promise<T>) {
  if (isPromise(value)) {
    return await value;
  }
  return Promise.resolve(value);
}
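The intent in one example: a caller can hand either a plain value or a promise to the same code path (imported relatively here, since `src/index.ts` does not re-export this module):

```ts
import { awaitMaybePromise } from './promise';

const a = await awaitMaybePromise(42);                      // 42
const b = await awaitMaybePromise(Promise.resolve('done')); // 'done'
```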
118
packages/util/src/reactive-state.test.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import { describe, it, expect } from 'bun:test';
|
||||
import { createReactiveState } from './reactive-state';
|
||||
|
||||
describe('createReactiveState', () => {
|
||||
it('should call callback on property change with new and old state', async () => {
|
||||
const initial = { count: 0 };
|
||||
let called = false;
|
||||
let newState: any, oldState: any;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
subscribe((n, o) => {
|
||||
called = true;
|
||||
newState = n;
|
||||
oldState = o;
|
||||
});
|
||||
state.count = 1;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0)); // wait for microtask
|
||||
expect(called).toBe(true);
|
||||
expect(newState.count).toBe(1);
|
||||
expect(oldState.count).toBe(0);
|
||||
});
|
||||
|
||||
it('should maintain reactivity when assigned to another variable', async () => {
|
||||
const initial = { count: 0 };
|
||||
let called = false;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
subscribe((n, o) => {
|
||||
called = true;
|
||||
});
|
||||
const newState = state;
|
||||
newState.count = 1;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0)); // wait for microtask
|
||||
expect(called).toBe(true);
|
||||
expect(newState.count).toBe(1);
|
||||
});
|
||||
|
||||
it('should be reactive when new properties are added', async () => {
|
||||
const initial = { count: 0 };
|
||||
let called = 0;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
subscribe((n, o) => {
|
||||
called++;
|
||||
});
|
||||
state.newproperty = 'hello';
|
||||
await new Promise((resolve) => setTimeout(resolve, 0)); // wait for microtask
|
||||
state.secondProperty = 'world';
|
||||
await new Promise((resolve) => setTimeout(resolve, 0)); // wait for microtask
|
||||
expect(called).toBe(2);
|
||||
expect(state.count).toBe(0);
|
||||
expect(state.newproperty).toBe('hello');
|
||||
expect(state.secondProperty).toBe('world');
|
||||
});
|
||||
|
||||
it('should batch multiple changes into one callback call', async () => {
|
||||
const initial = { a: 1, b: 2 };
|
||||
let callCount = 0;
|
||||
let finalNewState: any;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
subscribe((n) => {
|
||||
callCount++;
|
||||
finalNewState = n;
|
||||
});
|
||||
state.a = 10;
|
||||
state.b = 20;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(callCount).toBe(1);
|
||||
expect(finalNewState.a).toBe(10);
|
||||
expect(finalNewState.b).toBe(20);
|
||||
});
|
||||
|
||||
it('should support nested object changes', async () => {
|
||||
const initial = { nested: { value: 1 } };
|
||||
let called = false;
|
||||
let newState: any;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
subscribe((n) => {
|
||||
called = true;
|
||||
newState = n;
|
||||
});
|
||||
state.nested.value = 2;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(called).toBe(true);
|
||||
expect(newState.nested.value).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow unsubscribing from callbacks', async () => {
|
||||
const initial = { count: 0 };
|
||||
let callCount = 0;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
const unsubscribe = subscribe(() => {
|
||||
callCount++;
|
||||
});
|
||||
state.count = 1;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(callCount).toBe(1);
|
||||
unsubscribe();
|
||||
state.count = 2;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(callCount).toBe(1); // should not increase
|
||||
});
|
||||
|
||||
it('should support multiple subscribers', async () => {
|
||||
const initial = { count: 0 };
|
||||
let callCount1 = 0,
|
||||
callCount2 = 0;
|
||||
const [state, subscribe] = createReactiveState(initial);
|
||||
const unsubscribe1 = subscribe(() => callCount1++);
|
||||
const unsubscribe2 = subscribe(() => callCount2++);
|
||||
state.count = 1;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(callCount1).toBe(1);
|
||||
expect(callCount2).toBe(1);
|
||||
unsubscribe1();
|
||||
state.count = 2;
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(callCount1).toBe(1);
|
||||
expect(callCount2).toBe(2);
|
||||
});
|
||||
});
|
||||
73
packages/util/src/reactive-state.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import cloneDeep from 'lodash/cloneDeep';
|
||||
|
||||
/**
|
||||
* Creates a reactive state object that batches changes and notifies all subscribers with the full old and new state.
|
||||
*
|
||||
* @param initialState - The initial state object
|
||||
* @returns A readonly tuple [state, subscribe]: the reactive proxy and a function to add callbacks
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const [state, subscribe] = createReactiveState({ count: 0, user: { name: 'Alice' } });
|
||||
*
|
||||
* const unsubscribe1 = subscribe((newState, oldState) => {
|
||||
* console.log('Subscriber 1:', oldState, '->', newState);
|
||||
* });
|
||||
*
|
||||
* const unsubscribe2 = subscribe((newState, oldState) => {
|
||||
* console.log('Subscriber 2:', newState.count);
|
||||
* });
|
||||
*
|
||||
* state.count = 1; // Triggers both subscribers once
|
||||
* state.user.name = 'Bob'; // Triggers both subscribers once (batched)
|
||||
*
|
||||
* unsubscribe1(); // Remove first subscriber
|
||||
* state.count = 2; // Only subscriber 2 is called
|
||||
* ```
|
||||
*/
|
||||
export function createReactiveState<T extends object>(initialState: T) {
|
||||
const callbacks = new Set<(newState: T, oldState: T) => void>();
|
||||
let isBatching = false;
|
||||
let oldState: T | null = null;
|
||||
let scheduled = false;
|
||||
|
||||
const rootState = cloneDeep(initialState);
|
||||
|
||||
function createReactiveObject(obj: any): any {
|
||||
return new Proxy(obj, {
|
||||
get(target, property, receiver) {
|
||||
const value = Reflect.get(target, property, receiver);
|
||||
return typeof value === 'object' && value !== null ? createReactiveObject(value) : value;
|
||||
},
|
||||
set(target, property, value, receiver) {
|
||||
if (!isBatching) {
|
||||
isBatching = true;
|
||||
oldState = cloneDeep(rootState);
|
||||
}
|
||||
const success = Reflect.set(target, property, value, receiver);
|
||||
if (success) {
|
||||
if (!scheduled) {
|
||||
scheduled = true;
|
||||
queueMicrotask(() => {
|
||||
callbacks.forEach((cb) => cb(rootState, oldState!));
|
||||
isBatching = false;
|
||||
oldState = null;
|
||||
scheduled = false;
|
||||
});
|
||||
}
|
||||
}
|
||||
return success;
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const state = createReactiveObject(rootState);
|
||||
|
||||
return [
|
||||
state,
|
||||
function subscribe(callback: (newState: T, oldState: T) => void) {
|
||||
callbacks.add(callback);
|
||||
return () => callbacks.delete(callback);
|
||||
},
|
||||
] as const;
|
||||
}
|
||||
1
packages/util/src/scheduler/index.ts
Normal file
@@ -0,0 +1 @@
export * from './scheduler.service';
1
packages/util/src/scheduler/lib/index.ts
Normal file
@@ -0,0 +1 @@
export * from './workerMessageHandler';
12
packages/util/src/scheduler/lib/workerMessageHandler.ts
Normal file
@@ -0,0 +1,12 @@
import type { Job } from '../types';

export default function createWorkerMessageHandler(
  workerName: string,
  executor: (_: { jobId: number; job: Job }) => Promise<void>,
) {
  return async (event: MessageEvent) => {
    const { jobId, job } = event.data as { jobId: number; job: Job };
    console.log(`${workerName} received job ${job.name} with data ${JSON.stringify(job.data)}`);
    await executor({ jobId, job });
  };
}
117
packages/util/src/scheduler/queue.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { Database } from 'bun:sqlite';
|
||||
import { serialize, deserialize } from 'node:v8';
|
||||
|
||||
export interface QueueItem {
|
||||
id: number;
|
||||
jobId: string;
|
||||
payload: any;
|
||||
status: 'pending' | 'processing' | 'completed' | 'failed' | 'cancelled';
|
||||
created_at: Date;
|
||||
execute_at: Date;
|
||||
completed_at?: Date;
|
||||
failed_at?: Date;
|
||||
cancelled_at?: Date;
|
||||
}
|
||||
|
||||
export class Queue<DATA = any> {
|
||||
private db: Database;
|
||||
|
||||
constructor(dbPath: string) {
|
||||
this.db = new Database(dbPath, { create: true });
|
||||
this.db.exec('PRAGMA journal_mode = WAL;');
|
||||
this.db.run(`
|
||||
CREATE TABLE IF NOT EXISTS queue_items (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
jobId TEXT NOT NULL,
|
||||
payload BLOB NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
execute_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
completed_at DATETIME,
|
||||
failed_at DATETIME,
|
||||
cancelled_at DATETIME
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_jobId ON queue_items (jobId);
|
||||
CREATE INDEX IF NOT EXISTS idx_status ON queue_items (status);
|
||||
CREATE INDEX IF NOT EXISTS idx_execute_at ON queue_items (execute_at);
|
||||
`);
|
||||
}
|
||||
|
||||
enqueue(jobId: string, payload: DATA, delay?: number | Date) {
|
||||
const executeAt = delay instanceof Date ? delay : new Date(Date.now() + (delay || 0));
|
||||
this.db.run(
|
||||
`INSERT INTO queue_items (jobId, payload, status, execute_at) VALUES (?, ?, ?, ?)`,
|
||||
jobId,
|
||||
serialize(payload) as any,
|
||||
'pending',
|
||||
executeAt.toISOString(),
|
||||
);
|
||||
}
|
||||
|
||||
dequeue() {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE queue_items
|
||||
SET status = 'processing'
|
||||
WHERE id = (
|
||||
SELECT id FROM queue_items
|
||||
WHERE status = 'pending' AND execute_at <= ?
|
||||
ORDER BY execute_at ASC, created_at ASC
|
||||
LIMIT 1
|
||||
)
|
||||
RETURNING *
|
||||
`);
|
||||
const result = stmt.get(new Date().toISOString()) as QueueItem | null;
|
||||
return result ? { ...result, payload: deserialize(result.payload) as DATA } : null;
|
||||
}
|
||||
|
||||
complete(id: number) {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE queue_items
|
||||
SET status = 'completed', completed_at = ?
|
||||
WHERE id = ?
|
||||
`);
|
||||
stmt.run(new Date().toISOString(), id);
|
||||
}
|
||||
|
||||
fail(id: number) {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE queue_items
|
||||
SET status = 'failed', failed_at = ?
|
||||
WHERE id = ?
|
||||
`);
|
||||
stmt.run(new Date().toISOString(), id);
|
||||
}
|
||||
|
||||
getNextExecutionTime() {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT execute_at
|
||||
FROM queue_items
|
||||
WHERE status = 'pending'
|
||||
ORDER BY execute_at ASC, created_at ASC
|
||||
LIMIT 1
|
||||
`);
|
||||
const result = stmt.get() as { execute_at: string } | null;
|
||||
return result ? new Date(result.execute_at) : null;
|
||||
}
|
||||
|
||||
cancel(jobId: string) {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE queue_items
|
||||
SET status = 'cancelled', cancelled_at = ?
|
||||
WHERE jobId = ?
|
||||
AND status = 'pending'
|
||||
`);
|
||||
stmt.run(new Date().toISOString(), jobId);
|
||||
}
|
||||
|
||||
isEmpty() {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT COUNT(*) as count
|
||||
FROM queue_items
|
||||
WHERE status = 'pending'
|
||||
`);
|
||||
const result = stmt.get() as { count: number };
|
||||
return result.count === 0;
|
||||
}
|
||||
}
|
||||
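A sketch of the queue used on its own; the database path and payload shape are illustrative. Jobs only become visible to `dequeue` once their `execute_at` time has passed:

```ts
import { Queue } from './queue';

interface EmailPayload {
  to: string;
  subject: string;
}

const queue = new Queue<EmailPayload>('./data/queue.sqlite');

// Enqueue a job to run roughly five seconds from now.
queue.enqueue('welcome-email', { to: 'user@example.com', subject: 'Welcome!' }, 5_000);

const item = queue.dequeue(); // null until execute_at has passed
if (item) {
  try {
    // ... deliver the email ...
    queue.complete(item.id);
  } catch {
    queue.fail(item.id);
  }
}
```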
116
packages/util/src/scheduler/scheduler.service.ts
Normal file
@@ -0,0 +1,116 @@
import cronParser from 'cron-parser';
import { Queue } from './queue';
import { JobType, type Job } from './types';

let queue: Queue<Job>;
const workers: { [key: string]: Worker } = {};
const MAX_DEQUEUE_DELAY = parseInt(process.env.MAX_DEQUEUE_DELAY || '1000');
const workerMap: { [key: string]: string } = {
  [JobType.EMAIL]: `${import.meta.dir}/workers/email.worker.ts`,
};
let paused = true;
let isRunning = false;

export const init = async () => {
  queue = new Queue(process.env.QUEUE_DB_PATH || 'queue.db');
};

const getWorker = (type: string) => {
  const worker = workers[type];
  if (!worker) {
    console.log(`Worker not found for job ${type}, creating new worker`);
    return createWorker(type, workerMap[type]);
  }
  return worker;
};

const createWorker = (type: string, path: string) => {
  console.debug(`Creating worker for job ${type} at path ${path}`);
  // @ts-expect-error
  const worker = new Worker(path, { type, smol: true });
  workers[type] = worker;
  return worker;
};

const runQueue = () => {
  if (paused) {
    console.debug('Scheduler paused. Not running jobs.');
    isRunning = false;
    return;
  }
  isRunning = true;
  const item = queue.dequeue();
  if (item) {
    getWorker(item.payload.type).postMessage({
      id: item.id,
      job: item.payload,
    });
    return runQueue();
  }

  const nextExecutionTime = queue.getNextExecutionTime();
  if (!nextExecutionTime && queue.isEmpty()) {
    console.debug('No jobs to run. Exiting scheduler.');
    shutdown();
    return;
  }

  const delay = Math.min(
    nextExecutionTime ? nextExecutionTime.getTime() - Date.now() : MAX_DEQUEUE_DELAY,
    MAX_DEQUEUE_DELAY,
  );
  console.debug(`No jobs to run now. Next execution time is ${new Date(Date.now() + delay)}`);
  setTimeout(runQueue, delay);
};

export const start = () => {
  if (queue.isEmpty()) {
    console.debug('No jobs to run. Exiting scheduler.');
    return;
  }
  paused = false;
  runQueue();
};

export const pause = () => {
  paused = true;
};

export const resume = () => {
  paused = false;
  if (!isRunning) {
    runQueue();
  }
};

export function shutdown() {
  try {
    for (const key in workers) {
      workers[key].terminate();
      delete workers[key];
    }
    isRunning = false;
    paused = true;
  } catch (error) {
    console.error(`Failed to shutdown workers.`, error);
  }
}

export const schedule = (job: Job) => {
  if (!job.start && job.repeat) {
    // If job is set to repeat, get the next execution time based on the cron pattern if no start time is provided
    const interval = cronParser.parse(job.repeat);
    job.start = interval.next().getTime();
  }
  const delay = job.start ? job.start - Date.now() : 0;
  queue.enqueue(job.id, job, Math.max(delay, 0));

  if (!isRunning) {
    start();
  }
};

export const unschedule = (jobId: string) => {
  console.debug(`Unscheduling job ${jobId}`);
  return queue.cancel(jobId);
};
1
packages/util/src/scheduler/types/index.ts
Normal file
@@ -0,0 +1 @@
export * from './jobs';
22
packages/util/src/scheduler/types/jobs.ts
Normal file
@@ -0,0 +1,22 @@
export enum JobType {
  EMAIL = 'email',
}

export interface Job {
  id: string;
  name: string;
  type: JobType;
  start: number;
  repeat?: string;
  data: any;
}

export interface EmailJob extends Job {
  type: JobType.EMAIL;
  data: {
    to: string;
    from: string;
    subject: string;
    body: string;
  };
}
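
Taken together with scheduler.service.ts above, a hedged usage sketch of the public API. Only `init`, `schedule`, `unschedule`, and `shutdown` come from the module; the job id, name, addresses, and timings are placeholders, and the import paths are illustrative.

```ts
import { init, schedule, unschedule, shutdown } from './scheduler.service';
import { JobType, type EmailJob } from './types';

const job: EmailJob = {
  id: 'welcome-1',
  name: 'welcome-email',
  type: JobType.EMAIL,
  start: Date.now() + 60_000, // run in one minute; for cron-style jobs, `repeat` holds the pattern
  data: {
    to: 'user@example.com',
    from: 'noreply@example.com',
    subject: 'Welcome',
    body: 'Hello!',
  },
};

await init();       // opens the SQLite-backed queue
schedule(job);      // enqueues the job and starts the run loop if it is idle
unschedule(job.id); // cancels it again while it is still pending
shutdown();         // terminates any spawned workers
```
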
11
packages/util/src/scheduler/workers/email.worker.ts
Normal file
@@ -0,0 +1,11 @@
import createWorkerMessageHandler from '../lib/workerMessageHandler';
import type { EmailJob } from '../types';

const sendMail = async ({ jobId, job: { name, data } }: { jobId: number; job: EmailJob }) => {
  console.log(`Sending mail for job ${name} with data ${JSON.stringify(data)}`);

  self.postMessage({ name, data, status: 'completed' });
};

declare var self: Worker;
self.onmessage = createWorkerMessageHandler('email', sendMail);
1
packages/util/src/scheduler/workers/index.ts
Normal file
@@ -0,0 +1 @@
export * from './email.worker';
99
packages/util/src/sqlite.ts
Normal file
@@ -0,0 +1,99 @@
import { Database } from 'bun:sqlite';

export function dynamicInsert<T extends {} = any>(db: Database, table: string, item: T) {
  const keys = Object.keys(item);
  const values = keys.map((k) => item[k]);

  const stmt = db.prepare(`INSERT INTO ${table} (${keys.join(', ')}) VALUES (${keys.map((k) => '?').join(', ')})`);
  return stmt.run(...values).lastInsertRowid;
}

export function dynamicUpdate<T extends {} = any>(db: Database, table: string, item: T, key: string, keyValue: any) {
  const keys = Object.keys(item);
  const values = keys.map((k) => item[k]);

  const stmt = db.prepare(`UPDATE ${table} SET ${keys.map((k) => `${k} = ?`).join(', ')} WHERE ${key} = ?`);
  return stmt.run(...values, keyValue).changes;
}

export interface QueryOptions<T extends {}> {
  skip?: number;
  limit?: number;
  orderBy?: Partial<Record<keyof T, 'ASC' | 'DESC'>>;
}

type Where<T> = Partial<Record<keyof T, any>> & { or?: Partial<Record<keyof T, any>> };

export function select<T extends {}>(db: Database, table: string, options?: QueryOptions<T>, select: string = '*', where?: Where<T>): T[] {
  let query = `SELECT ${select} FROM ${table}\n`;
  const values: any[] = [];
  if (where) {
    query += `WHERE `;

    let hasOrGroup = false;
    if (where.or) {
      query += `${Object.keys(where.or)
        .map((k) => {
          values.push(where.or[k]);
          return `${k} = ?\n`;
        })
        .join('OR ')}`;
      delete where.or;
      hasOrGroup = true;
    }

    if (Object.keys(where).length > 0) {
      // Keep the SQL valid when both an OR group and plain conditions are present
      if (hasOrGroup) {
        query += `AND `;
      }
      query += `${Object.keys(where)
        .map((k) => {
          values.push(where[k]);
          return `${k} = ?\n`;
        })
        .join('AND ')}`;
    }
  }

  if (options?.orderBy) {
    // ASC/DESC keywords cannot be bound as parameters, so interpolate the direction directly
    query += `ORDER BY\n${Object.keys(options.orderBy)
      .map((k) => `${k} ${options.orderBy[k]}`)
      .join(',\n')}\n`;
  }

  if (options?.skip || options?.limit) {
    // LIMIT -1 means "no limit" in SQLite, so a bare skip or a bare limit both work
    values.push(options.limit || -1);
    values.push(options.skip || 0);
    query += `LIMIT ? OFFSET ?`;
  }

  return db.prepare(query).all(...values) as T[];
}

export function remove<T>(db: Database, table: string, where?: Where<T>) {
  let query = `DELETE FROM ${table}\n`; // trailing newline so the WHERE clause does not run into the table name

  const values: any[] = [];
  if (where) {
    query += `WHERE `;

    let hasOrGroup = false;
    if (where.or) {
      query += `${Object.keys(where.or)
        .map((k) => {
          values.push(where.or[k]);
          return `${k} = ?\n`;
        })
        .join('OR ')}`;
      delete where.or;
      hasOrGroup = true;
    }

    if (Object.keys(where).length > 0) {
      // Keep the SQL valid when both an OR group and plain conditions are present
      if (hasOrGroup) {
        query += `AND `;
      }
      query += `${Object.keys(where)
        .map((k) => {
          values.push(where[k]);
          return `${k} = ?\n`;
        })
        .join('AND ')}`;
    }
  }

  return db.prepare(query).run(...values).changes;
}
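
A quick sketch of how these helpers compose. The `users` table, its columns, and the `User` interface are made up for illustration; as written, the query builder only supports equality comparisons.

```ts
import { Database } from 'bun:sqlite';
import { dynamicInsert, dynamicUpdate, select, remove } from './sqlite';

interface User { id?: number; name: string; department: string; age: number; }

const db = new Database(':memory:');
db.run('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, department TEXT, age INTEGER)');

// Column names come from the object keys; values are bound as parameters.
const rowId = dynamicInsert(db, 'users', { name: 'Alice', department: 'Engineering', age: 30 });
dynamicUpdate(db, 'users', { age: 31 }, 'id', rowId);

// Builds: SELECT * FROM users WHERE department = ? ORDER BY age DESC LIMIT ? OFFSET ?
const engineers = select<User>(
  db,
  'users',
  { orderBy: { age: 'DESC' }, limit: 10, skip: 0 },
  '*',
  { department: 'Engineering' },
);

remove<User>(db, 'users', { department: 'Engineering' });
```
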
54
packages/util/src/text.test.ts
Normal file
@@ -0,0 +1,54 @@
import { describe, it, expect } from "bun:test";
import { truncateText, formatNumberToShortForm } from "./text";

describe("truncateText", () => {
  it("should truncate text longer than the specified length", () => {
    const input = "This is a long text that needs to be truncated.";
    const result = truncateText(input, 10);
    expect(result).toBe("This is a ...");
  });

  it("should not truncate text shorter than the specified length", () => {
    const input = "Short text";
    const result = truncateText(input, 20);
    expect(result).toBe(input);
  });

  it("should use the default length of 1000 if no length is specified", () => {
    const input = "A".repeat(1001);
    const result = truncateText(input);
    expect(result).toBe("A".repeat(1000) + "...");
  });
});

describe("formatNumberToShortForm", () => {
  it("should format numbers in billions with 'b' suffix", () => {
    const result = formatNumberToShortForm(1_500_000_000);
    expect(result).toMatch(/1\.5.*b/);
  });

  it("should format numbers in millions with 'M' suffix", () => {
    const result = formatNumberToShortForm(2_300_000);
    expect(result).toMatch(/2\.3.*M/);
  });

  it("should format numbers in thousands with 'k' suffix", () => {
    const result = formatNumberToShortForm(12_000);
    expect(result).toMatch(/12.*k/);
  });

  it("should format numbers below 1000 without a suffix", () => {
    const result = formatNumberToShortForm(999);
    expect(result).toBe("999");
  });

  it("should handle negative numbers correctly", () => {
    const result = formatNumberToShortForm(-1_200_000);
    expect(result).toMatch(/-1\.2.*M/);
  });

  it("should respect the specified locale", () => {
    const result = formatNumberToShortForm(1_234_567, "de-DE");
    expect(result).toMatch(/1,23.*M/);
  });
});
62
packages/util/src/text.ts
Normal file
@@ -0,0 +1,62 @@
export function truncateText(input: string, length: number = 1000): string {
  return input.length > length ? input.substring(0, length) + '...' : input;
}

export function formatNumberToShortForm(number: number, locale: string = 'en-US') {
  let suffix = '';
  let value = number;

  if (Math.abs(number) >= 1e9) {
    value = number / 1e9;
    suffix = 'b';
  } else if (Math.abs(number) >= 1e6) {
    value = number / 1e6;
    suffix = 'M';
  } else if (Math.abs(number) >= 1e3) {
    value = number / 1e3;
    suffix = 'k';
  }

  // Format the number to have up to 4 significant digits
  const formattedValue = new Intl.NumberFormat(locale, {
    maximumSignificantDigits: 4,
    minimumSignificantDigits: 3,
    minimumFractionDigits: 0,
    maximumFractionDigits: 2,
  }).format(value);

  return `${formattedValue}${suffix}`;
}

export function normalize(value: string) {
  return value
    .toString()
    .normalize('NFD')
    .replace(/[\u0300-\u036f]/g, '')
    .replace(/[^a-zA-Z\s\d]/g, '')
    .trim();
}

export function toTitleCase(value: string) {
  return value.replace(/\w\S*/g, (txt) => txt.charAt(0).toUpperCase() + txt.substring(1).toLowerCase());
}

export function escapeMarkdown(text: string) {
  return text.replace(/([\\_*~`|])/g, '\\$1');
}

export function escapeCodeBlock(text: string) {
  return text.replace(/```/g, '`\u200b``');
}

export function escapeInlineCode(text: string) {
  return text.replace(/`/g, '\u200b`');
}

export function escapeSpoiler(text: string) {
  return text.replace(/\|\|/g, '|\u200b|');
}

export function escapeAll(text: string) {
  return escapeMarkdown(escapeCodeBlock(escapeInlineCode(escapeSpoiler(text))));
}
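
The escape helpers appear aimed at chat-markdown (Discord-style) output, breaking up formatting with backslashes and zero-width spaces. A small sketch of what they return, with the inputs invented for illustration:

```ts
import { escapeMarkdown, escapeCodeBlock, escapeAll, normalize, toTitleCase } from './text';

escapeMarkdown('a *bold* _claim_');  // a \*bold\* \_claim\_
escapeCodeBlock('```js');            // `\u200b``js – breaks up a fenced block
escapeAll('||spoiler|| and `code`'); // applies spoiler, inline-code, code-block and markdown escaping in turn

normalize('Héllo, wörld!');          // "Hello world" – strips accents and punctuation
toTitleCase('hello world');          // "Hello World"
```
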
286
packages/util/src/time.test.ts
Normal file
@@ -0,0 +1,286 @@
import { describe, it, expect } from "bun:test";
import { msToDuration, secondsToDuration } from "./time";

describe("msToDuration", () => {
  it("should convert milliseconds to duration with all units", () => {
    // 1 day, 2 hours, 3 minutes, 4 seconds = 93784000 ms
    const ms = 1 * 24 * 60 * 60 * 1000 + 2 * 60 * 60 * 1000 + 3 * 60 * 1000 + 4 * 1000;
    const result = msToDuration(ms);

    expect(result).toEqual({
      days: 1,
      hours: 2,
      minutes: 3,
      seconds: 4
    });
  });

  it("should handle zero milliseconds", () => {
    const result = msToDuration(0);

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle only seconds", () => {
    const result = msToDuration(5000); // 5 seconds

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 5
    });
  });

  it("should handle only minutes", () => {
    const result = msToDuration(2 * 60 * 1000); // 2 minutes

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 2,
      seconds: 0
    });
  });

  it("should handle only hours", () => {
    const result = msToDuration(3 * 60 * 60 * 1000); // 3 hours

    expect(result).toEqual({
      days: 0,
      hours: 3,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle only days", () => {
    const result = msToDuration(2 * 24 * 60 * 60 * 1000); // 2 days

    expect(result).toEqual({
      days: 2,
      hours: 0,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle partial seconds (floor down)", () => {
    const result = msToDuration(1500); // 1.5 seconds

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 1
    });
  });

  it("should handle large values", () => {
    // 365 days, 5 hours, 30 minutes, 45 seconds
    const ms = 365 * 24 * 60 * 60 * 1000 + 5 * 60 * 60 * 1000 + 30 * 60 * 1000 + 45 * 1000;
    const result = msToDuration(ms);

    expect(result).toEqual({
      days: 365,
      hours: 5,
      minutes: 30,
      seconds: 45
    });
  });

  it("should handle rollover correctly", () => {
    // 25 hours should be 1 day, 1 hour
    const ms = 25 * 60 * 60 * 1000;
    const result = msToDuration(ms);

    expect(result).toEqual({
      days: 1,
      hours: 1,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle 61 minutes correctly", () => {
    // 61 minutes should be 1 hour, 1 minute
    const ms = 61 * 60 * 1000;
    const result = msToDuration(ms);

    expect(result).toEqual({
      days: 0,
      hours: 1,
      minutes: 1,
      seconds: 0
    });
  });

  it("should handle negative values", () => {
    const result = msToDuration(-5000);

    expect(result).toEqual({
      days: -1,
      hours: -1,
      minutes: -1,
      seconds: -5
    });
  });
});

describe("secondsToDuration", () => {
  it("should convert seconds to duration with all units", () => {
    // 1 day, 2 hours, 3 minutes, 4 seconds = 93784 seconds
    const seconds = 1 * 24 * 60 * 60 + 2 * 60 * 60 + 3 * 60 + 4;
    const result = secondsToDuration(seconds);

    expect(result).toEqual({
      days: 1,
      hours: 2,
      minutes: 3,
      seconds: 4
    });
  });

  it("should handle zero seconds", () => {
    const result = secondsToDuration(0);

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle only seconds", () => {
    const result = secondsToDuration(45);

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 45
    });
  });

  it("should handle only minutes", () => {
    const result = secondsToDuration(5 * 60); // 5 minutes

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 5,
      seconds: 0
    });
  });

  it("should handle only hours", () => {
    const result = secondsToDuration(4 * 60 * 60); // 4 hours

    expect(result).toEqual({
      days: 0,
      hours: 4,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle only days", () => {
    const result = secondsToDuration(3 * 24 * 60 * 60); // 3 days

    expect(result).toEqual({
      days: 3,
      hours: 0,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle decimal seconds (floor down)", () => {
    const result = secondsToDuration(59.7);

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 0,
      seconds: 59
    });
  });

  it("should handle large values", () => {
    // 100 days, 12 hours, 45 minutes, 30 seconds
    const seconds = 100 * 24 * 60 * 60 + 12 * 60 * 60 + 45 * 60 + 30;
    const result = secondsToDuration(seconds);

    expect(result).toEqual({
      days: 100,
      hours: 12,
      minutes: 45,
      seconds: 30
    });
  });

  it("should handle rollover correctly", () => {
    // 25 hours should be 1 day, 1 hour
    const seconds = 25 * 60 * 60;
    const result = secondsToDuration(seconds);

    expect(result).toEqual({
      days: 1,
      hours: 1,
      minutes: 0,
      seconds: 0
    });
  });

  it("should handle 61 minutes correctly", () => {
    // 61 minutes should be 1 hour, 1 minute
    const seconds = 61 * 60;
    const result = secondsToDuration(seconds);

    expect(result).toEqual({
      days: 0,
      hours: 1,
      minutes: 1,
      seconds: 0
    });
  });

  it("should handle 61 seconds correctly", () => {
    // 61 seconds should be 1 minute, 1 second
    const result = secondsToDuration(61);

    expect(result).toEqual({
      days: 0,
      hours: 0,
      minutes: 1,
      seconds: 1
    });
  });

  it("should handle negative values", () => {
    const result = secondsToDuration(-3665); // -(1 hour, 1 minute, 5 seconds); flooring makes each component more negative

    expect(result).toEqual({
      days: -1,
      hours: -2,
      minutes: -2,
      seconds: -5
    });
  });

  it("should match msToDuration for equivalent values", () => {
    const seconds = 3665; // 1 hour, 1 minute, 5 seconds
    const ms = seconds * 1000;

    const fromSeconds = secondsToDuration(seconds);
    const fromMs = msToDuration(ms);

    expect(fromSeconds).toEqual(fromMs);
  });
});
19
packages/util/src/time.ts
Normal file
@@ -0,0 +1,19 @@
import { type Duration } from 'date-fns';

export function msToDuration(ms: number): Duration {
  const seconds = Math.floor((ms / 1000) % 60);
  const minutes = Math.floor((ms / (1000 * 60)) % 60);
  const hours = Math.floor((ms / (1000 * 60 * 60)) % 24);
  const days = Math.floor(ms / (1000 * 60 * 60 * 24));

  return { days, hours, minutes, seconds };
}

export function secondsToDuration(secondsInput: number): Duration {
  const seconds = Math.floor(secondsInput % 60);
  const minutes = Math.floor((secondsInput / 60) % 60);
  const hours = Math.floor((secondsInput / 3600) % 24);
  const days = Math.floor(secondsInput / 86400);

  return { days, hours, minutes, seconds };
}
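
Because the return type is date-fns' `Duration`, the results plug straight into its formatting helpers. A small sketch, assuming `date-fns` is installed as the type-only import suggests:

```ts
import { formatDuration } from 'date-fns';
import { msToDuration, secondsToDuration } from './time';

const uptimeMs = 93_784_000; // 1 day, 2 hours, 3 minutes, 4 seconds

msToDuration(uptimeMs);
// { days: 1, hours: 2, minutes: 3, seconds: 4 }

formatDuration(msToDuration(uptimeMs));
// "1 day 2 hours 3 minutes 4 seconds"

secondsToDuration(3665);
// { days: 0, hours: 1, minutes: 1, seconds: 5 }
```
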
17
packages/util/tsconfig.json
Normal file
@@ -0,0 +1,17 @@
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "composite": true,
    "strict": false,
    "noImplicitAny": false,
    "skipLibCheck": true,
    "emitDeclarationOnly": true,
    "noEmit": false,
    "noEmitOnError": false,
    "declaration": true,
    "rootDir": ".",
    "allowImportingTsExtensions": true
  },
  "include": ["src"],
  "exclude": ["node_modules", "dist", "build", "**/*.test.ts"]
}
16
packages/util/tsdown.config.ts
Normal file
@@ -0,0 +1,16 @@
import { defineConfig } from 'tsdown';

export default defineConfig([
  {
    entry: [
      './src/**/*.ts',
      '!./src/**/*.test.ts',
    ],
    platform: 'node',
    dts: true,
    minify: false,
    sourcemap: true,
    unbundle: true,
    external: ['bun:sqlite', 'bun'],
  },
]);