diff --git a/.gitignore b/.gitignore
index 2d1e601..8085d59 100644
--- a/.gitignore
+++ b/.gitignore
@@ -143,7 +143,10 @@ vite.config.ts.timestamp-*
 # Vite cache directory
 .vite
 
-db/
+db/*
+!db/functions/*
+!db/functions
+db/functions/test-*
 db_test/
 .vscode
 
diff --git a/api/lib/functions.ts b/api/lib/functions.ts
new file mode 100644
index 0000000..0cec74d
--- /dev/null
+++ b/api/lib/functions.ts
@@ -0,0 +1,191 @@
+import { batch } from '/api/lib/json_store.ts'
+import { join } from '@std/path'
+import { ensureDir } from '@std/fs'
+
+// Define the function signatures
+export type FunctionContext = {
+  deploymentUrl: string
+  projectId: string
+}
+
+export type ReadTransformer<T = unknown> = (
+  data: T,
+  ctx: FunctionContext,
+) => T | Promise<T>
+
+export type WriteTransformer<T = unknown> = (
+  table: string,
+  data: T,
+  query: string | undefined,
+  ctx: FunctionContext,
+) => T | Promise<T>
+
+export type ProjectFunctionModule = {
+  read?: ReadTransformer
+  write?: WriteTransformer
+  config?: {
+    targets?: string[]
+  }
+}
+
+export type LoadedFunction = {
+  name: string // filename
+  module: ProjectFunctionModule
+}
+
+// Map<slug, LoadedFunction[]>
+const functionsMap = new Map<string, LoadedFunction[]>()
+let watcher: Deno.FsWatcher | null = null
+const functionsDir = './db/functions'
+
+export async function init() {
+  await ensureDir(functionsDir)
+  await loadAll()
+  startWatcher()
+}
+
+async function loadAll() {
+  console.info('Loading project functions...')
+  for await (const entry of Deno.readDir(functionsDir)) {
+    if (entry.isDirectory) {
+      await reloadProjectFunctions(entry.name)
+    }
+  }
+}
+
+async function reloadProjectFunctions(slug: string) {
+  const projectDir = join(functionsDir, slug)
+  const loaded: LoadedFunction[] = []
+
+  try {
+    await batch(5, Deno.readDir(projectDir), async (entry) => {
+      if (entry.isFile && entry.name.endsWith('.js')) {
+        const mainFile = join(projectDir, entry.name)
+        // Build a fresh import URL to bust cache
+        const importUrl = `file://${await Deno.realPath(
+          mainFile,
+        )}?t=${Date.now()}`
+        try {
+          const module = await import(importUrl)
+          // We expect a default export or specific named exports
+          const fns = module.default
+          if (fns && typeof fns === 'object') {
+            loaded.push({ name: entry.name, module: fns })
+          }
+        } catch (e) {
+          console.error(`Failed to import ${entry.name} for ${slug}:`, e)
+        }
+      }
+    })
+
+    // Sort by filename to ensure deterministic execution order
+    loaded.sort((a, b) => a.name.localeCompare(b.name))
+
+    if (loaded.length > 0) {
+      functionsMap.set(slug, loaded)
+      console.info(`Loaded ${loaded.length} functions for project: ${slug}`)
+    } else {
+      functionsMap.delete(slug)
+    }
+  } catch (err) {
+    if (!(err instanceof Deno.errors.NotFound)) {
+      console.error(`Failed to load functions for ${slug}:`, err)
+    }
+    functionsMap.delete(slug)
+  }
+}
+
+function startWatcher() {
+  if (watcher) return
+  console.info(`Starting function watcher on ${functionsDir}`)
+  watcher = Deno.watchFs(functionsDir, { recursive: true }) // Process events
+  ;(async () => {
+    for await (const event of watcher!) {
+      if (!['modify', 'create', 'remove', 'rename'].includes(event.kind)) {
+        continue
+      }
+      for (const path of event.paths) {
+        if (!path.endsWith('.js')) continue
+        const parts = path.split('/')
+        const fileName = parts.pop()
+        const slug = parts.pop()
+        if (!fileName || !slug) continue
+        await reloadProjectFunctions(slug)
+      }
+    }
+  })()
+}
+
+export function getProjectFunctions(
+  slug: string,
+): LoadedFunction[] | undefined {
+  return functionsMap.get(slug)
+}
+
+export function stopWatcher() {
+  if (watcher) {
+    watcher.close()
+    watcher = null
+  }
+}
+
+export async function applyReadTransformers<T>(
+  data: T,
+  projectId: string,
+  deploymentUrl: string,
+  tableName: string,
+  projectFunctions?: LoadedFunction[],
+): Promise<T> {
+  if (!projectFunctions || projectFunctions.length === 0) {
+    return data
+  }
+  let currentData = data
+  for (const { module } of projectFunctions) {
+    if (!module.read) continue
+    if (module.config?.targets && !module.config.targets.includes(tableName)) {
+      continue
+    }
+
+    const ctx: FunctionContext = {
+      deploymentUrl,
+      projectId,
+    }
+
+    currentData = await module.read(currentData, ctx) as T
+  }
+
+  return currentData
+}
+
+export async function applyWriteTransformers<T>(
+  data: T,
+  projectId: string,
+  deploymentUrl: string,
+  tableName: string,
+  projectFunctions?: LoadedFunction[],
+): Promise<T> {
+  if (!projectFunctions || projectFunctions.length === 0) {
+    return data
+  }
+  let currentData = data
+  for (const { module } of projectFunctions) {
+    if (!module.write) continue
+    if (module.config?.targets && !module.config.targets.includes(tableName)) {
+      continue
+    }
+
+    const ctx: FunctionContext = {
+      deploymentUrl,
+      projectId,
+    }
+
+    currentData = await module.write(
+      tableName,
+      currentData,
+      undefined,
+      ctx,
+    ) as T
+  }
+
+  return currentData
+}
diff --git a/api/lib/functions_test.ts b/api/lib/functions_test.ts
new file mode 100644
index 0000000..8914422
--- /dev/null
+++ b/api/lib/functions_test.ts
@@ -0,0 +1,109 @@
+import { assertEquals } from '@std/assert' +import * as functions from './functions.ts' +import { join } from '@std/path' +import { ensureDir } from '@std/fs' + +Deno.test('Functions Module - Pipeline & Config', async () => { + const testSlug = 'test-project-' + Date.now() + const functionsDir = './db/functions' + const projectDir = join(functionsDir, testSlug) + const file1 = join(projectDir, '01-first.js') + const file2 = join(projectDir, '02-second.js') + + try { + await Deno.remove('./db_test/deployment_functions', { recursive: true }) + await ensureDir('./db_test/deployment_functions') + } catch { + // Skipped + } + + await ensureDir(projectDir) + + // Initialize module + await functions.init() + + // Define test row type + type TestRow = { + id: number + step1?: boolean + step2?: boolean + var1?: string + } + + // 1. Create function files + const code1 = ` + export default { + read: (row, ctx) => { + return { ...row, step1: true, var1: 'secret-value' } + } + } + ` + const code2 = ` + export default { + read: (row) => { + return { ...row, step2: true } + } + } + ` + await Deno.writeTextFile(file1, code1) + await Deno.writeTextFile(file2, code2) + + // Give watcher time + await new Promise((r) => setTimeout(r, 1000)) + + // 2. Verify loading and sorting + const loaded = functions.getProjectFunctions(testSlug) + if (!loaded) throw new Error('Functions not loaded') + assertEquals(loaded.length, 2) + assertEquals(loaded[0].name, '01-first.js') + assertEquals(loaded[1].name, '02-second.js') + + // 3. Mock Deployment Config + const deploymentUrl = 'test-pipeline-' + Date.now() + '.com' + + // 4. Simulate Pipeline Execution (Manually, echoing sql.ts logic) + // We can't import sql.ts functions easily here without mocking runSQL, + // so we re-implement the pipeline logic to verify the components work. 
+ + let row: TestRow = { id: 1 } + + for (const { module } of loaded) { + if (!module.read) continue + + const ctx = { + deploymentUrl, + projectId: testSlug, + } + row = await module.read(row, ctx) as TestRow + } + + const result = row + assertEquals(result.step1, true) + assertEquals(result.var1, 'secret-value') + + // Rerun pipeline + row = { id: 1 } + + for (const { module } of loaded) { + if (!module.read) continue + const ctx = { + deploymentUrl, + projectId: testSlug, + } + row = await module.read(row, ctx) as TestRow + } + + const result2 = row + assertEquals(result2.step1, true) + assertEquals(result2.step2, true) + + // Cleanup + await Deno.remove(projectDir, { recursive: true }) + try { + await Deno.remove('./db_test/deployment_functions', { recursive: true }) + } catch { + // Skipped + } + await new Promise((r) => setTimeout(r, 500)) + functions.stopWatcher() +}) diff --git a/api/lib/json_store.test.ts b/api/lib/json_store.test.ts index 26ec259..7645dbd 100644 --- a/api/lib/json_store.test.ts +++ b/api/lib/json_store.test.ts @@ -1,8 +1,7 @@ // db_test.ts -import { afterEach, beforeEach, describe, it } from '@std/testing/bdd' +import { afterEach, describe, it } from '@std/testing/bdd' import { assert, assertEquals, assertExists, assertRejects } from '@std/assert' import { createCollection } from './json_store.ts' -import { ensureDir } from '@std/fs' type User = { id: number @@ -11,12 +10,8 @@ type User = { age?: number | null } -let dbDir: string - -beforeEach(async () => { - dbDir = './db_test' - await ensureDir(dbDir) -}) +const TEST_COLLECTION = 'users_test' +const dbDir = './db_test/' + TEST_COLLECTION afterEach(async () => { try { @@ -29,7 +24,7 @@ afterEach(async () => { describe('createCollection', () => { it('inserts a record with an auto-generated numeric id', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'email', }) @@ -47,7 +42,7 @@ describe('createCollection', () => { it('finds a 
record by id', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'email', }) @@ -63,7 +58,7 @@ describe('createCollection', () => { it('returns null when record not found by id', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) @@ -73,7 +68,7 @@ describe('createCollection', () => { it('updates a record', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) @@ -91,7 +86,7 @@ describe('createCollection', () => { it('deletes a record', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) @@ -109,7 +104,7 @@ describe('createCollection', () => { it('finds records using predicate', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) @@ -138,7 +133,7 @@ describe('createCollection', () => { it('enforces unique key constraint on insert', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'email', }) @@ -158,7 +153,7 @@ describe('createCollection', () => { it('returns null/false for update/delete on non-existent id', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) @@ -177,7 +172,7 @@ describe('createCollection', () => { it('handles null/undefined unique fields gracefully', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) @@ -199,7 +194,7 @@ describe('createCollection', () => { it('evicts LRU cache when cacheSize exceeded', async () => { const users = await createCollection({ - name: 'users', + name: TEST_COLLECTION, primaryKey: 'id', }) diff --git a/api/lib/json_store.ts b/api/lib/json_store.ts index 14700c3..53c2701 100644 --- a/api/lib/json_store.ts +++ b/api/lib/json_store.ts 
@@ -15,7 +15,7 @@ async function atomicWrite(filePath: string, content: string): Promise<void> {
   await Deno.rename(tmp, filePath)
 }
 
-const batch = async <T>(
+export const batch = async <T>(
   concurrency: number,
   source: AsyncIterable<T>,
   handler: (item: T) => Promise<void>,
diff --git a/api/server.ts b/api/server.ts
index 3127dc5..b876ef2 100644
--- a/api/server.ts
+++ b/api/server.ts
@@ -4,6 +4,9 @@ import { server } from '@01edu/api/server'
 import { Log } from '@01edu/api/log'
 import { routeHandler } from '/api/routes.ts'
 import { PORT } from './lib/env.ts'
+import { init } from '/api/lib/functions.ts'
+
+await init()
 
 const fetch = server({ log: console as unknown as Log, routeHandler })
 export default {
diff --git a/api/sql.ts b/api/sql.ts
index 118a8ca..f12e4db 100644
--- a/api/sql.ts
+++ b/api/sql.ts
@@ -4,6 +4,10 @@ import {
   DeploymentsCollection,
 } from '/api/schema.ts'
 import { DB_SCHEMA_REFRESH_MS } from '/api/lib/env.ts'
+import {
+  applyReadTransformers,
+  getProjectFunctions,
+} from '/api/lib/functions.ts'
 
 export class SQLQueryError extends Error {
   constructor(message: string, body: string) {
@@ -223,6 +227,8 @@ export const fetchTablesData = async (
   if (!sqlToken || !sqlEndpoint) {
     throw Error('Missing SQL endpoint or token')
   }
+  const projectFunctions = getProjectFunctions(params.deployment.projectId)
+
   const whereClause = constructWhereClause(params, columnsMap)
   const orderByClause = constructOrderByClause(params, columnsMap)
 
@@ -244,8 +250,17 @@
     `SELECT COUNT(*) as count FROM ${params.table} ${whereClause}`
 
   const rows = await runSQL(sqlEndpoint, sqlToken, query)
-  return {
+  // Apply read transformer pipeline
+  const transformedRows = await applyReadTransformers(
     rows,
+    params.deployment.projectId,
+    params.deployment.url,
+    params.table,
+    projectFunctions,
+  )
+
+  return {
+    rows: transformedRows,
     totalRows: limit > 0 ?
((await runSQL(sqlEndpoint, sqlToken, countQuery))[0].count) as number : rows.length, diff --git a/db/functions/tournament/1772019921601-hash-passwords.js b/db/functions/tournament/1772019921601-hash-passwords.js new file mode 100644 index 0000000..5543a20 --- /dev/null +++ b/db/functions/tournament/1772019921601-hash-passwords.js @@ -0,0 +1,15 @@ +import { respond } from '@01edu/api/response' + +export default { + write: (_table, data, _query, _ctx) => { + if (data && typeof data === 'object' && 'password' in data) { + throw respond.ForbiddenError({ + message: 'Password update is not allowed.', + }) + } + return data + }, + config: { + targets: ['users'], + }, +} diff --git a/db/functions/tournament/1772019944781-tactic-handler.js b/db/functions/tournament/1772019944781-tactic-handler.js new file mode 100644 index 0000000..7b989d2 --- /dev/null +++ b/db/functions/tournament/1772019944781-tactic-handler.js @@ -0,0 +1,71 @@ +import { encodeBase64Url } from '@std/encoding/base64url' +import { crypto } from '@std/crypto' +import { brotliCompress, brotliDecompressSync } from 'node:zlib' +import { promisify } from 'node:util' + +const encoder = new TextEncoder() +const decoder = new TextDecoder() + +const compress = promisify(brotliCompress) +const decompress = (data) => new Uint8Array(brotliDecompressSync(data)) + +async function transformRowRead(row) { + if (!row || typeof row !== 'object') return row + const transformed = {} + + for (const [key, value] of Object.entries(row)) { + if (value instanceof Uint8Array) { + if (key === 'tacticContent') { + try { + transformed[key] = decoder.decode(await decompress(value)) + } catch (e) { + console.error(`Failed to decompress tacticContent:`, e) + transformed[key] = encodeBase64Url(value) + } + } else { + transformed[key] = encodeBase64Url(value) + } + } else { + transformed[key] = value + } + } + return transformed +} + +async function transformRowWrite(table, data) { + if (table !== 'tactic' || !data || typeof data !== 
'object') return data + + // Only process if tacticContent is a string (new upload) + if (typeof data.tacticContent === 'string') { + const bytes = encoder.encode(data.tacticContent) + const hash = await crypto.subtle.digest('BLAKE3', bytes) + const compressed = await compress(bytes) + + return { + ...data, + tacticHash: encodeBase64Url(hash), + tacticSize: compressed.byteLength, + tacticContent: compressed, + } + } + + return data +} + +export default { + read: (data, _ctx) => { + if (Array.isArray(data)) { + return Promise.all(data.map(transformRowRead)) + } + return transformRowRead(data) + }, + write: (table, data) => { + if (Array.isArray(data)) { + return Promise.all(data.map((row) => transformRowWrite(table, row))) + } + return transformRowWrite(table, data) + }, + config: { + targets: ['tactic'], + }, +} diff --git a/web/pages/SettingsPage.tsx b/web/pages/SettingsPage.tsx index 2f6a08d..1f18037 100644 --- a/web/pages/SettingsPage.tsx +++ b/web/pages/SettingsPage.tsx @@ -344,22 +344,6 @@ const Accordion = ({ ) } -const ToolCard = ( - { title, desc, empty }: { title: string; desc: string; empty: string }, -) => ( -
-

{title}

-

{desc}

-
{empty}
- - Add {title.split(' ').pop()} - -
-) - const addDeploymentError = signal(null) const handleSubmit = async (e: TargetedEvent) => { e.preventDefault() @@ -700,22 +684,6 @@ const DeploymentsSettingsPage = () => { )} - {dep && ( - -
- - -
-
- )} {!dep && deps.length > 0 && (
Select a deployment to view its configuration.