diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7e8dad1
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+# dependencies
+node_modules
+package-lock.json
+
+
+# misc
+.DS_Store
+.env*
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000..2001ab5
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,9 @@
+{
+ "tabWidth": 2,
+ "printWidth": 80,
+ "endOfLine": "auto",
+ "singleQuote": true,
+ "arrowParens": "avoid",
+ "trailingComma": "es5",
+ "bracketSameLine": true
+}
\ No newline at end of file
diff --git a/bin/dev.js b/bin/dev.js
new file mode 100644
index 0000000..e69de29
diff --git a/bin/migrate.js b/bin/migrate.js
new file mode 100644
index 0000000..72e036e
--- /dev/null
+++ b/bin/migrate.js
@@ -0,0 +1,8 @@
+import { presets } from '#env';
+import { migrate } from '#db/migrate/run.js';
+
+await migrate({
+ ...presets.migrate,
+ command: process.argv[2] ?? 'up',
+ options: process.argv.slice(3),
+});
diff --git a/bin/prepare.js b/bin/prepare.js
new file mode 100644
index 0000000..23bdeaf
--- /dev/null
+++ b/bin/prepare.js
@@ -0,0 +1,12 @@
+import { copyFileSync } from 'fs';
+import { resolve as resolvePath } from 'path';
+
+import { presets } from '#env';
+import { CWD, resolve } from '#utils/location.js';
+
+if (!presets.app.isProduction) {
+ const source = resolve(import.meta, '../src/utils/pre-push');
+ const target = resolvePath(CWD, '.git/hooks/pre-push');
+
+ copyFileSync(source, target);
+}
diff --git a/bin/test.js b/bin/test.js
new file mode 100644
index 0000000..5ad6800
--- /dev/null
+++ b/bin/test.js
@@ -0,0 +1,5 @@
+import { runAllTestFiles, printAllTestTree } from '#test/run.js';
+
+process.argv[2] === 'status'
+ ? await printAllTestTree()
+ : await runAllTestFiles();
diff --git a/index.d.ts b/index.d.ts
new file mode 100644
index 0000000..e69de29
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..29bc58f
--- /dev/null
+++ b/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "@uah/server",
+ "private": true,
+ "author": "UAH",
+ "description": "description",
+ "version": "1.0.0",
+ "license": "ISC",
+ "type": "module",
+ "typings": "index.d.ts",
+ "engines": {
+ "node": ">=20.1.0"
+ },
+ "imports": {
+ "#runtime/*": "./src/runtime/*",
+ "#compiler/*": "./src/compiler/*"
+ },
+ "dependencies": {
+ "typescript": "next",
+ "uWebSockets.js": "uNetworking/uWebSockets.js#v20.30.0"
+ },
+ "devDependencies": {
+ "@typescript-eslint/eslint-plugin": "latest",
+ "@typescript-eslint/parser": "latest",
+ "eslint": "latest",
+ "eslint-config-prettier": "latest",
+ "eslint-plugin-prettier": "latest",
+ "prettier": "latest"
+ }
+}
\ No newline at end of file
diff --git a/src/config.js b/src/config.js
new file mode 100644
index 0000000..af085df
--- /dev/null
+++ b/src/config.js
@@ -0,0 +1,67 @@
+import process from 'process';
+import { pathToFileURL } from 'url';
+
+export const CWD = process.cwd();
+export const CWD_URL = pathToFileURL(CWD).href;
+
+export const LIB_NAME = '@uah/server';
+
+export const SERVER = {
+ host: 'localhost',
+ port: 80,
+ path: '/',
+ url: 'http://localhost/',
+ origin: 'http://localhost',
+
+ instance: null,
+ secure: false,
+
+ set(config) {
+ const url = new URL(config.url);
+
+ this.url = url.href;
+ this.host = url.hostname;
+ this.path = url.pathname;
+ this.origin = url.origin;
+
+ this.secure = url.protocol === 'https:';
+ this.port = url.port ? +url.port : this.secure ? 443 : 80;
+
+ if (config.proxy) {
+ this.proxy = config.proxy;
+ }
+ },
+};
+
+export function initConfig({ pathsBasePath, plugins }) {
+ const config = plugins?.find(({ name }) => name === LIB_NAME);
+
+ if (config?.server) {
+ SERVER.set(config.server);
+ }
+
+ if (config?.bundle) {
+ BUNDLE.set(config.bundle);
+ }
+
+ LOCATION.root.url = SERVER.path;
+ LOCATION.root.path = pathsBasePath + '/';
+
+ LOCATION.src.url = LOCATION.root.url + BUNDLE.source + '/';
+ LOCATION.src.path = LOCATION.root.path + LOCATION.src.name + '/';
+
+ LOCATION.app.url = LOCATION.src.url + LOCATION.app.name + '/';
+ LOCATION.app.path = LOCATION.src.path + LOCATION.app.name + '/';
+
+ LOCATION.lib.url = LOCATION.src.url + LOCATION.lib.name + '/';
+ LOCATION.lib.path = LOCATION.src.path + LOCATION.lib.name + '/';
+
+ LOCATION.dev.url = LOCATION.src.url + LOCATION.dev.name + '/';
+
+ LOCATION.assets.url = LOCATION.src.url + LOCATION.assets.name + '/';
+ LOCATION.assets.path = LOCATION.src.path + LOCATION.assets.name + '/';
+
+ LOCATION.runtime.url = LOCATION.src.url;
+ LOCATION.runtime.path =
+ LOCATION.root.path + 'node_modules/' + LIB_NAME + '/src/runtime/';
+}
diff --git a/src/runtime/app.js b/src/runtime/app.js
new file mode 100644
index 0000000..850e61b
--- /dev/null
+++ b/src/runtime/app.js
@@ -0,0 +1,7 @@
+import { startServer } from './server/app.js';
+
+export const app = {
+ server: {
+ run: startServer,
+ },
+};
diff --git a/src/runtime/db/actions.js b/src/runtime/db/actions.js
new file mode 100644
index 0000000..9d39566
--- /dev/null
+++ b/src/runtime/db/actions.js
@@ -0,0 +1,58 @@
+import { fs } from '../utils/native.js';
+import { CWD } from '../utils/location.js';
+
+const mapVars = Object.assign(Object.create(null), {
+ _uid: context => context.uid,
+ _lang: context => context.language,
+});
+
+const getValues = (context, payload, params) => {
+ const values = [];
+
+ for (let i = 0; i < params.length; i++) {
+ const name = params[i];
+ values[i] = mapVars[name]?.(context) ?? payload[name];
+ }
+
+ return values;
+};
+
+export const createAction =
+ ({ text, params, returnMethod }) =>
+ async (context, payload) =>
+ returnMethod(
+ await context.db.unsafe(text, getValues(context, payload, params))
+ );
+
+export const createActionFromFileSQL = ({ path: names }) => {
+ let action = async (context, payload) => {
+ const name = `${names[0]}/${names[1]}/${names[2]}`;
+ const path = `${CWD}/src/app/${names[0]}/${names[1]}/sql/${names[2]}.sql`;
+
+ const params = [];
+ const source = (await fs.readFile(path, 'utf8')).split('${');
+
+ for (let i = 1; i < source.length; i++) {
+ const sql = source[i];
+ const index = sql.indexOf('}');
+
+ const key = sql.slice(0, index).trim();
+ const num = params.includes(key)
+ ? params.indexOf(key) + 1
+ : params.push(key);
+
+ source[i] = '$' + num + sql.slice(index + 1);
+ }
+
+ action = createAction({
+ name,
+ params,
+ text: source.join(''),
+ returnMethod: res => res.rows,
+ });
+
+ return await action(context, payload);
+ };
+
+ return (context, payload) => action(context, payload);
+};
diff --git a/src/runtime/db/client.js b/src/runtime/db/client.js
new file mode 100644
index 0000000..8458f9f
--- /dev/null
+++ b/src/runtime/db/client.js
@@ -0,0 +1,38 @@
+import postgres from 'postgres';
+import { presets } from '#env';
+import { noop } from '#utils/native.js';
+import { onAborted } from '#utils/process.js';
+
+export const getConnectOptions = (
+ options = presets.db,
+ ns = presets.app.id.toUpperCase()
+) => {
+ options = {
+ onnotice: noop,
+ connect_timeout: 60,
+ ...options,
+ connection: {
+ application_name: presets.app.id,
+ ...options?.connection,
+ },
+ };
+
+ const { env } = process;
+
+ if (env[ns + '_DB_USER']) options.username = env[ns + '_DB_USER'];
+ if (env[ns + '_DB_PASS']) options.password = env[ns + '_DB_PASS'];
+
+ if (env.DB_PORT) options.port = env.DB_PORT;
+ if (env.DB_HOST) options.host = env.DB_HOST;
+ if (env.DB_NAME) options.database = env.DB_NAME;
+
+ return options;
+};
+
+export const createClient = (options = getConnectOptions()) => {
+ const client = postgres(options);
+
+ onAborted(() => client.end({ timeout: 0 }));
+
+ return client;
+};
diff --git a/src/runtime/db/constants.js b/src/runtime/db/constants.js
new file mode 100644
index 0000000..4cb331a
--- /dev/null
+++ b/src/runtime/db/constants.js
@@ -0,0 +1,15 @@
+export const CODES = {
+ INVALID_TEXT_REPRESENTATION: '22P02',
+ INTEGRITY_CONSTRAINT_VIOLATION: '23000',
+ RESTRICT_VIOLATION: '23001',
+ NOT_NULL_VIOLATION: '23502',
+ FOREIGN_KEY_VIOLATION: '23503',
+ UNIQUE_VIOLATION: '23505',
+ CHECK_VIOLATION: '23514',
+ EXCLUSION_VIOLATION: '23P01',
+ INSUFFICIENT_PRIVILEGE: '42501',
+ INVALID_DATETIME_FORMAT: '22007',
+ DATETIME_FIELD_OVERFLOW: '22008',
+ INVALID_BINARY_REPRESENTATION: '22P03',
+ NUMERIC_VALUE_OUT_OF_RANGE: '22003',
+};
diff --git a/src/runtime/db/context.js b/src/runtime/db/context.js
new file mode 100644
index 0000000..1c60260
--- /dev/null
+++ b/src/runtime/db/context.js
@@ -0,0 +1,83 @@
+import { presets } from '#env';
+import { quoteLiteral } from './utils/text.js';
+import { createClient } from './client.js';
+import { factory } from './sql/query.js';
+
+const DB_POOL = Symbol('DB POOL');
+
+function setCustomParams({ db, uid, route, service }, params) {
+ let sql = 'SET LOCAL "custom.app"=' + quoteLiteral(presets.app.id);
+
+ if (uid) {
+ sql += ';SET LOCAL "custom.uid"=' + quoteLiteral(uid);
+ }
+
+ if (route?.name) {
+ sql += ';SET LOCAL "custom.action"=' + quoteLiteral(route.name);
+ } else if (service?.name) {
+ sql += ';SET LOCAL "custom.action"=' + quoteLiteral(service.name);
+ }
+
+ if (params) {
+ for (const key of Object.keys(params)) {
+ const value = params[key];
+ if (value != null) {
+ sql += `;SET LOCAL "custom.${key}"=`;
+ sql +=
+ value === true
+ ? "'t'"
+ : value === false
+ ? "'f'"
+ : quoteLiteral(value);
+ }
+ }
+ }
+
+ return db.unsafe(sql);
+}
+
+async function transaction(action, payload, params) {
+ if (this[DB_POOL]) {
+ return await this.db.savepoint(async () => {
+ await setCustomParams(this, params?.custom);
+ return await action(this, payload);
+ });
+ }
+
+ this[DB_POOL] = this.db;
+
+ try {
+ return await this.db.begin(async db => {
+ this.db = db;
+ await setCustomParams(this, params?.custom);
+ return await action(this, payload);
+ });
+ } finally {
+ this.db = this[DB_POOL];
+ this[DB_POOL] = null;
+
+ if (this.transactionTasks) {
+ for (const action of this.transactionTasks)
+ await action(this).catch(console.error);
+ this.transactionTasks.clear();
+ }
+ }
+}
+
+function isTransaction() {
+ return !!this[DB_POOL];
+}
+
+async function runAfterTransaction(action) {
+ (this.transactionTasks ??= new Set()).add(action);
+}
+
+export const setDataBaseContext = (context, options) => {
+ context[DB_POOL] = null;
+ context.transaction = transaction;
+ context.transactionTasks = null;
+ context.isTransaction = isTransaction;
+ context.runAfterTransaction = runAfterTransaction;
+ context.db = createClient(options);
+ context.sql = factory(context);
+};
diff --git a/src/runtime/db/copy.js b/src/runtime/db/copy.js
new file mode 100644
index 0000000..b8d62c1
--- /dev/null
+++ b/src/runtime/db/copy.js
@@ -0,0 +1,57 @@
+import { once } from 'events';
+import { addAbortSignal } from 'stream';
+import { createClient } from './client.js';
+import { signal } from '#utils/process.js';
+import { makeError } from './utils/errors.js';
+import { inlineSQL } from './utils/text.js';
+
+export const copyTo = async ({ db }, query) =>
+ addAbortSignal(signal, await db.unsafe(query).readable());
+
+export const copyFrom = async ({ db }, query) =>
+ addAbortSignal(signal, await db.unsafe(query).writable());
+
+export const respondToFile = async (context, { fileName, query }) => {
+ const meta = fileName.endsWith('.csv')
+ ? { delimiter: ',', type: 'text/csv; charset=utf-8' }
+ : { delimiter: '\t', type: 'text/tab-separated-values; charset=utf-8' };
+
+ const stream = await copyTo(
+ context,
+ `COPY (${await inlineSQL(query)}) TO STDOUT WITH(
+ FORMAT csv,
+ HEADER true,
+ DELIMITER '${meta.delimiter}')`
+ );
+
+ return context.respondStream({
+ fileName,
+ stream,
+ compress: false,
+ type: meta.type,
+ });
+};
+
+export const copyFromLink = async (ctx, link, tables) => {
+  link = { db: createClient(link) };
+
+  await ctx.sql`SET session_replication_role = 'replica'`;
+
+  try {
+    for (const table of tables) {
+      const source = `COPY ${table.source} TO STDOUT`;
+      const target = `COPY ${table.target} FROM STDIN`;
+      await once(
+        (await copyTo(link, source)).pipe(await copyFrom(ctx, target)),
+        'finish'
+      );
+    }
+  } catch (error) {
+    throw makeError(error);
+  } finally {
+    await link.db.end({ timeout: 0 });
+    await ctx.sql`SET session_replication_role = 'origin'`;
+  }
+
+  await ctx.sql`ANALYZE (SKIP_LOCKED)`;
+};
diff --git a/src/runtime/db/helpers.js b/src/runtime/db/helpers.js
new file mode 100644
index 0000000..acd2208
--- /dev/null
+++ b/src/runtime/db/helpers.js
@@ -0,0 +1,40 @@
+import { readFile } from 'fs/promises';
+
+const trimQuery = query => query.trim();
+const filterQuery = command => query =>
+ !!query && (!command || query.startsWith(`${command} `));
+
+export const execBatchSQL = async ({ db }, data, command) => {
+ const queries = [];
+ const blocks = data.split('$$');
+
+ for (let i = 0; i < blocks.length; i++) {
+ if (i % 2) {
+ const blockNext = blocks[i + 1];
+ const index = blockNext.indexOf(';');
+
+ if (command) {
+ blocks[i + 1] = blockNext.slice(index + 1);
+ } else {
+ const block = blocks[i].trim();
+ const lastIndex = queries.length - 1;
+ const queryEnd = blockNext.slice(0, index).trim();
+
+ blocks[i + 1] = blockNext.slice(index + 1);
+ queries[lastIndex] += ` $$\n${block}\n$$ ${queryEnd}`;
+ }
+ } else {
+ queries.push(
+ ...blocks[i].split(';').map(trimQuery).filter(filterQuery(command))
+ );
+ }
+ }
+
+ for (const query of queries) {
+ await db.unsafe(query);
+ }
+};
+
+export const execFileSQL = async (context, path, command) => {
+ await execBatchSQL(context, await readFile(path, 'utf8'), command);
+};
diff --git a/src/runtime/db/migrate/actions/down.js b/src/runtime/db/migrate/actions/down.js
new file mode 100644
index 0000000..9196a65
--- /dev/null
+++ b/src/runtime/db/migrate/actions/down.js
@@ -0,0 +1,26 @@
+import { green } from '#utils/console.js';
+import { runFiles } from '../internals/actions.js';
+
+// Revert a single migration file: `migrate down <schema> <file_name>`.
+// Throws when the schema or the file name cannot be resolved.
+export const down = async (context, payload) => {
+  const { schemas, params } = payload;
+  const [schemaName, fileName] = params;
+
+  const schema = schemas.find(schema => schema.name === schemaName);
+
+  if (!schema) {
+    throw new Error(`Not found schema "${schemaName}"`);
+  }
+
+  const file = schema.names.get(fileName);
+
+  if (!file) {
+    throw new Error(`Not found file "${fileName}"`);
+  }
+
+  payload.files = [file];
+  await context.transaction(runFiles, payload);
+
+  console.log(green('Migrate: already done'));
+};
diff --git a/src/runtime/db/migrate/actions/help.js b/src/runtime/db/migrate/actions/help.js
new file mode 100644
index 0000000..398eefa
--- /dev/null
+++ b/src/runtime/db/migrate/actions/help.js
@@ -0,0 +1,60 @@
+import { bold } from '#utils/console.js';
+
+import { getSchemas } from '../internals/schemas.js';
+
+const readMeUrl =
+ 'https://github.com/HRForecast/NodeJS-smartAPI/blob/master/docs/MIGRATE.md';
+
+export const help = () => {
+ const schemas = getSchemas();
+
+ console.log(`usage: npm run migrate [command] [schema] [file_name]
+
+ These are common Migrate commands used in various situations:
+
+ ${bold('up')}:
+
+ up Run migrations for current schema
+ up [schema] Run migrations for [schema]
+ up [schema] [file_name] Run migrations for [schema] [file_name]
+
+ ${bold('down')}:
+
+ down [schema] [file_name] Run migrations for [file_name]
+
+ ${bold('status')}:
+
+ status Check status of migrations table
+ status [schema] Check status of migrations table for [schema]
+
+ ${bold('rollback')}:
+
+ rollback [schema] Rollback [schema]
+ rollback all schemas Rollback all schemas that are dependent on current
+
+
+ Schema dependencies: ${schemas.map(({ name }) => name).join(', ')}.
+
+ ENV variables:
+ DB_PORT,
+ DB_HOST,
+ DB_NAME,
+ MASTER_DB_USER,
+ MASTER_DB_PASS,
+ ${schemas
+ .reduce((acc, { name, isCurrent }) => {
+ const prefix = name.toUpperCase();
+ const user = `${prefix}_DB_USER`;
+ const pass = `${prefix}_DB_PASS`;
+
+ [user, pass].map(val => acc.push(isCurrent ? bold(val) : val));
+
+ return acc;
+ }, [])
+ .join(',\n ')}
+
+ More detailed description you can find via this link:
+ ${readMeUrl}
+
+ 'npm run migrate help' list available subcommands and some concept guides.`);
+};
diff --git a/src/runtime/db/migrate/actions/rollback.js b/src/runtime/db/migrate/actions/rollback.js
new file mode 100644
index 0000000..682af44
--- /dev/null
+++ b/src/runtime/db/migrate/actions/rollback.js
@@ -0,0 +1,49 @@
+import { green } from '#utils/console.js';
+import { runFiles } from '../internals/actions.js';
+import { wrongSchema, wrongAllSchemas } from '../internals/errors.js';
+import { STATUS_DONE, STATUS_UPDATED } from '../constants.js';
+
+const filterByStatus = ({ status }) =>
+  status === STATUS_DONE || status === STATUS_UPDATED;
+
+function filterBySchema({ schema }) {
+  return schema === this;
+}
+
+// Revert applied migrations for one schema (or for "all schemas"),
+// then best-effort drop of each migrations table, schema and role.
+export const rollback = async (context, payload) => {
+  const [schemaName, optionName] = payload.params;
+
+  let files = payload.files.filter(filterByStatus);
+  const schema = payload.schemas.find(schema => schema.name === schemaName);
+  const dropSchemas = [];
+
+  if (schema) {
+    dropSchemas.push(schema);
+    files = files.filter(filterBySchema, schema);
+  } else if (schemaName !== 'all') {
+    wrongSchema(payload.schemas, schemaName);
+  } else if (optionName !== 'schemas') {
+    wrongAllSchemas();
+  } else {
+    dropSchemas.push(...payload.schemas);
+  }
+
+  if (files.length) {
+    payload.command = 'down';
+    payload.files = files.reverse();
+    await context.transaction(runFiles, payload);
+  }
+
+  // Drops are best-effort: `.catch()` without a handler re-throws the
+  // rejection, so an explicit no-op handler is required to ignore failures.
+  const { sql } = context;
+  for (const { user, schemaName: schemaSql, tableName } of dropSchemas) {
+    await sql`DROP TABLE ${tableName}`.catch(() => {});
+    await sql`DROP SCHEMA ${schemaSql}`.catch(() => {});
+    await sql`DROP USER ${user}`.catch(() => {});
+  }
+
+  console.log(green('Migrate: already done'));
+};
diff --git a/src/runtime/db/migrate/actions/status.js b/src/runtime/db/migrate/actions/status.js
new file mode 100644
index 0000000..053e47f
--- /dev/null
+++ b/src/runtime/db/migrate/actions/status.js
@@ -0,0 +1,11 @@
+import { STATUS_SKIPED } from '../constants.js';
+
+export const status = async (context, { files }) => {
+ files = files.map(({ schema, name, skip, status }) => ({
+ schema: schema.name,
+ name,
+ status: skip ? STATUS_SKIPED : status,
+ }));
+
+ console.table(files);
+};
diff --git a/src/runtime/db/migrate/actions/up.js b/src/runtime/db/migrate/actions/up.js
new file mode 100644
index 0000000..2a8a13f
--- /dev/null
+++ b/src/runtime/db/migrate/actions/up.js
@@ -0,0 +1,16 @@
+import { green } from '#utils/console.js';
+import { runFiles } from '../internals/actions.js';
+import { STATUS_NEW, STATUS_UPDATED } from '../constants.js';
+
+const filterByStatus = ({ status }) =>
+ status === STATUS_NEW || status === STATUS_UPDATED;
+
+export const up = async (context, payload) => {
+ payload.files = payload.files.filter(filterByStatus);
+
+ if (payload.files.length) {
+ await context.transaction(runFiles, payload);
+ }
+
+ console.log(green('Migrate: already done\n'));
+};
diff --git a/src/runtime/db/migrate/constants.js b/src/runtime/db/migrate/constants.js
new file mode 100644
index 0000000..96e2723
--- /dev/null
+++ b/src/runtime/db/migrate/constants.js
@@ -0,0 +1,8 @@
+export const STATUS_NEW = 'new';
+export const STATUS_DONE = 'done';
+export const STATUS_SKIPED = 'skiped';
+export const STATUS_UPDATED = 'updated';
+export const STATUS_DELETED = 'deleted';
+
+export const LOCK_ID = 0;
+export const FOLDER_NAME = 'migrations';
diff --git a/src/runtime/db/migrate/internals/actions.js b/src/runtime/db/migrate/internals/actions.js
new file mode 100644
index 0000000..24c7b14
--- /dev/null
+++ b/src/runtime/db/migrate/internals/actions.js
@@ -0,0 +1,53 @@
+import { green, red, bold } from '#utils/console.js';
+import { getSchemas, setSchemas } from './schemas.js';
+import { saveTable } from './tables.js';
+import { setFiles, files } from './files.js';
+import { dropDatabase } from './database.js';
+import { presets } from '#env';
+
+export const createContext = config => {
+ const context = {
+ config,
+ tasks: new Set(),
+ async resolveTasks(payload) {
+ for (const task of this.tasks) await task(this, payload);
+ this.tasks.clear();
+ },
+ dropDatabase() {
+ return dropDatabase(this);
+ },
+ language: presets.language,
+ languages: presets.languages,
+ defaultLanguage: presets.language,
+ };
+ return context;
+};
+
+export const createPayload = async context => {
+ const payload = {
+ command: context.config.command,
+ params: context.config.options,
+ files,
+ schemas: getSchemas(context),
+ };
+ await setSchemas(context, payload);
+ await setFiles(payload);
+ return payload;
+};
+
+export const runFiles = async (context, payload) => {
+ let i = 0;
+ const { files, command } = payload;
+ const action = command === 'up' ? green(command) : bold(red(command));
+
+ for (const { name, skip, schema, [command]: method } of files)
+ if (skip === false) {
+ console.log(
+ green('Migrate') + `: ${++i} ${action} ${schema.name} ${name}`
+ );
+
+ await method?.(context, schema.payload);
+ }
+
+ await saveTable(context, payload);
+};
diff --git a/src/runtime/db/migrate/internals/database.js b/src/runtime/db/migrate/internals/database.js
new file mode 100644
index 0000000..81b7df4
--- /dev/null
+++ b/src/runtime/db/migrate/internals/database.js
@@ -0,0 +1,79 @@
+import { LOCK_ID } from '../constants.js';
+import { presets } from '#env';
+import { green, yellow, red } from '#utils/console.js';
+import { setDataBaseContext } from '#db/context.js';
+import { getConnectOptions, createClient } from '#db/client.js';
+
+export const defaultOptions = {
+ max: 1,
+ prepare: false,
+ username: 'postgres',
+ password: 'pass',
+};
+
+const createDatabase = async context => {
+ const { connection } = context.config;
+ await context.db.end({ timeout: 0 });
+
+ const client = createClient({ ...connection, database: 'postgres' });
+ await client.unsafe(`CREATE DATABASE "${connection.database}"`);
+ await client.end({ timeout: 0 });
+
+ context.db = createClient(connection);
+};
+
+export const dropDatabase = async context => {
+ const { connection } = context.config;
+ await context.db.end({ timeout: 0 });
+
+ const client = createClient({ ...connection, database: 'postgres' });
+ await client.unsafe(`DROP DATABASE IF EXISTS "${connection.database}"`);
+ await client.end({ timeout: 0 });
+};
+
+export const lockMigrate = async ({ sql }) => {
+ const isLock = await sql`
+ SELECT pg_try_advisory_lock(${LOCK_ID}::bigint) AS "0"
+ `.findOneValue();
+
+ if (isLock === false) {
+ console.log(yellow(`Migrate: `) + red(`waiting release lock ${LOCK_ID}`));
+ await sql`SELECT pg_advisory_lock(${LOCK_ID}::bigint)`;
+ }
+
+ if (!presets.app.isTesting)
+ console.log(green(`Migrate: start lock ${LOCK_ID}`));
+};
+
+export const unlockMigrate = async ({ sql }) =>
+ await sql`SELECT pg_advisory_unlock_all()`;
+
+export const connect = async context => {
+ context.config.connection = getConnectOptions(
+ {
+ ...presets.db,
+ ...defaultOptions,
+ ...context.config.connection,
+ },
+ 'MASTER'
+ );
+
+ setDataBaseContext(context, context.config.connection);
+
+ try {
+ await lockMigrate(context);
+ } catch (error) {
+ if (error?.code === '3D000') {
+ await createDatabase(context);
+ await lockMigrate(context);
+ } else {
+ throw error;
+ }
+ }
+};
+
+export const disconnect = async context => {
+ if (context.db) {
+ await context.db.end({ timeout: 0 });
+ }
+};
diff --git a/src/runtime/db/migrate/internals/errors.js b/src/runtime/db/migrate/internals/errors.js
new file mode 100644
index 0000000..152e3e1
--- /dev/null
+++ b/src/runtime/db/migrate/internals/errors.js
@@ -0,0 +1,15 @@
+import { red, green, inverse } from '#utils/console.js';
+
+export const wrongSchema = (schemas, name) => {
+ const names = schemas.map(({ name }) => green(name)).join('", "');
+
+ throw red(
+ name
+ ? `Not found schema ${inverse(name)}\nPlease use "${names}"`
+ : `Please enter one of "${names}"`
+ );
+};
+
+export const wrongAllSchemas = () => {
+ throw red(`Please enter "${green('schemas')}"`);
+};
diff --git a/src/runtime/db/migrate/internals/files.js b/src/runtime/db/migrate/internals/files.js
new file mode 100644
index 0000000..b1b6560
--- /dev/null
+++ b/src/runtime/db/migrate/internals/files.js
@@ -0,0 +1,137 @@
+import { resolve } from 'path';
+import { createHash } from 'crypto';
+import { readdir, readFile } from 'fs/promises';
+import { fileURLToPath, pathToFileURL } from 'url';
+import { CWD } from '#utils/location.js';
+import { execBatchSQL } from '#db/helpers.js';
+import {
+  FOLDER_NAME,
+  STATUS_NEW,
+  STATUS_DONE,
+  STATUS_UPDATED,
+} from '../constants.js';
+
+export const files = [];
+export const indexes = new Map();
+export const getHash = data => createHash('sha256').update(data).digest('base64url');
+export const setStatusByHash = (file, hash) => {
+ file.status =
+ file.hash == hash
+ ? STATUS_DONE
+ : file.wasDone
+ ? STATUS_UPDATED
+ : STATUS_NEW;
+
+ file.hash = hash;
+};
+
+export const getFolderPath = (name, isApplication) =>
+ isApplication
+ ? CWD + '/src/' + FOLDER_NAME
+ : resolve(
+ fileURLToPath(import.meta.url),
+ '../../../../../src/' + FOLDER_NAME + '/' + name
+ );
+
+const filterImportFiles = file => (file.module ? indexes.has(file.url) : true);
+
+const sortByDependencies = ({ index: a }, { index: b }) =>
+ a > b ? 1 : a < b ? -1 : 0;
+
+const loadFileSQL = async file => {
+ const data = await readFile(fileURLToPath(file.url), 'utf8');
+ setStatusByHash(file, getHash(data));
+
+ file.up = context => execBatchSQL(context, data);
+ file.down = context => execBatchSQL(context, data, 'DROP');
+ file.index = Infinity;
+};
+
+// eslint-disable-next-line node/no-unsupported-features/es-syntax
+const importFile = file => file.module && import(file.url);
+const getBaseName = name => name.slice(0, name.lastIndexOf('.')).toLowerCase();
+
+const loadFolder = async (promises, schema, base) => {
+ const { names, folders } = schema;
+
+ for (const dirent of await readdir(base.path, { withFileTypes: true })) {
+ if (dirent.isDirectory()) {
+ let meta = null;
+ const name = base.name + dirent.name + '/';
+
+ for (let i = 0; i < folders.length; i++)
+ if (name.startsWith(folders[i].prefix)) {
+ meta = folders[i].params;
+ break;
+ }
+
+ promises.push(
+ loadFolder(promises, schema, {
+ meta,
+ name,
+ path: base.path + dirent.name + '/',
+ })
+ );
+ } else {
+ const isFileJS = dirent.name.endsWith('.js');
+ const isFileSQL = dirent.name.endsWith('.sql');
+
+ if (!isFileJS && !isFileSQL) {
+ continue;
+ }
+
+ let file;
+ const name = base.name + getBaseName(dirent.name);
+ const url = pathToFileURL(base.path + dirent.name).href;
+
+ if (names.has(name)) {
+ file = names.get(name);
+
+ if (file.url) {
+ throw new Error(`Duplicate file name ${name}`);
+ }
+
+ file.url = url;
+ file.status = STATUS_DONE;
+ } else {
+ file = {
+ name,
+ url,
+ schema,
+ skip: false,
+ wasDone: false,
+ status: STATUS_NEW,
+ };
+
+ names.set(name, file);
+ }
+
+ if (isFileSQL) {
+ promises.push(loadFileSQL(file));
+ } else {
+ file.module = true;
+ }
+
+ if (base.meta !== null) {
+ Object.assign(file, base.meta);
+ }
+
+ files.push(file);
+ }
+ }
+};
+
+export const setFiles = async payload => {
+ const promises = [];
+
+ for (const schema of payload.schemas)
+ for (const path of schema.paths)
+ promises.push(
+ loadFolder(promises, schema, { meta: null, name: '', path: path + '/' })
+ );
+
+ for (let i = 0; i < promises.length; i++) await promises[i];
+
+ await Promise.all(files.map(importFile));
+ payload.files = files.filter(filterImportFiles).sort(sortByDependencies);
+};
diff --git a/src/runtime/db/migrate/internals/roles.js b/src/runtime/db/migrate/internals/roles.js
new file mode 100644
index 0000000..66d5423
--- /dev/null
+++ b/src/runtime/db/migrate/internals/roles.js
@@ -0,0 +1,29 @@
+import { sql } from '#db/sql/sql.js';
+import { quoteLiteral } from '#db/utils/text.js';
+
+export const setSchemaRole = (context, schema) => {
+ const { env } = process;
+ const { name } = schema;
+ const NAME = name.toUpperCase();
+
+ schema.user = sql(`"${env[`${NAME}_DB_USER`] ?? `api_${name}`}"`);
+ schema.password = env[`${NAME}_DB_PASS`] ?? 'pass';
+ schema.database = sql(`"${context.config.connection.database}"`);
+ schema.schemaName = sql(`${schema.name}`);
+
+ schema.payload = {
+ user: schema.user,
+ database: schema.database,
+ schema: schema.schemaName,
+ wasDone: name => schema.names.get(name)?.wasDone === true,
+ };
+
+ return schema;
+};
+
+export const createRole = async ({ sql }, { user, password }) => {
+ await sql`CREATE ROLE ${user} WITH NOCREATEDB NOCREATEROLE
+ LOGIN PASSWORD ${sql(quoteLiteral(password))}`;
+
+ await sql`GRANT ${user} TO CURRENT_USER`;
+};
diff --git a/src/runtime/db/migrate/internals/schemas.js b/src/runtime/db/migrate/internals/schemas.js
new file mode 100644
index 0000000..631f36d
--- /dev/null
+++ b/src/runtime/db/migrate/internals/schemas.js
@@ -0,0 +1,105 @@
+import { presets } from '#env';
+import { STATUS_DELETED } from '../constants.js';
+import { createRole } from './roles.js';
+import { getFolderPath } from './files.js';
+import { setSchemaRole } from './roles.js';
+import { createTable, setTableName } from './tables.js';
+
+const grantSchemas = ({ sql }, schemas) => {
+ const nameUsers = sql(schemas.map(({ user }) => user).join(', '));
+ const nameSchemas = sql(schemas.map(({ name }) => name).join(', '));
+
+ return sql`GRANT USAGE ON SCHEMA ${nameSchemas} TO ${nameUsers}`;
+};
+
+const createSchema = async (context, schema) => {
+ const { sql } = context;
+ const { user, database, schemaName } = schema;
+
+ try {
+ await sql`CREATE SCHEMA IF NOT EXISTS ${schemaName} AUTHORIZATION ${user}`;
+ await sql`ALTER ROLE ${user} IN DATABASE ${database} SET search_path TO ${schemaName}`;
+ } catch (error) {
+ if (error?.code === '42704') {
+ await createRole(context, schema);
+ await createSchema(context, schema);
+ } else {
+ throw error;
+ }
+ }
+ context.tasks.add(grantSchemas);
+};
+
+const setSchemaTableFiles = async ({ sql, config }, schema) => {
+ const { tableName, names } = schema;
+
+ for (const { name, hash } of await sql`SELECT name, hash FROM ${tableName}`) {
+ names.set(name, {
+ name,
+ hash,
+ schema,
+ skip: false,
+ wasDone: true,
+ status: STATUS_DELETED,
+ });
+ }
+
+ schema.folders = [];
+ schema.isBootStrap = names.size === 0;
+
+ for (const name of Object.keys(config.folders)) {
+ schema.folders.push({
+ prefix: name + '/',
+ params: config.folders[name](schema),
+ });
+ }
+};
+
+const setSchema = async (context, schema) => {
+ setTableName(context, schema);
+
+ try {
+ await setSchemaTableFiles(context, schema);
+ } catch (error) {
+ if (error?.code === '42P01') {
+ await createSchema(context, schema);
+ await createTable(context, schema);
+ await setSchemaTableFiles(context, schema);
+ } else {
+ throw error;
+ }
+ }
+};
+
+const getSchema = (context, name, isCurrent) =>
+ setSchemaRole(context, {
+ name,
+ names: new Map(),
+ paths: [getFolderPath(name, isCurrent)],
+ isCurrent,
+ });
+
+export const getSchemas = context => {
+ const appId = presets.app.id;
+
+ if (!appId) throw new Error('Missing current application id');
+
+ const schemas = context.config.apps.map(name =>
+ getSchema(context, name, false)
+ );
+
+ const schema =
+ schemas.find(({ name }) => name === appId) ??
+ getSchema(context, appId, true);
+ schema.isCurrent = true;
+
+ if (schemas.includes(schema)) schema.paths.push(getFolderPath(appId, true));
+ else schemas.push(schema);
+
+ return schemas;
+};
+
+export const setSchemas = async (context, { schemas }) => {
+ await Promise.all(schemas.map(schema => setSchema(context, schema)));
+ await context.resolveTasks(schemas);
+};
diff --git a/src/runtime/db/migrate/internals/tables.js b/src/runtime/db/migrate/internals/tables.js
new file mode 100644
index 0000000..d47e267
--- /dev/null
+++ b/src/runtime/db/migrate/internals/tables.js
@@ -0,0 +1,54 @@
+export const setTableName = ({ sql }, schema) => {
+ schema.tableName = sql(schema.name + '.migrations');
+};
+
+// Creates the per-schema bookkeeping table recording each applied migration
+// file (name) together with its content hash and timestamps.
+// Note: the async/await wrapper intentionally converts the thenable query
+// object into a plain Promise.
+export const createTable = async ({ sql }, { tableName }) =>
+  await sql`CREATE TABLE IF NOT EXISTS ${tableName} (
+    name text COLLATE "C" PRIMARY KEY,
+    hash text,
+    updated_at timestamptz not null default CURRENT_TIMESTAMP,
+    created_at timestamptz not null default CURRENT_TIMESTAMP
+  )`;
+
+export const saveTable = ({ sql }, { command, schemas, files }) => {
+ const queries = [];
+
+ for (const schema of schemas) {
+ const list = files.filter(file => file.schema === schema);
+
+ if (list.length) {
+ if (command === 'up') {
+ const names = list.map(file => file.name);
+ const hashes = list.map(file => file.hash || null);
+
+ queries.push(sql`
+ INSERT INTO ${schema.tableName}(name, hash)
+ SELECT * FROM unnest(${names}::text[], ${hashes}::text[])
+ ON CONFLICT(name) DO UPDATE SET hash = EXCLUDED.hash, updated_at = DEFAULT`);
+ } else {
+ queries.push(sql`
+ DELETE FROM ${schema.tableName}
+ WHERE name = ANY(${list.map(file => file.name)}::text[])`);
+ }
+ }
+ }
+
+ if (queries.length === 1) {
+ return queries[0];
+ } else {
+ const query = sql`WITH `;
+
+ for (let i = 1; i < queries.length; i++) {
+ const { source, values } = queries[i];
+
+ source[0] = (i === 1 ? `_${i} AS(` : `,_${i} AS(`) + source[0];
+
+ source[source.length - 1] += ')';
+ query.sql(source, ...values);
+ }
+
+ query.sql(queries[0].source, ...queries[0].values);
+
+ return query;
+ }
+};
diff --git a/src/runtime/db/migrate/run.js b/src/runtime/db/migrate/run.js
new file mode 100644
index 0000000..b53070c
--- /dev/null
+++ b/src/runtime/db/migrate/run.js
@@ -0,0 +1,32 @@
+import { presets } from '#env';
+import { up } from './actions/up.js';
+import { down } from './actions/down.js';
+import { help } from './actions/help.js';
+import { status } from './actions/status.js';
+import { rollback } from './actions/rollback.js';
+import { connect, disconnect } from './internals/database.js';
+import { createContext, createPayload } from './internals/actions.js';
+
+// Command dispatch table; the null prototype prevents arbitrary
+// `config.command` strings (e.g. "toString") from resolving to
+// Object.prototype members.
+const actions = {
+  up,
+  down,
+  help,
+  status,
+  rollback,
+  __proto__: null,
+};
+
+// Entry point used by bin/migrate.js: connects, runs the requested action
+// and always disconnects, even when the action throws.
+export const migrate = async (config = { ...presets.migrate }) => {
+  // Stashed on the function itself — presumably for tests/inspection; verify.
+  const context = (migrate.context = createContext(config));
+  const action = actions[config.command] ?? actions.help;
+
+  // help only prints usage, so no database connection is opened for it.
+  if (action === actions.help) action();
+  else
+    try {
+      await connect(context);
+      await action(context, await createPayload(context));
+      await context.resolveTasks();
+    } finally {
+      // Log (not throw) disconnect failures so the original error survives.
+      await disconnect(context).catch(console.error);
+    }
+};
diff --git a/src/runtime/db/migrate/use.js b/src/runtime/db/migrate/use.js
new file mode 100644
index 0000000..fa09419
--- /dev/null
+++ b/src/runtime/db/migrate/use.js
@@ -0,0 +1,59 @@
+import { resolve } from 'path';
+import { fileURLToPath } from 'url';
+import { presets } from '#env';
+import { files, indexes, getHash, setStatusByHash } from './internals/files.js';
+import { copyFromFile, upsertFromFile } from '../utils/csv.js';
+import { sql } from '../sql/sql.js';
+export { sql };
+
+export const use = ({ url }) => {
+ const file = files.find(file => file.url === url);
+
+ if (indexes.has(url)) {
+ throw new Error('Re-call use');
+ }
+
+ if (!file) {
+ throw new Error(`Not found file ${url}`);
+ }
+
+ indexes.set(url, (file.index = indexes.size));
+
+ return {
+ up: up => {
+ file.up = up;
+ },
+ down: down => {
+ file.down = down;
+ },
+ version: data => {
+ setStatusByHash(
+ file,
+ data == null ? null : getHash(JSON.stringify(data))
+ );
+ },
+ csv: {
+ copyFromFile: (context, table, path) =>
+ copyFromFile(
+ context,
+ table,
+ resolve(fileURLToPath(url.slice(0, url.lastIndexOf('/'))), path)
+ ).then(() =>
+ context.tasks.add(() => context.sql`VACUUM ANALYZE ${table}`)
+ ),
+ upsertFromFile: (context, table, path, keys) =>
+ upsertFromFile(
+ context,
+ table,
+ resolve(fileURLToPath(url.slice(0, url.lastIndexOf('/'))), path),
+ keys
+ ).then(() =>
+ context.tasks.add(() => context.sql`VACUUM ANALYZE ${table}`)
+ ),
+ },
+ isTesting: presets.app.isTesting,
+ isProduction: presets.app.isProduction,
+ };
+};
+
+export const schema = ([name]) => sql(`${presets.app.id}.${name}`);
diff --git a/src/runtime/db/model.js b/src/runtime/db/model.js
new file mode 100644
index 0000000..34d5bcb
--- /dev/null
+++ b/src/runtime/db/model.js
@@ -0,0 +1,47 @@
+import { allowPrivate } from '#security/access.js';
+import { Patches } from './patches.js';
+
+// Base class for DB-backed models: wires JSON-Patch style mutations
+// (see ./patches.js) into a transactional PATCH API endpoint.
+export class Model {
+  static parent = null;
+  static validator = null;
+  static relations = null;
+
+  // Applies the given patches to this model inside a single transaction.
+  static applyPatches(context, patches, payload) {
+    return context.transaction(
+      (context, payload) =>
+        new Patches(this, patches).execute(context, payload),
+      payload
+    );
+  }
+
+  // Builds an API descriptor { params, access, action } for a PATCH route.
+  // Optional `rewrite(context, path, payload)` remaps each patch path using
+  // the request payload before the patches are applied.
+  static createPatchAPI({ params, rewrite, access = allowPrivate } = {}) {
+    const action = rewrite
+      ? (context, { patches, ...payload }) =>
+          this.applyPatches(
+            context,
+            patches.map(({ path, ...patch }) => ({
+              ...patch,
+              path: rewrite(context, path, payload),
+            })),
+            payload
+          )
+      : (context, { patches, ...payload }) =>
+          this.applyPatches(context, patches, payload);
+
+    return {
+      params: {
+        ...params,
+        // Merge the required non-empty `patches` array into the body schema.
+        body: {
+          ...params?.body,
+          patches: { type: 'array', empty: false, items: 'object' },
+        },
+      },
+      access,
+      action,
+    };
+  }
+
+  // Settings key used to flag rejected deletes for this model's table.
+  static getSettingNameRejectDelete() {
+    return `smartapps.${this.tableName}.reject_deleted`;
+  }
+}
diff --git a/src/runtime/db/patch/validate.js b/src/runtime/db/patch/validate.js
new file mode 100644
index 0000000..4984bd3
--- /dev/null
+++ b/src/runtime/db/patch/validate.js
@@ -0,0 +1,22 @@
+import { validate, createValidate } from '#utils/validate.js';
+
+export function makeValidator(rules) {
+ const schema = Object.create(null);
+
+ for (const key of Object.keys(rules))
+ if (rules[key].schema) {
+ schema[key] = rules[key].schema;
+ }
+
+ if (Object.keys(schema).length === 0) {
+ return {};
+ }
+
+ return {
+ result: {
+ schema,
+ validate,
+ isValidPayload: createValidate(schema),
+ },
+ };
+}
diff --git a/src/runtime/db/patches.js b/src/runtime/db/patches.js
new file mode 100644
index 0000000..aed4b9c
--- /dev/null
+++ b/src/runtime/db/patches.js
@@ -0,0 +1,552 @@
+import { Conflict } from '../exceptions/Conflict.js';
+import { Forbidden } from '../exceptions/Forbidden.js';
+import { UnProcessable } from '../exceptions/UnProcessable.js';
+import { allowOwner, allowAdmin } from '../security/access.js';
+import { create, toArray, isObject, hasOwnProperty } from '../utils/native.js';
+import { makeError } from './utils/errors.js';
+import { makeValidator } from './patch/validate.js';
+
+// Key under which a patch carries its parent-entry metadata; a Symbol so it
+// can never collide with user-supplied patch fields.
+const PARENT = Symbol('Parent');
+
+// JSON-Patch op -> model method name.
+const METHOD_NAMES = {
+  add: 'create',
+  replace: 'update',
+  remove: 'delete',
+};
+
+// Shared entry helper (installed on each entry object and invoked with the
+// entry as `this`): records a payload value plus the bookkeeping that goes
+// with touching the column — field permissions, result validation, hooks.
+function setPayload(key, value, isId = false) {
+  if (value !== undefined) {
+    this.payload[key] = value;
+  }
+
+  // Id columns carry no field-level permissions, validation or hooks.
+  if (isId) return;
+
+  // Field-level access rule for the current method (create/update/delete).
+  const permission = this.model.rules[key]?.access?.[this.method];
+  if (permission) this.permissions.add(permission);
+
+  // Touching a validated column forces the row to be RETURNING-ed and
+  // re-validated after the statement runs.
+  if (this.model.validator?.result?.schema?.[key]) {
+    this.isValidateResult = true;
+  }
+
+  // Accumulate the column's method hooks on the entry (copy on first use).
+  if (this.methodHooks?.[key]) {
+    const hookList = this.methodHooks[key];
+
+    if (this.hooks) {
+      this.hooks.push(...hookList);
+    } else {
+      this.hooks = [...hookList];
+    }
+  }
+}
+
+// Compiles a list of JSON-Patch style operations against one model into a
+// single SQL statement (one INSERT/UPDATE/DELETE per target row, combined
+// via CTEs), then runs access checks, result validation and hooks on the
+// outcome. Patch paths are arrays shaped [...idValues, column?, ...jsonPath].
+export class Patches {
+  relations;
+  values = [];
+  entries = [];
+  queries = [];
+  returning = [];
+  // Lookup tree keyed by id-path segments -> entry. Null prototype: user
+  // input becomes keys, so prototype names must not resolve.
+  tree = create(null);
+
+  // Groups the raw patches by target row into `entries`.
+  constructor(model, patches) {
+    if (!patches || !patches.length) {
+      throw new UnProcessable('Empty patches array');
+    }
+
+    const keys = toArray(model.idColumn);
+    model.validator ??= makeValidator(model.rules);
+
+    this.keys = keys;
+    this.model = model;
+    this.keysLength = keys.length;
+
+    const { rules } = model;
+    const { length } = patches;
+    // Path length of a plain column patch: all id segments plus the column.
+    const plainLevel = this.keysLength + 1;
+
+    for (let i = 0; i < length; i++) {
+      const patchParams = patches[i];
+      const { op, path, value } = patchParams;
+
+      if (!path) {
+        throw new UnProcessable(`Path missing`);
+      }
+
+      const key = path[this.keysLength];
+
+      if (key === undefined) {
+        // Path addresses a whole row: fan the value object out per column.
+        this.adds(patchParams);
+      } else if (hasOwnProperty.call(rules, key)) {
+        if (path.length === plainLevel) {
+          // Plain column assignment on a (possibly already known) row.
+          const entry = this.get(path) || this.add(patchParams);
+
+          entry.values[key] = value;
+          entry.setPayload(key, value);
+        } else {
+          // Deep path into a JSON column: the row itself becomes an update.
+          const entry =
+            (i && this.get(path)) ||
+            this.add({ ...patchParams, op: 'replace' });
+
+          entry.setPayload(key);
+
+          if (!entry.patches) {
+            entry.patches = create(null);
+            entry.patches[key] = [];
+          } else if (!entry.patches[key]) {
+            entry.patches[key] = [];
+          }
+
+          entry.patches[key].push({
+            op,
+            value,
+            path: path.slice(plainLevel),
+          });
+        }
+      } else if (model.relations?.[key]?.model) {
+        // Patch targets a related model: queued and executed afterwards.
+        this.setRelationEntry(model.relations[key], value, patchParams);
+      } else {
+        //throw new UnProcessable(`Invalid path "${key}"`);
+      }
+    }
+  }
+
+  // Builds the INSERT for one entry; `$n` placeholders index this.values
+  // (Array#push returns the new length, i.e. the 1-based placeholder number).
+  queryInsert({ resultIndex, payload }) {
+    if (this.model.rules.created_uid) {
+      payload.created_uid = this.context.uid;
+    }
+    if (this.model.rules.updated_uid) {
+      payload.updated_uid = this.context.uid;
+    }
+
+    const names = Object.keys(payload);
+
+    const indexes = [];
+    const { tableName } = this.model;
+
+    for (const name of names) {
+      indexes.push(this.values.push(payload[name]));
+    }
+
+    this.queries.push(
+      `INSERT INTO ${tableName} ("${names.join(
+        '", "'
+      )}") VALUES($${indexes.join(', $')})${resultIndex ? ' RETURNING *' : ''}`
+    );
+  }
+
+  // Builds the UPDATE for one entry: plain column assignments plus jsonb
+  // patch operations (jsonb_set / #- removal) for deep paths.
+  queryUpdate({ id, values, patches, resultIndex }) {
+    const ids = [];
+    const set = ['updated_at=CURRENT_TIMESTAMP'];
+    const { tableName, rules } = this.model;
+
+    if (rules.updated_uid) {
+      values.updated_uid = this.context.uid;
+    }
+
+    for (const name of Object.keys(id)) {
+      const index = this.values.push(id[name]);
+      ids.push(`"${name}" = $${index}`);
+    }
+
+    for (const name of Object.keys(values)) {
+      const value = values[name];
+      const index = this.values.push(value);
+      set.push(`"${name}"=$${index}`);
+    }
+
+    if (patches) {
+      for (const name of Object.keys(patches)) {
+        let target = `"target"."${name}"`;
+        // Array-typed columns (type ends with "]") round-trip through jsonb.
+        const isArray = rules[name]?.type?.endsWith(']');
+
+        if (isArray) {
+          target = `to_jsonb(${target})`;
+        }
+
+        const removes = [];
+
+        for (const { op, path, value } of patches[name]) {
+          const index = this.values.push(path);
+
+          if (op === 'remove') {
+            removes.push({
+              sql: ` #-$${index}`,
+              index: +path[path.length - 1],
+            });
+          } else {
+            target = `jsonb_set(${target},$${index}::text[],coalesce($${this.values.push(
+              this.context.db.json(value)
+            )}::jsonb, 'null'::jsonb))`;
+          }
+        }
+
+        // Apply removals from the highest array index down so earlier
+        // removals do not shift the positions of later ones.
+        if (removes.length) {
+          target += removes
+            .sort((a, b) => (a.index > b.index ? -1 : 1))
+            .map(({ sql }) => sql)
+            .join('');
+        }
+
+        if (isArray) {
+          target = `array(SELECT jsonb_array_elements_text(${target}))::${rules[name].type}`;
+        }
+
+        set.push(`"${name}"=${target}`);
+      }
+    }
+
+    if (resultIndex) {
+      // Self-join so RETURNING can expose the pre-update row as __source__
+      // (used later for access checks against the original values).
+      const where = Object.keys(id)
+        .map(
+          (name, i) =>
+            `"target".${ids[i]} AND "source".${name} = "target".${name}`
+        )
+        .join(' AND ');
+
+      this.queries.push(
+        `UPDATE ${tableName} AS "target"
+        SET ${set.join(', ')}
+        FROM ${tableName} AS "source"
+        WHERE ${where}
+        RETURNING "target".*, to_json("source".*) AS "__source__"`
+      );
+    } else {
+      this.queries.push(
+        `UPDATE ${tableName} AS "target" SET ${set.join(', ')} WHERE ${ids.join(
+          ' AND '
+        )}`
+      );
+    }
+  }
+
+  // Builds the DELETE for one entry.
+  queryDelete({ id, resultIndex }) {
+    const ids = [];
+    for (const name of Object.keys(id)) {
+      ids.push(`"${name}" = $${this.values.push(id[name])}`);
+    }
+    this.queries.push(
+      `DELETE FROM ${this.model.tableName} WHERE ${ids.join(' AND ')}${
+        resultIndex ? ' RETURNING *' : ''
+      }`
+    );
+  }
+
+  // Creates a new entry for the row addressed by `path`, registers it in the
+  // id tree and resolves its model-level access rule.
+  add({ op, path, [PARENT]: parent }) {
+    const id = { ...parent?.ids };
+    const payload = { ...id };
+    const { model, keys } = this;
+    const method = METHOD_NAMES[op];
+
+    const entry = {
+      id,
+      model,
+      method,
+      payload,
+      values: {},
+      setPayload,
+      resultIndex: 0,
+      isValidateResult: false,
+      parent: parent || this.model.parent,
+      permissions: new Set(),
+      methodHooks: model.hooks?.[method],
+      modelAccess: model.access?.[method],
+    };
+
+    // Fallback chain: parent's access rule, else owner access when the model
+    // has a uid column, else admin-only.
+    if (!entry.modelAccess) {
+      const parentAccess = parent?.method
+        ? parent?.model?.access[parent?.method]
+        : parent?.model?.access.update;
+
+      if (parentAccess) {
+        entry.modelAccess = parentAccess;
+      } else if (model.rules.uid) {
+        entry.modelAccess = allowOwner;
+      } else {
+        entry.modelAccess = allowAdmin;
+      }
+    }
+
+    // Walk/extend the id tree one key segment at a time; the leaf holds the
+    // entry so later patches for the same row find it via get().
+    for (let i = 0, map = this.tree; ; ) {
+      const key = keys[i];
+      const value = path[i];
+
+      if (value == null) {
+        throw new UnProcessable(`Invalid "${key}" value`);
+      }
+
+      id[key] = value;
+      entry.setPayload(key, value, method === 'update');
+
+      if (++i === this.keysLength) {
+        map[value] = entry;
+        break;
+      } else {
+        map = map[value] ?? (map[value] = create(null));
+      }
+    }
+
+    this.entries.push(entry);
+    return entry;
+  }
+
+  // Translates a patch aimed at a related model into a patch in that model's
+  // own path space (remapping id columns via relation.using) and queues it.
+  setRelationEntry(relation, value, { op, path }) {
+    const { keys } = this;
+    const relMap = relation.using;
+    const patch = {
+      op,
+      value,
+      path: [],
+      [PARENT]: {
+        ids: {},
+        model: this.model,
+        using: relMap,
+        method: this.get(path)?.method,
+      },
+    };
+
+    const relKeys = toArray(relation.model.idColumn);
+
+    for (const key of Object.keys(relMap)) {
+      const name = relMap[key];
+      const index = relKeys.indexOf(name);
+      const value = path[keys.indexOf(key)];
+
+      // Columns not part of the relation's id become parent-provided ids.
+      if (index === -1) {
+        patch[PARENT].ids[name] = value;
+      } else {
+        patch.path[index] = value;
+      }
+    }
+
+    // Fill remaining id slots from the tail of the original path, then carry
+    // over any deeper (column/json) segments.
+    let n = this.keysLength + 1;
+    for (let i = 0; i < relKeys.length; i++) {
+      if (n < path.length) {
+        patch.path[i] ??= path[n++];
+      }
+    }
+
+    if (n < path.length) {
+      patch.path.push(...path.slice(n));
+    }
+    if (!this.relations) {
+      this.relations = new Map();
+    }
+
+    if (this.relations.has(relation.model)) {
+      this.relations.get(relation.model).push(patch);
+    } else {
+      this.relations.set(relation.model, [patch]);
+    }
+  }
+
+  // Splits a whole-row value object into per-column payload entries and
+  // relation patches; silently ignores keys that match neither.
+  filterFields(entry, patch) {
+    const { value } = patch;
+    const { rules, relations } = this.model;
+
+    for (const key of Object.keys(value))
+      if (relations?.[key]?.model) {
+        this.setRelationEntry(relations[key], value[key], patch);
+      } else if (hasOwnProperty.call(rules, key)) {
+        if (hasOwnProperty.call(entry.id, key) === false) {
+          entry.setPayload(key, value[key]);
+        }
+      }
+  }
+
+  // Recursively expands a patch whose path is shorter than the id length by
+  // treating the value object's keys as further path segments.
+  adds(patch) {
+    if (this.keysLength === patch.path.length) {
+      const entry = this.add(patch);
+
+      if (patch.value) {
+        this.filterFields(entry, patch);
+      }
+    } else if (isObject(patch.value))
+      for (const key of Object.keys(patch.value)) {
+        this.adds({
+          ...patch,
+          path: [...patch.path, key],
+          value: patch.value[key],
+        });
+      }
+    else {
+      throw new UnProcessable(`Values missing from path`);
+    }
+  }
+
+  // Looks up the entry for a path by walking the id tree; returns undefined
+  // when no entry was registered for those id segments.
+  get(path) {
+    let i = 0;
+    let entry = this.tree;
+
+    do {
+      entry = entry[path[i]];
+    } while (entry && ++i < this.keysLength);
+
+    return entry;
+  }
+
+  // Ensures the entry's CTE result is projected in the final SELECT; for
+  // entries with a parent chain the parents' rows are joined and merged in.
+  // Idempotent: a second call reuses the assigned resultIndex.
+  setReturning(entry) {
+    if (entry.resultIndex === 0) {
+      const index = this.queries.length;
+      let { parent } = entry;
+
+      if (parent) {
+        let num = 0;
+        let join = '';
+        let select = '';
+        let relation = `"${index}"`;
+
+        do {
+          num++;
+          const { using } = parent;
+          const { tableName } = parent.model;
+
+          const on = Object.keys(using)
+            .map(key => `${relation}."${using[key]}"=_${num}."${key}"`)
+            .join(', ');
+
+          relation = '_' + num;
+          join += ` JOIN ${tableName} _${num} ON ${on}`;
+          select += `to_jsonb(_${num}.*) || `;
+        } while ((parent = parent.model?.parent));
+
+        entry.resultIndex = this.returning.length + 1;
+        this.returning.push(
+          `(SELECT ${select}to_jsonb("${index}".*) AS "${entry.resultIndex}" FROM "${index}"${join})`
+        );
+      } else {
+        entry.resultIndex = this.returning.length + 1;
+        this.returning.push(
+          `(SELECT to_json("${index}".*) AS "${entry.resultIndex}" FROM "${index}")`
+        );
+      }
+    }
+    return entry;
+  }
+
+  // Builds and runs the combined statement, then performs post-checks:
+  // result validation, row-level permission checks and hooks.
+  async apply(context, payload) {
+    this.context = context;
+    const { entries, queries } = this;
+
+    const query = {
+      text: '',
+      values: this.values,
+    };
+
+    const hookMap = new Map();
+    const checkEntries = [];
+    const validateEntities = [];
+
+    for (const entry of entries) {
+      const { id, hooks, permissions } = entry;
+
+      if (entry.isValidateResult) {
+        validateEntities.push(this.setReturning(entry));
+      }
+
+      if (permissions.size === 0) permissions.add(entry.modelAccess);
+
+      // Permissions that cannot be decided up-front (pre-check !== true)
+      // are re-checked afterwards against the returned row.
+      for (const permission of permissions)
+        if ((await permission(context, id, true)) !== true)
+          checkEntries.push(this.setReturning(entry));
+
+      // Deduplicate hooks while collecting which result rows feed each one.
+      if (hooks) {
+        const { resultIndex } = this.setReturning(entry);
+        for (const hook of hooks) {
+          const indexes = hookMap.get(hook);
+
+          if (!indexes) {
+            hookMap.set(hook, [resultIndex]);
+          } else if (!indexes.includes(resultIndex)) {
+            indexes.push(resultIndex);
+          }
+        }
+      }
+
+      if (entry.method === 'create') {
+        this.queryInsert(entry);
+      } else if (entry.method === 'update') {
+        this.queryUpdate(entry);
+      } else {
+        this.queryDelete(entry);
+      }
+    }
+
+    // Single statement without RETURNING runs as-is; otherwise each entry's
+    // statement becomes a numbered CTE and the projections are SELECT-ed.
+    if (queries.length === 1 && this.returning.length === 0) {
+      query.text = queries[0];
+    } else {
+      query.text += `WITH "0" AS (${queries[0]})`;
+
+      for (let index = 1; index < entries.length; index++) {
+        query.text += `,\n"${index}" AS(${queries[index]})\n`;
+      }
+
+      if (this.returning.length) {
+        query.text += '\nSELECT ' + this.returning.join(',\n');
+      } else {
+        query.text += `SELECT 0`;
+      }
+    }
+
+    // console.log(entries);
+    // console.log('\n', query.text, '\n', query.values);
+
+    let result;
+    try {
+      result = (await context.db.unsafe(query.text, query.values))?.[0];
+    } catch (error) {
+      // Constraint violations surface as 409 Conflict with details attached;
+      // anything else is normalized by makeError.
+      const type = error?.constraint_name;
+      if (type) {
+        throw Conflict.from(error).putErrors([
+          {
+            type,
+            code: error.code,
+            message: error.detail || error.message,
+          },
+        ]);
+      } else {
+        throw makeError(error);
+      }
+    }
+
+    if (validateEntities.length) {
+      for (const { resultIndex } of validateEntities) {
+        this.model.validator.result.validate(result[resultIndex]);
+      }
+    }
+
+    // Deferred permission checks run against the original row values
+    // (__source__ overrides the updated columns when present).
+    for (const { resultIndex, permissions } of checkEntries) {
+      const entity = result[resultIndex];
+
+      if (!entity) {
+        throw new Conflict(`Not found entity`);
+      }
+
+      const origin = { ...entity, ...entity.__source__ };
+
+      for (const permission of permissions)
+        if ((await permission(context, origin)) !== true)
+          throw new Forbidden('Access denied');
+    }
+
+    for (const [hook, indexes] of hookMap) {
+      const entities = indexes
+        .map(index => result[index] ?? false)
+        .filter(Boolean);
+
+      if (entities.length) {
+        await hook(context, { entities, payload });
+      }
+    }
+  }
+
+  // Runs this model's entries, then recursively executes the queued
+  // relation patches against their own models.
+  async execute(context, payload) {
+    if (this.entries.length) {
+      await this.apply(context, payload);
+    }
+
+    if (this.relations) {
+      for (const [model, patches] of this.relations) {
+        await new Patches(model, patches).execute(context, payload);
+      }
+    }
+  }
+}
diff --git a/src/runtime/db/sql/constants.js b/src/runtime/db/sql/constants.js
new file mode 100644
index 0000000..154a5de
--- /dev/null
+++ b/src/runtime/db/sql/constants.js
@@ -0,0 +1,5 @@
+export const //set result
+ RESULT_ALL = Symbol('Result all rows'),
+ RESULT_ONE = Symbol('Result one row'),
+ RESULT_BLOB = Symbol('Result blob'),
+ RESULT_ONE_VALUE = Symbol('Result one value');
diff --git a/src/runtime/db/sql/helpers/insert.js b/src/runtime/db/sql/helpers/insert.js
new file mode 100644
index 0000000..c497975
--- /dev/null
+++ b/src/runtime/db/sql/helpers/insert.js
@@ -0,0 +1,11 @@
+import { getAllKeys } from '../utils/set.js';
+
+export function insert(model, rows, query) {
+ if (!rows?.length) return;
+
+ const sql = this;
+ const columns = sql('"' + getAllKeys(rows).join('", "') + '"');
+
+ return sql`INSERT INTO ${model}(${columns})
+ SELECT ${columns} FROM json_populate_recordset(NULL::${model}, ${rows}::json) ${query}`;
+}
diff --git a/src/runtime/db/sql/helpers/intl.js b/src/runtime/db/sql/helpers/intl.js
new file mode 100644
index 0000000..b5586b6
--- /dev/null
+++ b/src/runtime/db/sql/helpers/intl.js
@@ -0,0 +1,28 @@
+import { SQL } from '../sql.js';
+
+// Builds a SQL expression reading an intl (per-language JSON) column.
+// `json` selects the `->` operator (json result) instead of `->>` (text).
+export function intl(
+  name,
+  {
+    json = false,
+    language = this.context.language,
+    languages = this.context.languages,
+    isMultiLang = this.context.isMultiLang,
+  } = {}
+) {
+  const operator = json ? '->' : '->>';
+
+  let sql = name;
+
+  // NOTE(review): the guard is `!isMultiLang`, i.e. the coalesce over all
+  // configured languages is built when the context is NOT multi-language,
+  // while a multi-language context returns the raw column. Verify this is
+  // intentional and not an inverted condition.
+  if (!isMultiLang) {
+    sql = `coalesce(${name}${operator}'${language}'`;
+
+    // Fall back to each other configured language, preferred one first.
+    for (let i = 0; i < languages.length; i++)
+      if (languages[i] !== language) {
+        sql += `, ${name}${operator}'${languages[i]}'`;
+      }
+
+    sql += ')';
+  }
+
+  return new SQL([sql]);
+}
diff --git a/src/runtime/db/sql/helpers/records.js b/src/runtime/db/sql/helpers/records.js
new file mode 100644
index 0000000..8f83bd1
--- /dev/null
+++ b/src/runtime/db/sql/helpers/records.js
@@ -0,0 +1,13 @@
+import { SQL } from '../sql.js';
+
+export function records(values, model) {
+ return new SQL(
+ [
+ model
+ ? 'json_populate_recordset(NULL::' + (model.tableName || model) + ', '
+ : 'json_to_recordset(',
+ '::json)',
+ ],
+ [values]
+ );
+}
diff --git a/src/runtime/db/sql/helpers/tsQuery.js b/src/runtime/db/sql/helpers/tsQuery.js
new file mode 100644
index 0000000..0be06ea
--- /dev/null
+++ b/src/runtime/db/sql/helpers/tsQuery.js
@@ -0,0 +1,19 @@
+import { normalizeText } from '#utils/string.js';
+import { nullObject } from '#utils/native.js';
+import { SQL } from '../sql.js';
+
+export function tsQuery(text, { strict = false } = nullObject) {
+ text = text?.trim?.();
+
+ if (text) {
+ const prefix = strict ? '' : ':*';
+
+ text = normalizeText(text)
+ .toLowerCase()
+ .replace(/'/g, `\\'`)
+ .replace(/\\/g, '\\\\')
+ .replace(/\s+/g, `' & '`);
+
+ return new SQL(['', '::tsquery'], [`'` + text + `'` + prefix]);
+ }
+}
diff --git a/src/runtime/db/sql/helpers/update.js b/src/runtime/db/sql/helpers/update.js
new file mode 100644
index 0000000..493ec02
--- /dev/null
+++ b/src/runtime/db/sql/helpers/update.js
@@ -0,0 +1,25 @@
+import { SQL } from '../sql.js';
+import { getAllKeys } from '../utils/set.js';
+
+export function update(model, rows) {
+ if (!rows?.length) return;
+
+ const sql = this;
+ const fields = [];
+ const columns = getAllKeys(rows);
+ const keys = Array.isArray(model.idColumn)
+ ? model.idColumn
+ : [model.idColumn];
+
+ for (let i = 0; i < columns.length; i++)
+ if (keys.includes(columns[i]) === false)
+ fields.push(`"${columns[i]}"=_from."${columns[i]}"`);
+
+ for (let i = 0; i < keys.length; i++)
+ keys[i] = `_from."${keys[i]}"=_to."${keys[i]}"`;
+
+ return sql`UPDATE ${model} AS _to SET
+ ${new SQL(fields.join(',\n'))}
+ FROM json_populate_recordset(NULL::${model}, ${rows}::json) AS _from
+ WHERE ${new SQL(keys.join(' AND '))}`;
+}
diff --git a/src/runtime/db/sql/helpers/upsert.js b/src/runtime/db/sql/helpers/upsert.js
new file mode 100644
index 0000000..54f4d01
--- /dev/null
+++ b/src/runtime/db/sql/helpers/upsert.js
@@ -0,0 +1,5 @@
+import { SQL } from '../sql.js';
+
+export function upsert(model, rows, query = new SQL('ON CONFLICT DO NOTHING')) {
+ return this.insert(model, rows, query);
+}
diff --git a/src/runtime/db/sql/methods/blob.js b/src/runtime/db/sql/methods/blob.js
new file mode 100644
index 0000000..c316f77
--- /dev/null
+++ b/src/runtime/db/sql/methods/blob.js
@@ -0,0 +1,20 @@
+import { RESULT_ALL, RESULT_BLOB } from '../constants.js';
+
+export function blob() {
+ if (this.result === RESULT_ALL) {
+ this.source[0] =
+ `SELECT json_build_object('data', (SELECT json_agg(_.*) FROM (` +
+ this.source[0];
+
+ this.source[this.source.length - 1] += `) _)) AS "0"`;
+ } else {
+ this.source[0] =
+ `SELECT json_build_object('data', (SELECT to_json(_.*) FROM (` +
+ this.source[0];
+
+ this.source[this.source.length - 1] += `) _ LIMIT 1)) AS "0"`;
+ }
+
+ this.result = RESULT_BLOB;
+ return this;
+}
diff --git a/src/runtime/db/sql/methods/build.js b/src/runtime/db/sql/methods/build.js
new file mode 100644
index 0000000..f10f25e
--- /dev/null
+++ b/src/runtime/db/sql/methods/build.js
@@ -0,0 +1,76 @@
+import { isFunction } from '#utils/native.js';
+
+import { SQL } from '../sql.js';
+import { join } from '../utils/concat.js';
+
+// Clause buckets for the composable query builder: each clause kind knows
+// its SQL prefix and the separator used to join multiple fragments.
+const parts = {
+  with: { start: '\nWITH ', separator: ',' },
+  withRecursive: { start: '\nWITH RECURSIVE ', separator: ',' },
+  select: { start: '\nSELECT ', separator: ',' },
+  from: { start: '\n', separator: '\n' },
+  where: { start: '\nWHERE ', separator: ' AND ' },
+  groupBy: { start: '\nGROUP BY ', separator: ',' },
+  having: { start: '\nHAVING ', separator: ' AND ' },
+  orderBy: { start: '\nORDER BY ', separator: ',' },
+};
+
+// Leading SQL keyword -> clause bucket. All JOIN variants share the FROM
+// bucket because their keyword must stay inside the fragment text.
+const commands = {
+  'WITH RECURSIVE': parts.withRecursive,
+  WITH: parts.with,
+  SELECT: parts.select,
+  FROM: parts.from,
+  JOIN: parts.from,
+  'LEFT JOIN': parts.from,
+  'INNER JOIN': parts.from,
+  'RIGHT JOIN': parts.from,
+  'CROSS JOIN': parts.from,
+  'FULL JOIN': parts.from,
+  WHERE: parts.where,
+  'GROUP BY': parts.groupBy,
+  HAVING: parts.having,
+  'ORDER BY': parts.orderBy,
+};
+
+const keys = Object.keys(commands);
+// Assign each bucket its index into the `queries` accumulator used by build.
+const values = Object.values(parts).map((value, index) => {
+  value.index = index;
+  return value;
+});
+
+// Tagged-template factory: classifies a fragment by its leading keyword and
+// files it into the matching bucket (stripping the keyword, except for the
+// FROM/JOIN family). Non-template calls fall through to a plain SQL.
+const makeSQL = (queries, strings, params) => {
+  if (strings.raw) {
+    const string = strings[0].trimStart();
+
+    for (const key of keys)
+      if (string.startsWith(key)) {
+        if (commands[key] !== parts.from) {
+          // Copy before editing: template string arrays are frozen/shared.
+          strings = [...strings];
+          strings[0] = string.slice(key.length).trimStart();
+        }
+
+        const query = new SQL(strings, params);
+        queries[commands[key].index].push(query);
+        return query;
+      }
+  }
+
+  return new SQL(strings, params);
+};
+
+// Runs every action with a bucket-collecting `sql` tag, then stitches the
+// collected fragments onto this query in canonical clause order.
+export function build(params, actions) {
+  const queries = [[], [], [], [], [], [], [], []];
+  const sql = (strings, ...params) => makeSQL(queries, strings, params);
+
+  // Shadow context whose sql tag files fragments into buckets while keeping
+  // the helper methods of the original sql available.
+  const context = Object.create(this.context);
+  context.sql = Object.assign(sql, this.context.sql);
+
+  for (const action of actions) {
+    if (isFunction(action)) action(context, params);
+  }
+
+  for (let i = 0; i < queries.length; i++)
+    if (queries[i].length)
+      join(this, values[i].start, queries[i], values[i].separator);
+
+  return this;
+}
diff --git a/src/runtime/db/sql/methods/expected.js b/src/runtime/db/sql/methods/expected.js
new file mode 100644
index 0000000..fe4745b
--- /dev/null
+++ b/src/runtime/db/sql/methods/expected.js
@@ -0,0 +1,5 @@
+import { objectContaining } from '#utils/assert.js';
+
+export async function expected(data) {
+ return objectContaining(await this, data);
+}
diff --git a/src/runtime/db/sql/methods/explain.js b/src/runtime/db/sql/methods/explain.js
new file mode 100644
index 0000000..493e3e8
--- /dev/null
+++ b/src/runtime/db/sql/methods/explain.js
@@ -0,0 +1,32 @@
+// Debug helper: runs EXPLAIN ANALYZE on this query, prints either a short
+// cost/time summary or (isAll) uploads the plan to explain-postgresql.com
+// and prints the share link, then executes the query normally and returns
+// its result.
+export async function explain(isAll = false) {
+  const { sql } = this.context;
+
+  // Reuse this query's source/values, prefixing the EXPLAIN directive.
+  const [{ 'QUERY PLAN': plans }] = await sql(
+    [
+      'EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ' + this.source[0],
+      ...this.source.slice(1),
+    ],
+    ...this.values
+  );
+
+  if (isAll) {
+    // Uploads the plan to a third-party visualizer (network side effect);
+    // `private: true` presumably keeps the plan unlisted — confirm.
+    const { url } = await fetch('https://explain-postgresql.com/explain', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        private: true,
+        plan: JSON.stringify(plans[0]),
+        query: this.toString().trim(),
+      }),
+    });
+
+    console.log('Explain: ', url + '#context');
+  } else {
+    console.log('\n');
+    console.log('\tCost:', plans[0].Plan['Total Cost']);
+    console.log('\tTime:', plans[0]['Execution Time'], 'ms');
+    console.log('\n');
+  }
+
+  // Run the query itself (the object is thenable) and hand back its result.
+  return await this;
+}
diff --git a/src/runtime/db/sql/methods/groupBy.js b/src/runtime/db/sql/methods/groupBy.js
new file mode 100644
index 0000000..6a34442
--- /dev/null
+++ b/src/runtime/db/sql/methods/groupBy.js
@@ -0,0 +1,9 @@
+export function groupBy(...params) {
+ params = params.filter(Boolean);
+
+ if (params.length) {
+ this.source[this.source.length - 1] += '\nGROUP BY ';
+ return this.sql(params);
+ }
+ return this;
+}
diff --git a/src/runtime/db/sql/methods/having.js b/src/runtime/db/sql/methods/having.js
new file mode 100644
index 0000000..cc85759
--- /dev/null
+++ b/src/runtime/db/sql/methods/having.js
@@ -0,0 +1,5 @@
+import { join } from '../utils/concat.js';
+
+export function having(...params) {
+ return join(this, '\nHAVING ', params.filter(Boolean), ' AND ');
+}
diff --git a/src/runtime/db/sql/methods/log.js b/src/runtime/db/sql/methods/log.js
new file mode 100644
index 0000000..bdecba5
--- /dev/null
+++ b/src/runtime/db/sql/methods/log.js
@@ -0,0 +1,7 @@
+export function log() {
+ console.log('\n');
+ console.log(this.toString().trim());
+ console.log(this.values);
+ console.log('\n');
+ return this;
+}
diff --git a/src/runtime/db/sql/methods/orderBy.js b/src/runtime/db/sql/methods/orderBy.js
new file mode 100644
index 0000000..2aaffc3
--- /dev/null
+++ b/src/runtime/db/sql/methods/orderBy.js
@@ -0,0 +1,9 @@
+export function orderBy(...params) {
+ params = params.filter(Boolean);
+
+ if (params.length) {
+ this.source[this.source.length - 1] += '\nORDER BY ';
+ return this.sql(params);
+ }
+ return this;
+}
diff --git a/src/runtime/db/sql/methods/paginate.js b/src/runtime/db/sql/methods/paginate.js
new file mode 100644
index 0000000..43944b5
--- /dev/null
+++ b/src/runtime/db/sql/methods/paginate.js
@@ -0,0 +1,36 @@
+import { RESULT_ONE_VALUE } from '../constants.js';
+import { sql } from '../sql.js';
+
+// Default page size when the caller passes none (or an invalid one).
+const LIMIT = 10;
+
+// Renders extra named sub-queries as additional json_build_object pairs:
+// `, 'key',(<query>)` for each entry.
+const addQueries = queries => {
+  const query = sql`,`;
+
+  for (const key of Object.keys(queries))
+    query
+      .sql("'" + key + "',(")
+      .sql(queries[key])
+      .sql(')');
+
+  return query;
+};
+
+// Wraps the current query in a pagination envelope: the original query
+// becomes the "all" CTE and the result is a single JSON value with page,
+// limit, total count and the requested page of entities. Optional `queries`
+// adds extra top-level keys computed from their own sub-queries.
+export function paginate({ page = 1, limit = LIMIT }, queries) {
+  // Clamp invalid values rather than erroring out.
+  if (page < 1) page = 1;
+  if (limit < 1) limit = LIMIT;
+
+  const offset = page > 1 ? (page - 1) * limit : 0;
+
+  this.result = RESULT_ONE_VALUE;
+
+  return this.overwrite(
+    sql`WITH "all" AS(${this})
+    SELECT json_build_object(
+      'page', ${page}::int,
+      'limit', ${limit}::int,
+      'count', (SELECT count(*)::int FROM "all"),
+      'entities', (SELECT json_agg(_.*) FROM (SELECT * FROM "all" LIMIT ${limit} OFFSET ${offset}) _)
+      ${queries && addQueries(queries)}
+    ) AS "0"`
+  );
+}
diff --git a/src/runtime/db/sql/methods/throwIfNotFound.js b/src/runtime/db/sql/methods/throwIfNotFound.js
new file mode 100644
index 0000000..4ff858f
--- /dev/null
+++ b/src/runtime/db/sql/methods/throwIfNotFound.js
@@ -0,0 +1,9 @@
+import { NotFound } from '#exceptions/NotFound.js';
+
+export async function throwIfNotFound(message) {
+ const response = await this;
+
+ if (response == null) throw new NotFound(message);
+
+ return response;
+}
diff --git a/src/runtime/db/sql/methods/where.js b/src/runtime/db/sql/methods/where.js
new file mode 100644
index 0000000..147b8c5
--- /dev/null
+++ b/src/runtime/db/sql/methods/where.js
@@ -0,0 +1,32 @@
+import { SQL } from '../sql.js';
+import { SetSQL } from '../utils/set.js';
+import { isArray, isObject } from '../../../utils/native.js';
+
+// Clause options shared by every where() call: prefix once, AND-join after.
+const options = {
+  startWith: '\nWHERE ',
+  separator: ' AND ',
+};
+
+// Appends WHERE conditions. Accepts SQL fragments (used verbatim) and plain
+// objects whose entries become column conditions: null -> IS NULL, array ->
+// = ANY(...), other values -> equality. `undefined` entries are skipped so
+// optional filters can be passed through untouched.
+export function where(...params) {
+  const sql = new SetSQL(this, options);
+
+  for (const param of params)
+    if (isObject(param))
+      if (param instanceof SQL) sql.set('', param);
+      else
+        for (const key in param) {
+          const value = param[key];
+
+          if (value !== undefined) {
+            if (value === null) {
+              sql.set(key + ' IS NULL');
+            } else if (isArray(value)) {
+              sql.set(key + ' = ANY(', value, ')');
+            } else {
+              sql.set(key + ' = ', value);
+            }
+          }
+        }
+
+  return this;
+}
diff --git a/src/runtime/db/sql/query.js b/src/runtime/db/sql/query.js
new file mode 100644
index 0000000..039254a
--- /dev/null
+++ b/src/runtime/db/sql/query.js
@@ -0,0 +1,109 @@
+import { noop } from '#utils/native.js';
+import { makeError } from '../utils/errors.js';
+import { SQL } from './sql.js';
+import { intl } from './helpers/intl.js';
+import { insert } from './helpers/insert.js';
+import { upsert } from './helpers/upsert.js';
+import { update } from './helpers/update.js';
+import { tsQuery } from './helpers/tsQuery.js';
+import { records } from './helpers/records.js';
+import { blob } from './methods/blob.js';
+import { build } from './methods/build.js';
+import { where } from './methods/where.js';
+import { explain } from './methods/explain.js';
+import { groupBy } from './methods/groupBy.js';
+import { having } from './methods/having.js';
+import { orderBy } from './methods/orderBy.js';
+import { paginate } from './methods/paginate.js';
+import { expected } from './methods/expected.js';
+import { throwIfNotFound } from './methods/throwIfNotFound.js';
+import {
+ RESULT_ALL,
+ RESULT_ONE,
+ RESULT_BLOB,
+ RESULT_ONE_VALUE,
+} from './constants.js';
+
/**
 * Executable SQL query bound to a request context. `Query` is *thenable*:
 * awaiting a Query sends it through the context's db connection, so queries
 * can be composed by chaining and executed transparently with `await`.
 */
export class Query extends SQL {
  // How send() shapes the driver result (all rows / first row / blob / scalar).
  result = RESULT_ALL;

  constructor(context, strings, params) {
    super(strings, params);
    this.context = context;
  }

  // Executes the query and shapes the result according to `this.result`.
  // Driver errors are normalised through makeError().
  async send() {
    try {
      const query = this.context.db.unsafe(this.toString(), this.values);

      switch (this.result) {
        case RESULT_ALL:
          return await query;
        case RESULT_ONE:
          return (await query)[0];
        case RESULT_BLOB:
          return (await query.raw())[0]?.[0];
        case RESULT_ONE_VALUE:
          // Scalar queries select their value under the column alias "0".
          return (await query)[0]?.['0'];
      }
      throw new Error('Wrong setting query result');
    } catch (error) {
      throw makeError(error);
    }
  }

  // Thenable hook: each access of `.then` starts a fresh send(), so awaiting
  // the same Query instance twice issues two requests.
  get then() {
    const promise = this.send();
    return (resolve, reject) => promise.then(resolve, reject);
  }

  // Executes the query handling only rejection (defaults to swallowing it).
  catch(reject = noop) {
    return this.send().catch(reject);
  }

  // Replaces this query's text/values with another fragment's — used by
  // helpers (e.g. paginate) that wrap the original statement.
  overwrite({ source, values }) {
    this.source = source;
    this.values = values;
    return this;
  }

  // Resolve to the first row only.
  findOne() {
    this.result = RESULT_ONE;
    return this;
  }

  // Resolve to the first row's scalar column "0".
  findOneValue() {
    this.result = RESULT_ONE_VALUE;
    return this;
  }
}
+
// `sql.build(...)`: starts an empty Query for this context and delegates
// to the instance build() method.
function buildStatic(params, actions) {
  const query = new Query(this.context);
  return query.build(params, actions);
}
+
// Chainable query methods are kept in their own modules and attached here,
// after the class definition, so they can be developed and imported separately.
Query.prototype.blob = blob;
Query.prototype.build = build;
Query.prototype.where = where;
Query.prototype.explain = explain;
Query.prototype.groupBy = groupBy;
Query.prototype.orderBy = orderBy;
Query.prototype.having = having;
Query.prototype.paginate = paginate;
Query.prototype.expected = expected;
Query.prototype.throwIfNotFound = throwIfNotFound;
+
/**
 * Builds the per-context `sql` tagged-template function and decorates it
 * with the static helpers (intl, insert, upsert, …) plus the context itself.
 */
export const factory = context => {
  const sql = (strings, ...params) => new Query(context, strings, params);

  return Object.assign(sql, {
    intl,
    insert,
    upsert,
    update,
    tsQuery,
    records,
    build: buildStatic,
    context,
  });
};
diff --git a/src/runtime/db/sql/sql.js b/src/runtime/db/sql/sql.js
new file mode 100644
index 0000000..8adeb60
--- /dev/null
+++ b/src/runtime/db/sql/sql.js
@@ -0,0 +1,73 @@
+import { isArray, isFunction, nullArray } from '../../utils/native.js';
+import { log } from './methods/log.js';
+import { concat, injection, join } from './utils/concat.js';
+
/**
 * Composable SQL fragment. `source` holds the text chunks and `values` the
 * bound parameters interleaved between them: values[i] sits between
 * source[i] and source[i + 1], rendered as the $<i+1> placeholder.
 */
export class SQL {
  source = [];
  values = [];

  // Accepts either a tagged-template call (strings array + params) or a raw
  // string fragment; `undefined` becomes an empty fragment.
  constructor(strings, params = nullArray) {
    if (isArray(strings)) {
      this.source.push(strings[0]);
      // `strings[++i]` both advances the loop and picks the text chunk that
      // follows params[i] — the loop header deliberately has no increment.
      for (let i = 0; i < params.length; ) this.set(params[i], strings[++i]);
    } else {
      this.source.push(strings === undefined ? '' : strings);
    }
  }

  /**
   * Appends one parameter plus its trailing text chunk:
   * - SQL fragment: merged inline, its values become ours;
   * - function with `tableName`: inlined as that identifier;
   * - undefined: only the text is appended;
   * - anything else: pushed as a bound value (new $n placeholder).
   */
  set(param, string) {
    const { source, values } = this;
    const index = source.length - 1;

    if (param instanceof SQL) {
      source[index] += param.source[0];

      if (param.values.length) {
        source.push(...param.source.slice(1));
        source[source.length - 1] += string;
        values.push(...param.values);
      } else {
        // Parameter-less fragment: pure text, keep a single chunk.
        source[index] += string;
      }
    } else if (isFunction(param)) {
      if (param.tableName) {
        source[index] += param.tableName + string;
      } else {
        throw new Error('Wrong SQL param type function');
      }
    } else if (param === undefined) {
      source[index] += string;
    } else {
      values.push(param);
      source.push(string);
    }
  }

  // Tagged-template append; calling with a plain array and no params joins
  // its items as raw (unescaped) text, a plain string is appended verbatim.
  sql(strings, ...params) {
    if (isArray(strings)) {
      if (params.length) {
        concat(this, strings, params);
      } else {
        injection(this, strings, ', ');
      }
    } else if (strings !== undefined) {
      this.source[this.source.length - 1] += strings;
    }
    return this;
  }

  // Appends the truthy `queries`, separated by `separator`, prefixed by `start`.
  join(queries, separator = ',', start = '') {
    return join(this, start, queries.filter(Boolean), separator);
  }

  // Renders the fragment with $1..$n positional placeholders.
  toString() {
    const { source } = this;
    let text = source[0];
    for (let i = 1; i < source.length; i++) text += '$' + i + source[i];
    return text;
  }
}
+
// Debug helper attached after the class body — presumably to avoid a
// circular import with the log module; confirm before inlining it.
SQL.prototype.log = log;

// Bare fragment builder: composable but not bound to a context (not executable).
export const sql = (strings, ...params) => new SQL(strings, params);
diff --git a/src/runtime/db/sql/utils/concat.js b/src/runtime/db/sql/utils/concat.js
new file mode 100644
index 0000000..7f5a03a
--- /dev/null
+++ b/src/runtime/db/sql/utils/concat.js
@@ -0,0 +1,44 @@
/**
 * Tagged-template append: merges `strings[0]` into the last text chunk of
 * `sql`, then interleaves each param with the text chunk that follows it.
 */
export const concat = (sql, strings, params) => {
  sql.source[sql.source.length - 1] += strings[0];

  for (let i = 0; i < params.length; i += 1) {
    sql.set(params[i], strings[i + 1]);
  }
};
+
/**
 * Joins `strings` into `sql` as raw text, separated by `separator`.
 * Empty/undefined entries are skipped. Entries that share the `sql` method
 * are recognised as SQL fragments and merged with their bound values.
 */
export const injection = (sql, strings, separator = ', ') => {
  let prefix = '';

  for (const item of strings) {
    if (item === undefined || item === '') continue;

    const last = sql.source.length - 1;

    if (item?.sql === sql.sql) {
      // SQL fragment: merge its text chunks and parameters.
      sql.values.push(...item.values);
      sql.source[last] += prefix + item.source[0];
      sql.source.push(...item.source.slice(1));
    } else {
      sql.source[last] += prefix + item;
    }

    prefix = separator;
  }
};
+
/**
 * Appends the SQL fragments in `queries` to `sql`, separated by `separator`
 * and prefixed (once) by `start`. Returns `sql` for chaining.
 *
 * Fix: the previous do/while continued on `(sql.source[n] += separator)`,
 * which is falsy when both the accumulated chunk and the separator are empty
 * strings — silently dropping the remaining queries. The explicit loop below
 * has no such edge case and reads linearly.
 */
export const join = (sql, start, queries, separator) => {
  if (queries.length === 0) return sql;

  let last = sql.source.length - 1;
  sql.source[last] += start;

  for (let q = 0; q < queries.length; q++) {
    if (q > 0) sql.source[last] += separator;

    const { source, values } = queries[q];
    sql.source[last] += source[0];

    // Interleave each bound value with the text chunk that follows it.
    for (let i = 0; i < values.length; i++) sql.set(values[i], source[i + 1]);

    last = sql.source.length - 1;
  }

  return sql;
};
diff --git a/src/runtime/db/sql/utils/set.js b/src/runtime/db/sql/utils/set.js
new file mode 100644
index 0000000..91b1af9
--- /dev/null
+++ b/src/runtime/db/sql/utils/set.js
@@ -0,0 +1,31 @@
// Union of the own-key names across all objects, first-seen order preserved.
export const getAllKeys = values => [...new Set(values.flatMap(Object.keys))];
/**
 * Accumulates clause items onto an SQL fragment: the first `set()` opens
 * the clause with `startWith`, later ones join with `separator`.
 */
export class SetSQL {
  sql = null;
  startWith = '';
  separator = '';
  isStarted = false;

  constructor(sql, { startWith, separator }) {
    this.sql = sql;
    this.startWith = startWith;
    this.separator = separator;
  }

  /**
   * Appends one clause item: `startString`, then (when `value` is defined)
   * the bound `value` followed by `endString`.
   */
  set(startString, value, endString = '') {
    const { sql, isStarted } = this;
    const last = sql.source.length - 1;
    const lead = isStarted ? this.separator : this.startWith;

    this.isStarted = true;
    sql.source[last] += lead + startString;

    if (value !== undefined) {
      sql.set(value, endString);
    }
  }
}
diff --git a/src/runtime/db/sql/utils/string.js b/src/runtime/db/sql/utils/string.js
new file mode 100644
index 0000000..6b2ac18
--- /dev/null
+++ b/src/runtime/db/sql/utils/string.js
@@ -0,0 +1,3 @@
// Escapes a double-quoted SQL identifier by doubling embedded quotes.
export const escapeName = string => String(string).split('"').join('""');

// Quotes a string literal, doubling single quotes and backslashes.
export const escapeString = string =>
  `'${string.replaceAll("'", "''").replaceAll('\\', '\\\\')}'`;
diff --git a/src/runtime/db/utils/csv.js b/src/runtime/db/utils/csv.js
new file mode 100644
index 0000000..5d9d0bb
--- /dev/null
+++ b/src/runtime/db/utils/csv.js
@@ -0,0 +1,48 @@
+import { once } from 'events';
+import { randomUUID } from 'crypto';
+import { copyFrom } from '#db/copy.js';
+import { trimQuote } from '#utils/string.js';
+import { getFileStream, getFileLine } from '#utils/file.js';
+
/**
 * Streams a CSV file into `table` via PostgreSQL COPY … FROM STDIN.
 * The column list is taken from the file's first (header) line; the COPY
 * itself also runs with HEADER true so that line is not loaded as data.
 * Resolves with `{ columns }` so callers can build follow-up statements.
 */
export const copyFromFile = async (context, table, path) => {
  const columns = (await getFileLine(path)).split(',').map(trimQuote);

  const stream = await copyFrom(
    context,
    `COPY ${table} ("${columns.join('","')}") FROM STDIN WITH(
      FORMAT csv, HEADER true, DELIMITER ','
    )`
  );

  // Wait for the whole file to flush into the COPY stream.
  await once(getFileStream(path).pipe(stream), 'finish');
  return { columns };
};
+
/**
 * Bulk-upserts a CSV file into `table` through a temp table.
 *
 * With `keys`, conflicting rows are updated column-by-column
 * (`ON CONFLICT … DO UPDATE`); without keys, conflicts are ignored.
 *
 * Fix: the temp table is now dropped in a `finally` block, so a failed COPY
 * or INSERT no longer leaks temp tables on pooled connections.
 */
export const upsertFromFile = async (context, table, path, keys) => {
  const { sql } = context;
  const tempTable = sql('"' + randomUUID() + '"');

  await sql`CREATE TEMP TABLE ${tempTable} (LIKE ${table})`;

  try {
    const { columns } = await copyFromFile(context, tempTable, path);

    const sqlColumns = sql('"' + columns.join('", "') + '"');

    const query = sql`
    INSERT INTO ${table} (${sqlColumns})
    SELECT ${sqlColumns} FROM ${tempTable} ON CONFLICT `;

    if (keys?.length) {
      // Update every imported column except the conflict keys themselves.
      const setColumns = columns
        .filter(name => !keys.includes(name))
        .map(name => `"${name}"=EXCLUDED."${name}"`)
        .join(', ');

      query.sql`("${sql(keys.join('", "'))}") DO UPDATE SET ${sql(setColumns)}`;
    } else {
      query.sql`DO NOTHING`;
    }

    await query;
  } finally {
    await sql`DROP TABLE ${tempTable}`;
  }
};
diff --git a/src/runtime/db/utils/errors.js b/src/runtime/db/utils/errors.js
new file mode 100644
index 0000000..f5e1136
--- /dev/null
+++ b/src/runtime/db/utils/errors.js
@@ -0,0 +1,90 @@
+import { ERROR_FORMATTED, ERROR_REPORTED } from '#utils/native.js';
+import { red, blue, bold } from '#utils/console.js';
+import { CODES } from '#db/constants.js';
+
+import { Timeout } from '#exceptions/Timeout.js';
+import { Conflict } from '#exceptions/Conflict.js';
+import { Forbidden } from '#exceptions/Forbidden.js';
+import { Unavailable } from '#exceptions/Unavailable.js';
+import { UnProcessable } from '#exceptions/UnProcessable.js';
+
+const filterSource = line => line.includes('file:///');
+
// Maps connection failures and PostgreSQL error codes to the HTTP exception
// class that should be surfaced to the client (unknown codes fall back to
// plain Error in makeError).
const exceptions = {
  ECONNREFUSED: Unavailable,
  CONNECTION_CLOSED: Unavailable,
  CONNECTION_DESTROYED: Timeout,
  CONNECT_TIMEOUT: Timeout,

  [CODES.INSUFFICIENT_PRIVILEGE]: Forbidden,

  [CODES.UNIQUE_VIOLATION]: Conflict,
  [CODES.RESTRICT_VIOLATION]: Conflict,
  [CODES.FOREIGN_KEY_VIOLATION]: Conflict,
  [CODES.INTEGRITY_CONSTRAINT_VIOLATION]: Conflict,

  [CODES.CHECK_VIOLATION]: UnProcessable,
  [CODES.NOT_NULL_VIOLATION]: UnProcessable,
  [CODES.INVALID_DATETIME_FORMAT]: UnProcessable,
  [CODES.DATETIME_FIELD_OVERFLOW]: UnProcessable,
  [CODES.NUMERIC_VALUE_OUT_OF_RANGE]: UnProcessable,
  [CODES.INVALID_TEXT_REPRESENTATION]: UnProcessable,
  [CODES.INVALID_BINARY_REPRESENTATION]: UnProcessable,
};
+
/**
 * Converts a low-level driver error into the matching HTTP exception
 * (falling back to plain Error), attaching a colourised, position-annotated
 * message for the log. Errors without a `code` pass through unchanged.
 *
 * Fixes: deprecated `trimLeft`/`trimRight` replaced by the standard
 * `trimStart`/`trimEnd`; never-reassigned `max`/`length` are now const;
 * the wrapping if/else became an early-return guard.
 */
export const makeError = e => {
  if (!e?.code) return e;

  e[ERROR_REPORTED] = true;

  let { message } = e;
  const error = new (exceptions[e.code] ?? Error)(message);

  error[ERROR_FORMATTED] = true;

  message = red(bold('DB Error')) + '\n' + message;

  if (e.detail) message += '\n' + e.detail;
  if (e.where) message += '\n' + e.where;
  if (e.hint) message += '\n' + e.hint;

  // When the driver reports a character position inside the query text,
  // show a one-line excerpt around it with a caret run under the bad token.
  if (e.position && e.query) {
    const max = 60;

    let left = e.query
      .slice(0, e.position - 1)
      .replace(/\s+/g, ' ')
      .trimStart();

    let right = e.query
      .slice(e.position - 1)
      .replace(/\s+/g, ' ')
      .trimEnd();

    // Trim both sides to ~max chars, cutting at word boundaries.
    if (left.length > max) {
      const chunk = left.slice(0, left.length - max);
      left = left.slice(chunk.lastIndexOf(' ') + 1);
    }
    if (right.length > max) {
      const chunk = right.slice(max);
      right = right.slice(0, max + chunk.indexOf(' '));
    }

    // Underline the first token after the reported position.
    const length = right.indexOf(' ') > 0 ? right.indexOf(' ') : right.length;

    message += '\n' + left + right;
    message += '\n' + ' '.repeat(left.length) + bold(red('^'.repeat(length)));
  }

  error.code = e.code;
  // Keep only stack frames that point at project files, after the header.
  error.stack =
    message +
    '\n' +
    blue(error.stack.split('\n').filter(filterSource).slice(2).join('\n'));

  if (e.constraint_name) error.constraint = e.constraint_name;

  return error;
};
diff --git a/src/runtime/db/utils/text.js b/src/runtime/db/utils/text.js
new file mode 100644
index 0000000..bfcfa3e
--- /dev/null
+++ b/src/runtime/db/utils/text.js
@@ -0,0 +1,22 @@
// Renders a value as a quoted SQL literal; null/undefined become NULL.
export function quoteLiteral(value) {
  if (value == null) return 'NULL';

  const escaped = String(value).replaceAll("'", "''");
  return `'${escaped}'`;
}
+
/**
 * Renders a parameterised query as a single SQL string with every value
 * inlined as a quoted literal — intended for logging/debugging output,
 * not for execution. Uses the driver's `describe()` to learn parameter
 * types so the matching serializer formats each value before quoting.
 */
export const inlineSQL = async query => {
  if (!query.values?.length) {
    return query.toString();
  }

  const { source, values } = query;
  const { db } = query.context;
  const { serializers } = db.options;

  let sql = source[0];
  // describe() prepares the statement server-side and returns parameter types.
  const { types } = await db.unsafe(query.toString(), values).describe();

  for (let i = 0; i < types.length; i++) {
    const serialize = serializers[types[i]] ?? String;
    sql += quoteLiteral(serialize(values[i])) + source[i + 1];
  }

  return sql;
};
diff --git a/src/runtime/db/utils/textSearch.js b/src/runtime/db/utils/textSearch.js
new file mode 100644
index 0000000..bca6539
--- /dev/null
+++ b/src/runtime/db/utils/textSearch.js
@@ -0,0 +1,18 @@
// Text-search configuration names by language code. Built on a null
// prototype so lookups like "constructor" cannot hit Object.prototype.
export const LOCALES = Object.assign(Object.create(null), {
  en: 'english',
  de: 'german',
  fr: 'french',
  it: 'italian',
  fi: 'finnish',
  ru: 'russian',
});

// Resolves the PostgreSQL text-search config for a language code,
// falling back to the locale-agnostic 'simple' configuration.
export function getLocaleName(lang) {
  return LOCALES[lang] ?? 'simple';
}
+
/**
 * Builds a tsquery-friendly term string: trims the input, doubles single
 * quotes, strips backslashes, and joins whitespace-separated words with
 * `'<param> '` (prefix-match AND by default).
 */
export function getSearchQuery(text, param = ':* &') {
  const glue = `'${param} '`;

  return text
    .trim()
    .replaceAll("'", "''")
    .replaceAll('\\', '')
    .replace(/\s+/g, glue);
}
diff --git a/src/runtime/exceptions/BadRequest.js b/src/runtime/exceptions/BadRequest.js
new file mode 100644
index 0000000..a4e2c72
--- /dev/null
+++ b/src/runtime/exceptions/BadRequest.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 400 — the request is malformed or fails validation.
export class BadRequest extends Exception {
  static status = 400;
  static message = 'Bad Request';
}
diff --git a/src/runtime/exceptions/Conflict.js b/src/runtime/exceptions/Conflict.js
new file mode 100644
index 0000000..28aac2c
--- /dev/null
+++ b/src/runtime/exceptions/Conflict.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 409 — the request conflicts with existing state (e.g. unique violation).
export class Conflict extends Exception {
  static status = 409;
  static message = 'Conflict';
}
diff --git a/src/runtime/exceptions/Exception.js b/src/runtime/exceptions/Exception.js
new file mode 100644
index 0000000..60c2473
--- /dev/null
+++ b/src/runtime/exceptions/Exception.js
@@ -0,0 +1,59 @@
+import { presets } from '#env';
+import { log } from '../utils/process.js';
+
// Default status, and the threshold above which errors are server-side.
const CODE = 500;

/**
 * Base class for HTTP exceptions. Subclasses override the static
 * `status`/`message`; the constructor reads them through `new.target`
 * so each subclass instance carries its own defaults.
 */
export class Exception extends Error {
  static status = CODE;
  static message = 'Internal Server Error';

  constructor(message = new.target.message) {
    super(message);
    this.status = new.target.status;
  }

  // Attaches a structured error list (serialised into the JSON response).
  putErrors(errors) {
    this.errors = errors;
    return this;
  }

  // Re-wraps another error's message as this exception type.
  static from({ message }) {
    return new this(message);
  }

  /**
   * Normalizes any thrown value into a `{ status, errors }` payload.
   * Server-side errors (>= 500) are logged unless running under tests.
   * NOTE(review): when `error` has no `message` property the destructuring
   * default makes `message` the thrown value itself — presumably to support
   * thrown strings; confirm behaviour for thrown plain objects.
   */
  static of(error) {
    if (!error) {
      error = new Error(this.message);
    }

    const { message = error, errors = [{ message }] } = error;
    const status = error?.status >= 400 ? error.status : this.status;

    if (status >= CODE && !presets.app.isTesting) {
      log.error(error);
    }

    return { status, errors };
  }

  /**
   * Writes the exception onto the HTTP context as a JSON body (or 204 when
   * there is nothing to report) and returns the context.
   */
  static respond(context, exception, id) {
    context.body = null;
    context.stream = null;

    if (exception == null) {
      context.status = 204;
    } else {
      const error = this.of(exception);
      if (id) error.id = id;

      context.error = exception;
      context.status = error.status;

      context.type = 'json';
      context.body = JSON.stringify(error);
      // Error payloads must never be cached.
      context.set('cache-control', 'no-store');
    }

    return context;
  }
}
diff --git a/src/runtime/exceptions/Forbidden.js b/src/runtime/exceptions/Forbidden.js
new file mode 100644
index 0000000..7afb5ce
--- /dev/null
+++ b/src/runtime/exceptions/Forbidden.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 403 — authenticated but not permitted to perform the action.
export class Forbidden extends Exception {
  static status = 403;
  static message = 'Forbidden';
}
diff --git a/src/runtime/exceptions/NotAllowed.js b/src/runtime/exceptions/NotAllowed.js
new file mode 100644
index 0000000..055bf2d
--- /dev/null
+++ b/src/runtime/exceptions/NotAllowed.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 405 — HTTP method not allowed for this resource.
export class NotAllowed extends Exception {
  static status = 405;
  static message = 'Not Allowed';
}
diff --git a/src/runtime/exceptions/NotFound.js b/src/runtime/exceptions/NotFound.js
new file mode 100644
index 0000000..0c02a95
--- /dev/null
+++ b/src/runtime/exceptions/NotFound.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 404 — the requested resource does not exist.
export class NotFound extends Exception {
  static status = 404;
  static message = 'Not Found';
}
diff --git a/src/runtime/exceptions/PayloadLarge.js b/src/runtime/exceptions/PayloadLarge.js
new file mode 100644
index 0000000..020a387
--- /dev/null
+++ b/src/runtime/exceptions/PayloadLarge.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 413 — request body exceeds the accepted size.
export class PayloadLarge extends Exception {
  static status = 413;
  static message = 'Payload Too Large';
}
diff --git a/src/runtime/exceptions/Timeout.js b/src/runtime/exceptions/Timeout.js
new file mode 100644
index 0000000..c91b58c
--- /dev/null
+++ b/src/runtime/exceptions/Timeout.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 504 — an upstream dependency (e.g. the database) timed out.
export class Timeout extends Exception {
  static status = 504;
  static message = 'Timeout';
}
diff --git a/src/runtime/exceptions/UnProcessable.js b/src/runtime/exceptions/UnProcessable.js
new file mode 100644
index 0000000..e3cbbda
--- /dev/null
+++ b/src/runtime/exceptions/UnProcessable.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 422 — well-formed request with semantically invalid content.
export class UnProcessable extends Exception {
  static status = 422;
  static message = 'Unprocessable entity';
}
diff --git a/src/runtime/exceptions/Unauthorized.js b/src/runtime/exceptions/Unauthorized.js
new file mode 100644
index 0000000..7fa5a68
--- /dev/null
+++ b/src/runtime/exceptions/Unauthorized.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 401 — missing or invalid authentication.
export class Unauthorized extends Exception {
  static status = 401;
  static message = 'Unauthorized';
}
diff --git a/src/runtime/exceptions/Unavailable.js b/src/runtime/exceptions/Unavailable.js
new file mode 100644
index 0000000..e331972
--- /dev/null
+++ b/src/runtime/exceptions/Unavailable.js
@@ -0,0 +1,6 @@
+import { Exception } from './Exception.js';
+
// 503 — a required backend service is unreachable.
export class Unavailable extends Exception {
  static status = 503;
  static message = 'Service Unavailable';
}
diff --git a/src/runtime/package.json b/src/runtime/package.json
new file mode 100644
index 0000000..abc495b
--- /dev/null
+++ b/src/runtime/package.json
@@ -0,0 +1,45 @@
+{
+ "name": "@uah/server",
+ "author": "UAH",
+ "description": "API",
+ "version": "1.0.0",
+ "license": "ISC",
+ "private": true,
+ "type": "module",
+ "exports": {
+ "./*": "./src/*",
+ "./bin/*": "./bin/*",
+ "./app.js": "./src/app.js",
+ "./env.js": "./src/env.js"
+ },
+ "scripts": {
+ "lint": "eslint ."
+ },
+ "imports": {
+ "#db/*": "./src/db/*",
+ "#env": "./src/env.js",
+ "#test/*": "./src/test/*",
+ "#utils/*": "./src/utils/*",
+ "#mocks/*": "./src/mocks/*",
+ "#email/*": "./src/email/*",
+ "#hooks/*": "./src/hooks/*",
+ "#models/*": "./src/models/*",
+ "#server/*": "./src/server/*",
+ "#presets/*": "./src/presets/*",
+ "#storage/*": "./src/storage/*",
+ "#security/*": "./src/security/*",
+ "#services/*": "./src/services/*",
+ "#exceptions/*": "./src/exceptions/*",
+ "#migrations/*": "./src/migrations/*"
+ },
+ "engines": {
+ "node": ">=20.0.0"
+ },
+ "dependencies": {
+ "kleur": "latest",
+ "uWebSockets.js": "uNetworking/uWebSockets.js#v20.23.0",
+ "postgres": "^3.3.3",
+ "uuid": "latest"
+ },
+ "devDependencies": {}
+}
\ No newline at end of file
diff --git a/src/runtime/security/access.js b/src/runtime/security/access.js
new file mode 100644
index 0000000..1a81188
--- /dev/null
+++ b/src/runtime/security/access.js
@@ -0,0 +1,67 @@
+import { presets } from '../env.js';
+import { isUser, isOwner, isTeamLeader, isUserTeamLeader } from './rules.js';
+import { isPublicProfile } from './auth/user.js';
+import {
+ entries,
+ alwaysTrue,
+ isFunction,
+ isString,
+ alwaysFalse,
+} from '../utils/native.js';
+import {
+ SUPERUSER,
+ SMARTPLAN_ADMIN,
+ LUDICLOUD_ADMIN,
+ SMARTPEOPLE_ADMIN,
+ SMARTLIBRARY_ADMIN,
+ SMARTDATA_ADMIN,
+} from './roles.js';
+import { permissions, factoryResolve, factoryAccess } from './resolve.js';
+
// Compiles each ACL declared in the app presets into a permission resolver,
// splitting the raw ACL into role names (strings) and rule functions so
// both can be inspected later (e.g. for expected-roles error payloads).
entries(presets.app.access).forEach(([name, acl]) => {
  const permission = factoryResolve(acl);

  permission.roles = acl.filter(isString);
  permission.rules = acl.filter(isFunction);

  permissions[name] = permission;
});

export { permissions };
export { byFeatureName, factoryAccess, switchAccess } from './resolve.js';
+
/**
 * Builds an access check that dispatches on profile visibility: public
 * profiles use `publicPermission`, private ones use `privatePermission`
 * (defaults to the preset private-profile permission).
 */
export const byPrivacyProfile = (
  publicPermission,
  privatePermission = permissions.get_user_profile_private
) => {
  const checkPublic = factoryAccess(publicPermission);
  const checkPrivate = factoryAccess(privatePermission);

  return async (context, entity) => {
    const isPublic = await isPublicProfile(context, entity);
    const check = isPublic ? checkPublic : checkPrivate;

    return await check(context, entity);
  };
};
+
// Preset allow-lists: each is a compiled permission resolver, declared as
// its own export instead of one comma-chained declaration list.
export const notAllow = factoryResolve([alwaysFalse]);
export const allowOwner = factoryResolve([isOwner]);
export const allowPrivate = factoryResolve([isUser]);
export const allowPublic = factoryResolve([alwaysTrue]);
export const allowSuperUser = factoryResolve([SUPERUSER]);
export const allowAdmin = factoryResolve([presets.app.id + '_admin', SUPERUSER]);
export const allowLudiCloudAdmin = factoryResolve([SUPERUSER, LUDICLOUD_ADMIN]);
export const allowSmartPlanAdmin = factoryResolve([SUPERUSER, SMARTPLAN_ADMIN]);
export const allowSmartPeopleAdmin = factoryResolve([SUPERUSER, SMARTPEOPLE_ADMIN]);
export const allowSmartLibraryAdmin = factoryResolve([SUPERUSER, SMARTLIBRARY_ADMIN]);
export const allowSmartDataAdmin = factoryResolve([SUPERUSER, SMARTDATA_ADMIN]);
export const allowAllAdmin = factoryResolve([
  SUPERUSER,
  LUDICLOUD_ADMIN,
  SMARTPLAN_ADMIN,
  SMARTPEOPLE_ADMIN,
  SMARTLIBRARY_ADMIN,
  SMARTDATA_ADMIN,
]);
export const allowTeamLeader = factoryResolve([isTeamLeader]);
export const allowUserTeamLeader = factoryResolve([isUserTeamLeader]);
diff --git a/src/runtime/security/auth/authorize.js b/src/runtime/security/auth/authorize.js
new file mode 100644
index 0000000..e18a0c0
--- /dev/null
+++ b/src/runtime/security/auth/authorize.js
@@ -0,0 +1,74 @@
+import { presets } from '#env';
+import { Unauthorized } from '#exceptions/Unauthorized.js';
+import { allowPublic } from '#security/access.js';
+import { defaultRoles, disabledRoles } from '../resolve.js';
+import { cookie, deleteAuthCookies } from './cookie.js';
+import { Token, TokenExpired } from './token.js';
+
// Reads the raw access-token JWT from the request cookies, if present.
export const getUserAccessToken = context =>
  context.cookies?.get(cookie.token.name);
+
/**
 * Authenticates the request from the UID + access-token cookies.
 *
 * - Valid pair: verifies the JWT (production only), transparently refreshing
 *   it when expired, then attaches `uid`/`user` to the context and applies
 *   the global default/disabled role lists.
 * - Any failure in that path: all auth cookies are cleared, error rethrown.
 * - No pair: stale auth cookies are cleared; a 401 is thrown unless the
 *   route is public.
 */
export const authorize = async (context, route) => {
  const uid = context.cookies.get(cookie.uid.name);
  const jwt = context.cookies.get(cookie.token.name);

  if (uid && jwt) {
    try {
      const token = new Token(jwt);
      const user = token.getUser();

      // Signature/expiry verification is skipped outside production.
      if (presets.app.isProduction) {
        try {
          await token.verify();
        } catch (error) {
          if (error instanceof TokenExpired) {
            // Expired: exchange the refresh-token cookie for a new pair.
            try {
              const r = await token.refresh(cookie.refreshToken.get(context));

              cookie.token.set(context, r.access_token);
              cookie.refreshToken.set(context, r.refresh_token);
            } catch (error) {
              throw new Unauthorized();
            }
          } else throw error;
        }
      }

      // The UID cookie must match the token's subject.
      if (uid === user.id) {
        context.uid = uid;
        context.user = user;

        // Grant globally enabled roles the token may be missing.
        for (let i = 0; i < defaultRoles.length; i++)
          if (user.roles.includes(defaultRoles[i]) === false) {
            user.roles.push(defaultRoles[i]);
          }

        // Strip globally disabled roles the token may still carry.
        for (let i = 0; i < disabledRoles.length; i++)
          if (user.roles.includes(disabledRoles[i])) {
            user.roles.splice(user.roles.indexOf(disabledRoles[i]), 1);
          }
      } else {
        throw new Unauthorized();
      }
    } catch (error) {
      // Any authentication failure invalidates the whole cookie set.
      deleteAuthCookies(context);
      throw error;
    }
  } else {
    // Clear whichever half of a partial cookie pair is left over.
    if (uid !== undefined) {
      cookie.uid.delete(context);
    }

    if (jwt !== undefined) {
      cookie.token.delete(context);
      cookie.refreshToken.delete(context);
    }

    if (route?.checkAccess !== allowPublic && !route?.public) {
      throw new Unauthorized();
    }

    // Anonymous responses: no indexing, no shared caches.
    context.set('x-robots-tag', 'none');
    context.set('cache-control', 'private');
  }
};
diff --git a/src/runtime/security/auth/cookie.js b/src/runtime/security/auth/cookie.js
new file mode 100644
index 0000000..4122dfe
--- /dev/null
+++ b/src/runtime/security/auth/cookie.js
@@ -0,0 +1,60 @@
// Shared accessors: each cookie descriptor below binds these as methods,
// so `this` supplies its `name`/`options` unless passed explicitly.
export function getCookie(context, name = this.name) {
  const { cookies } = context;
  return cookies.get(name);
}

export function setCookie(context, value, { name, options } = this) {
  context.cookies.set(name, value, { ...options });
}

// Deleting = overwriting with an empty value that expires immediately.
export function deleteCookie(context, { name, options } = this) {
  context.cookies.set(name, '', { ...options, maxAge: 0 });
}

// Builds one cookie descriptor wired to the shared accessors.
const makeCookie = (name, options) => ({
  name,
  options,
  get: getCookie,
  set: setCookie,
  delete: deleteCookie,
});

export const cookie = {
  uid: makeCookie('UID', { httpOnly: false, sameSite: 'strict' }),
  token: makeCookie('ACCESS_TOKEN', { httpOnly: true, sameSite: 'strict' }),
  refreshToken: makeCookie('REFRESH_TOKEN', {
    httpOnly: true,
    sameSite: 'strict',
  }),
  lang: makeCookie('LANG', { httpOnly: false, maxAge: 31_536_000 }),
};

// Removes the full authentication cookie set (uid + both tokens).
export const deleteAuthCookies = context => {
  for (const c of [cookie.uid, cookie.token, cookie.refreshToken]) {
    deleteCookie(context, c);
  }
};
diff --git a/src/runtime/security/auth/service.js b/src/runtime/security/auth/service.js
new file mode 100644
index 0000000..c7032db
--- /dev/null
+++ b/src/runtime/security/auth/service.js
@@ -0,0 +1,58 @@
+import { presets } from '#env';
+import { Unauthorized } from '#exceptions/Unauthorized.js';
+
// OpenID provider settings from the environment presets.
const { host, applicationId, clientSecret } = presets.auth;

/**
 * Asks the auth service whether `jwt` is still valid.
 * Throws Error when the auth host is not configured, Unauthorized when
 * the service rejects the token.
 */
export const validate = async jwt => {
  if (!host) {
    throw new Error('Undefined presets.auth.host');
  }

  const url = `${host}/api/jwt/validate`;
  const headers = { authorization: 'Bearer ' + jwt };

  if ((await fetch(url, { headers })).ok === false) {
    throw new Unauthorized('Invalid token');
  }
};
+
/**
 * Exchanges an expired access token + refresh token for a fresh token pair
 * via the OAuth2 `refresh_token` grant.
 * Throws Error when the provider configuration is incomplete,
 * Unauthorized when the provider rejects the exchange.
 * Resolves with the provider's JSON response (access_token/refresh_token).
 */
export const refresh = async (jwt, refreshToken) => {
  if (!host) {
    throw new Error('Undefined presets.auth.host');
  }

  if (!clientSecret) {
    throw new Error('Undefined presets.auth.clientSecret');
  }

  if (!applicationId) {
    throw new Error('Undefined presets.auth.applicationId');
  }

  if (!refreshToken) {
    throw new Error('Undefined refresh_token');
  }

  const url = `${host}/oauth2/token`;
  const headers = {
    'content-type': 'application/x-www-form-urlencoded',
  };

  const response = await fetch(url, {
    method: 'POST',
    headers,
    body: new URLSearchParams({
      grant_type: 'refresh_token',
      client_id: applicationId,
      client_secret: clientSecret,
      access_token: jwt,
      refresh_token: refreshToken,
    }),
  });

  if (response.ok === true) {
    return await response.json();
  } else {
    throw new Unauthorized('Invalid token');
  }
};
diff --git a/src/runtime/security/auth/token.js b/src/runtime/security/auth/token.js
new file mode 100644
index 0000000..070912e
--- /dev/null
+++ b/src/runtime/security/auth/token.js
@@ -0,0 +1,138 @@
+import { createHmac } from 'crypto';
+import { presets } from '#env';
+import { Unauthorized } from '#exceptions/Unauthorized.js';
+import { validate, refresh } from './service.js';
+import { uuid } from '../../utils/uuid.js';
+import { log } from '../../utils/process.js';
+
const { now } = Date;
// In-flight refresh requests keyed by refresh token, so concurrent requests
// carrying the same expired token share one refresh round-trip.
const promises = new Map();
const { applicationId, jwtSecret } = presets.auth;

// Supported HMAC-SHA JWT signature algorithms (base64url-encoded digests).
const algorithms = {
  HS256: (key, data) =>
    createHmac('sha256', key).update(data).digest('base64Url'),
  HS384: (key, data) =>
    createHmac('sha384', key).update(data).digest('base64Url'),
  HS512: (key, data) =>
    createHmac('sha512', key).update(data).digest('base64Url'),
};
+
// 401 variants distinguishing malformed tokens from expired ones.
export class TokenInvalid extends Unauthorized {}
export class TokenExpired extends Unauthorized {}
+
/**
 * Parsed JWT wrapper: splits the compact form, decodes header/payload and
 * verifies/refreshes the token against the configured auth provider.
 */
export class Token {
  constructor(jwt) {
    const parts = jwt.split('.', 3);

    if (!parts[0] || !parts[1] || !parts[2]) {
      throw new TokenInvalid();
    }

    const { alg } = JSON.parse(Buffer.from(parts[0], 'base64'));

    this.jwt = jwt;
    this.signature = parts[2];
    // Undefined when the header's `alg` is not an HMAC one we support;
    // verify() then falls back to remote validation.
    this.algorithm = algorithms[alg];
    this.data = parts[0] + '.' + parts[1];
    this.payload = JSON.parse(Buffer.from(parts[1], 'base64'));
  }

  // Verifies the signature locally when possible, otherwise asks the auth
  // service; then checks expiry (payload.exp interpreted as seconds).
  async verify() {
    if (this.algorithm === undefined) {
      await validate(this.jwt);
    } else if (jwtSecret) {
      if (this.algorithm(jwtSecret, this.data) !== this.signature) {
        throw new Unauthorized('Invalid token signature');
      }
    } else {
      log.error('Need define process.env.AUTH_JWT_SECRET_KEY');
      await validate(this.jwt);
    }
    if (this.payload.exp < now() / 1000) {
      throw new TokenExpired('Expired token');
    }
  }

  // Refreshes via the auth service, deduplicating concurrent refreshes of
  // the same refresh token; the shared promise is evicted after 10 s.
  async refresh(token) {
    if (promises.has(token)) {
      return await promises.get(token);
    }

    const promise = refresh(this.jwt, token);
    promises.set(token, promise);

    try {
      return await promise;
    } finally {
      setTimeout(() => promises.delete(token), 10_000);
    }
  }

  // Maps standard/OpenID claims to the app's user shape.
  getUser() {
    return {
      id: this.payload.sub || '',
      email: this.payload.email || '',
      roles: this.payload.roles || [],
      username: this.payload.preferred_username,
    };
  }

  /**
   * Creates a locally-signed HS256 JWT (service-to-service use).
   * NOTE(review): `iat`/`exp` here are in *milliseconds* (Date.now()) and
   * getToken() below compares `exp` against milliseconds too, while
   * verify() treats `exp` as seconds per RFC 7519 — confirm which
   * consumers read these claims before changing either side.
   */
  static create({
    payload = {},
    secret = jwtSecret,
    clientId = applicationId,
    duration = 1 * 60 * 60 * 1000,
  } = {}) {
    if (!secret) log.error('Need define process.env.AUTH_JWT_SECRET_KEY');

    const header = {
      alg: 'HS256',
      typ: 'JWT',
    };

    const iat = now();
    payload = {
      aud: clientId,
      exp: iat + duration,
      iat: iat,
      iss: presets.app.origin,
      sub: presets.app.id,
      jti: uuid.random(),
      applicationId: clientId,
      ...payload,
    };
    const encodedHeaders = Buffer.from(JSON.stringify(header)).toString(
      'base64Url'
    );
    const encodedPayload = Buffer.from(JSON.stringify(payload)).toString(
      'base64Url'
    );
    // NOTE(review): the digest is already a base64url string; wrapping it in
    // Buffer.from and letting the template literal stringify it is a no-op.
    const encodedSignature = Buffer.from(
      algorithms.HS256(secret, `${encodedHeaders}.${encodedPayload}`)
    );

    return `${encodedHeaders}.${encodedPayload}.${encodedSignature}`;
  }

  // Cache of locally-created tokens by name.
  static tokens = new Map();

  // Returns a cached token while it is unexpired, otherwise creates and
  // caches a fresh one (expiry compared in milliseconds — see create()).
  static getToken(name, params) {
    const { tokens } = Token;
    let token = tokens.get(name);
    if (token) {
      const payload = JSON.parse(
        Buffer.from(token.split('.')[1], 'base64Url').toString()
      );
      const now = Date.now();

      if (payload.exp > now) {
        return token;
      }
    }

    token = Token.create(params);
    tokens.set(name, token);
    return token;
  }
}
diff --git a/src/runtime/security/auth/user.js b/src/runtime/security/auth/user.js
new file mode 100644
index 0000000..0e8e71d
--- /dev/null
+++ b/src/runtime/security/auth/user.js
@@ -0,0 +1,11 @@
+import { freeze, nullArray } from '../../utils/native.js';
+
// Anonymous-user placeholder attached to unauthenticated contexts; frozen
// so request handlers cannot mutate the shared instance.
export const DEFAULT_USER = freeze({
  id: '',
  email: '',
  username: '',
  roles: nullArray,
});

// True when the given user's profile is flagged public in ludicloud.users.
export const isPublicProfile = async ({ sql }, { uid }) =>
  await sql`SELECT is_public AS "0" FROM ludicloud.users WHERE uid = ${uid}`.findOneValue();
diff --git a/src/runtime/security/auth/utils.js b/src/runtime/security/auth/utils.js
new file mode 100644
index 0000000..c2e5aaa
--- /dev/null
+++ b/src/runtime/security/auth/utils.js
@@ -0,0 +1,25 @@
/**
 * POSTs form-encoded `params` to an OpenID endpoint and returns the JSON
 * response.
 *
 * Fix: failures previously threw a plain object literal, which carries no
 * stack trace and fails `instanceof Error` checks. We now throw a real
 * Error that keeps the same `{ status, errors }` shape consumed by the
 * HTTP error handler, so existing catch sites keep working.
 */
export const fetchOpenId = async (url, params) => {
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'content-type': 'application/x-www-form-urlencoded',
    },
    body: new URLSearchParams(params),
  });

  if (!response.ok) {
    const { error, error_description } = await response.json();

    const failure = new Error(
      error_description ?? error ?? 'OpenID request failed'
    );
    failure.status = response.status;
    failure.errors = [
      {
        type: error,
        message: error_description,
      },
    ];
    throw failure;
  }

  return await response.json();
};
diff --git a/src/runtime/security/context.js b/src/runtime/security/context.js
new file mode 100644
index 0000000..e7cfcc3
--- /dev/null
+++ b/src/runtime/security/context.js
@@ -0,0 +1,21 @@
+import { byFeatureName } from './resolve.js';
+import { DEFAULT_USER } from './auth/user.js';
+
+async function isAccess(name, payload) {
+ return await byFeatureName(name)(this, payload, true);
+}
+
+async function getPermissionResult() {
+ for (const check of this.route?.checkAccess.acl)
+ if (await check(this, this.payload, true))
+ return {
+ [check.name]: (await check.getValue?.(this)) || check.value || true,
+ };
+}
+
+export const setSecurityContext = context => {
+ context.uid = null;
+ context.user = DEFAULT_USER;
+ context.isAccess = isAccess;
+ context.getPermissionResult = getPermissionResult;
+};
diff --git a/src/runtime/security/error.js b/src/runtime/security/error.js
new file mode 100644
index 0000000..412578a
--- /dev/null
+++ b/src/runtime/security/error.js
@@ -0,0 +1,26 @@
+import { Forbidden } from '../exceptions/Forbidden.js';
+import { Unauthorized } from '../exceptions/Unauthorized.js';
+
+// Builds the "expected" descriptor for an ACL: maps each rule name to the
+// list of concrete values (role names, team ids, ...) that would grant access.
+export const getExpected = acl => {
+  const expected = {};
+  for (const { name, value } of acl) {
+    if (!expected[name]) {
+      expected[name] = [];
+    }
+    if (value) {
+      expected[name].push(value);
+    }
+  }
+  return expected;
+};
+
+// Denies the request: 403 with the expected-access details when the caller
+// is authenticated (context.uid set), plain 401 otherwise.
+export const throwAccessDenied = (context, expected) => {
+  throw context.uid
+    ? new Forbidden().putErrors([
+        {
+          type: 'accessDenied',
+          expected,
+        },
+      ])
+    : new Unauthorized();
+};
diff --git a/src/runtime/security/resolve.js b/src/runtime/security/resolve.js
new file mode 100644
index 0000000..08bf534
--- /dev/null
+++ b/src/runtime/security/resolve.js
@@ -0,0 +1,124 @@
+import {
+ keys,
+ create,
+ isArray,
+ isString,
+ isFunction,
+ throwError,
+ alwaysFalse,
+} from '../utils/native.js';
+import { getExpected, throwAccessDenied } from './error.js';
+
+// Memoized "user has role X" predicates, keyed by role name.
+export const cacheIsRoles = create(null);
+// Registry of named feature permissions; populated by the security service.
+export const permissions = create(null);
+
+// Shared mutable role lists, refreshed from the database (see service.js).
+export const defaultRoles = [];
+export const disabledRoles = [];
+
+// Normalizes one ACL entry into a check function:
+//   function -> used as-is
+//   string   -> cached role-membership predicate (check.value = role name)
+//   false    -> always denies
+// Anything else is a configuration error.
+export const getResolveMethod = value => {
+  if (isFunction(value)) {
+    return value;
+  }
+  if (isString(value)) {
+    if (!cacheIsRoles[value]) {
+      const isRole = ({ user }) => user.roles.includes(value);
+      isRole.value = value;
+      cacheIsRoles[value] = isRole;
+    }
+    return cacheIsRoles[value];
+  }
+
+  if (value === false) {
+    return alwaysFalse;
+  }
+
+  throw new Error('ACL role name must type string');
+};
+
+// getValue helper for lead grants: of the granted teams (this.value),
+// returns the distinct ids the current user actually leads.
+async function getCustomTeamsLeadValue({ sql, uid = null }) {
+  return await sql`
+    SELECT json_agg(DISTINCT id) AS "0"
+    FROM ludicloud.teams_users
+    WHERE lid = ${uid} AND id = ANY(${this.value})
+  `.findOneValue();
+}
+
+// getValue helper for member grants: of the granted teams (this.value),
+// returns the ids the current user is a member of.
+async function getCustomTeamsMemberValue({ sql, uid = null }) {
+  return await sql`
+    SELECT json_agg(id) AS "0"
+    FROM ludicloud.teams_users
+    WHERE uid = ${uid} AND id = ANY(${this.value})
+  `.findOneValue();
+}
+
+// Builds an ACL check granting access to leads of any of the given teams.
+// The attached value/getValue feed the "expected"/result reporting.
+export const factoryCustomTeamsLead = teams => {
+  const isCustomTeamsLead = async ({ sql, uid = null }) =>
+    await sql`SELECT EXISTS (SELECT 1 FROM ludicloud.teams_users WHERE lid = ${uid} AND id = ANY(${teams}::uuid[])) AS "0"`.findOneValue();
+
+  isCustomTeamsLead.value = teams;
+  isCustomTeamsLead.getValue = getCustomTeamsLeadValue;
+
+  return isCustomTeamsLead;
+};
+
+// Builds an ACL check granting access to members of any of the given teams.
+export const factoryCustomTeamsMember = teams => {
+  const isCustomTeamsMember = async ({ sql, uid = null }) =>
+    await sql`SELECT EXISTS (SELECT 1 FROM ludicloud.teams_users WHERE uid = ${uid} AND id = ANY(${teams}::uuid[])) AS "0"`.findOneValue();
+
+  isCustomTeamsMember.value = teams;
+  isCustomTeamsMember.getValue = getCustomTeamsMemberValue;
+
+  return isCustomTeamsMember;
+};
+
+// Compiles an ACL (array of role names / check functions / false) into a
+// single checkAccess(context, payload, isNotThrow) function. Access is
+// granted by the FIRST check that returns exactly `true`; otherwise either
+// false is returned (isNotThrow) or an access-denied error is thrown.
+// The compiled acl array is exposed as checkAccess.acl so the security
+// service can rewrite it in place at runtime.
+export const factoryResolve = acl => {
+  if (!isArray(acl)) {
+    throw new Error('ACL must type array');
+  }
+
+  acl = acl.map(getResolveMethod);
+  acl.expected = getExpected(acl);
+
+  const checkAccess = async (context, payload, isNotThrow = false) => {
+    // Sequential on purpose: checks may hit the database; stop at first grant.
+    for (let i = 0; i < acl.length; i++) {
+      const check = acl[i];
+      if ((await check(context, payload, isNotThrow)) === true) return true;
+    }
+
+    if (isNotThrow) return false;
+    throwAccessDenied(context, acl.expected);
+  };
+
+  checkAccess.acl = acl;
+  return checkAccess;
+};
+
+// Looks up a registered named permission; unknown names are config errors.
+export const byFeatureName = name =>
+  permissions[name] ??
+  throwError(new Error(`Invalid feature access name "${name}"`));
+
+// Accepts an ACL in any supported form: compiled function (used as-is),
+// permission name (looked up), or raw array (compiled). Defaults to the
+// registered "default" permission.
+export const factoryAccess = (acl = permissions.default) =>
+  isFunction(acl)
+    ? acl
+    : isString(acl)
+    ? byFeatureName(acl)
+    : factoryResolve(acl);
+
+// Builds a payload-dispatched ACL: the check whose key equals
+// String(payload[name]) is evaluated; `caseDefault` (if given) is appended
+// as a catch-all. Returns a compiled checkAccess function.
+export const switchAccess = (name, cases, caseDefault) => {
+  const acl = [];
+
+  for (const key of keys(cases)) {
+    const resolve = factoryAccess(cases[key]);
+
+    // Note: non-true results (including a truthy non-boolean) do not grant;
+    // factoryResolve only accepts an exact `true`.
+    const isCase = (context, payload, isNotThrow) =>
+      String(payload?.[name]) === key && resolve(context, payload, isNotThrow);
+
+    acl.push(isCase);
+  }
+
+  if (caseDefault) {
+    acl.push(factoryAccess(caseDefault));
+  }
+
+  return factoryResolve(acl);
+};
diff --git a/src/runtime/security/roles.js b/src/runtime/security/roles.js
new file mode 100644
index 0000000..f9bdda9
--- /dev/null
+++ b/src/runtime/security/roles.js
@@ -0,0 +1,12 @@
+export const LUDICLOUD_ADMIN = 'ludicloud_admin',
+ SMARTLIBRARY_ADMIN = 'smartlibrary_admin',
+ SMARTPEOPLE_ADMIN = 'smartpeople_admin',
+ SMARTPEOPLE_HR = 'smartpeople_hr',
+ SMARTPEOPLE_MODERATOR = 'smartpeople_moderator',
+ SMARTPEOPLE_USER = 'smartpeople_user',
+ SMARTPLAN_ADMIN = 'smartplan_admin',
+ SMARTPLAN_USER = 'smartplan_user',
+ SUPERUSER = 'ludicloud_superuser',
+ SMARTLIBRARY_USER = 'smartlibrary_user',
+ SMARTDATA_ADMIN = 'smartdata_admin',
+ SMARTDATA_USER = 'smartdata_user';
diff --git a/src/runtime/security/rules.js b/src/runtime/security/rules.js
new file mode 100644
index 0000000..b45d098
--- /dev/null
+++ b/src/runtime/security/rules.js
@@ -0,0 +1,32 @@
+export {
+ isTeamLeader,
+ isTeamMemberDirect,
+ isTeamMemberReader,
+ isTeamLeaderWriter,
+ isUserTeamLeader,
+} from './rules/teams.js';
+
+export {
+ hasJobsByApiKeyAccess,
+ hasSkillsByApiKeyAccess,
+ hasDataUploadByApiKeyAccess,
+} from './rules/apiKey.js';
+
+export { isMyConnection } from './rules/network.js';
+
+// Authenticated-user predicate: context.uid is set once auth succeeds.
+export const isUser = context => !!context.uid;
+
+// Owner check: the current user owns the entity (matching entity.uid).
+export const isOwner = (context, entity) =>
+  isUser(context) && context.uid === entity?.uid;
+
+// Writer check: owner, or explicitly granted a permission level > 0.
+export const isWriters = (context, entity) =>
+  isOwner(context) || entity?.granted_permissions?.[context.uid] > 0;
+
+// Reader check: owner, or listed in granted_permissions with any level.
+export const isReaders = (context, entity) =>
+  isOwner(context) || entity?.granted_permissions?.[context.uid] !== undefined;
+
+// Public-profile check: trusts an explicit entity.is_public flag in either
+// direction; only when the flag is absent does it fall back to querying
+// ludicloud.users by entity.uid.
+export const isPublicUserProfile = async ({ sql }, entity) =>
+  entity?.is_public === true ||
+  (entity?.is_public !== false &&
+    !!entity?.uid &&
+    (await sql`SELECT EXISTS(SELECT 1 FROM ludicloud.users WHERE uid=${entity.uid} AND is_public) AS "0"`.findOneValue()));
diff --git a/src/runtime/security/rules/apiKey.js b/src/runtime/security/rules/apiKey.js
new file mode 100644
index 0000000..90452c2
--- /dev/null
+++ b/src/runtime/security/rules/apiKey.js
@@ -0,0 +1,42 @@
+// Feature identifiers matching the values stored in api_keys.apis.
+const apis = {
+  skill_extraction: 'skill_extraction',
+  data_upload: 'data_upload',
+  job_standardization: 'job_standardization',
+  jobs_library: 'jobs_library',
+  skills_library: 'skills_library',
+  users_details: 'users_details',
+  users_search: 'users_search',
+};
+
+// True when payload.api_key exists, is enabled for `feature`, and the
+// request IP is allowed (an empty ip_list means no IP restriction).
+const isValidApiKey = async ({ sql, request }, payload, feature) => {
+  if (!payload?.api_key) return false;
+  return await sql`
+    SELECT EXISTS(
+      SELECT api_key
+      FROM ludicloud.api_keys api
+      WHERE api.api_key = ${payload.api_key}
+      AND ${feature} = any (api.apis)
+      AND (array_length (ip_list, 1) is null OR ${request.ip} = ANY(ip_list))
+    ) AS "0"`.findOneValue();
+};
+
+// Per-feature ACL checks, each delegating to isValidApiKey.
+export const hasDataUploadByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.data_upload);
+
+export const hasSkillsByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.skills_library);
+
+export const hasJobsByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.jobs_library);
+
+export const hasSkillExtractionByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.skill_extraction);
+
+export const hasJobStandardizationByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.job_standardization);
+
+export const hasUsersDetailsByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.users_details);
+
+export const hasUsersSearchByApiKeyAccess = async (context, payload) =>
+  await isValidApiKey(context, payload, apis.users_search);
diff --git a/src/runtime/security/rules/network.js b/src/runtime/security/rules/network.js
new file mode 100644
index 0000000..f0ee990
--- /dev/null
+++ b/src/runtime/security/rules/network.js
@@ -0,0 +1,9 @@
+// True when the current user has an ACCEPTED network connection pointing at
+// the entity's user. Note the check is directional (uid -> target).
+export const isMyConnection = async ({ sql, uid }, entity) => {
+  if (!entity?.uid) return false;
+
+  return await sql`SELECT EXISTS (
+    SELECT 1
+    FROM ludicloud.users_network
+    WHERE uid = ${uid} AND target = ${entity.uid} AND accepted
+  ) AS "0"`.findOneValue();
+};
diff --git a/src/runtime/security/rules/teams.js b/src/runtime/security/rules/teams.js
new file mode 100644
index 0000000..3c51153
--- /dev/null
+++ b/src/runtime/security/rules/teams.js
@@ -0,0 +1,54 @@
+// True when the current user leads the entity's user: either directly
+// (same teams_users row), or transitively through the lead chain of teams
+// whose options contain 'leader_can_see_all' (recursive CTE walks up the
+// lid links until it reaches the current user or exhausts the chain).
+export const isTeamLeader = async ({ sql, uid: lid }, entity) => {
+  if (!lid || !entity?.uid) return false;
+  const { uid } = entity;
+
+  return await sql`SELECT EXISTS(
+    SELECT 1 FROM ludicloud.teams_users WHERE uid = ${uid} AND lid = ${lid}
+    UNION
+    SELECT 1 FROM (WITH RECURSIVE teams_leads(id, uid) AS (
+      SELECT members.id, members.lid AS uid
+      FROM (SELECT DISTINCT id FROM ludicloud.teams_users JOIN ludicloud.teams USING (id) WHERE teams_users.lid = ${lid} AND teams.options ? 'leader_can_see_all') _
+      JOIN (SELECT id, lid FROM ludicloud.teams_users WHERE uid = ${uid}) AS members USING (id)
+      UNION
+      SELECT leads.id, leads.lid AS uid FROM teams_leads JOIN ludicloud.teams_users AS leads USING(id, uid)
+    )
+    SELECT 1 FROM teams_leads WHERE uid = ${lid} LIMIT 1)_) AS "0"`.findOneValue();
+};
+
+// True when the two users share at least one team, counting each of them
+// in either role (member `uid` or lead `lid`).
+export const isTeamMemberDirect = async ({ sql, uid: lid }, entity) => {
+  if (!lid || !entity?.uid) return false;
+  const { uid } = entity;
+
+  return await sql`SELECT EXISTS(
+    SELECT 1
+    FROM (SELECT id FROM ludicloud.teams_users WHERE uid = ${lid} OR lid = ${lid}) AS own_teams
+    JOIN (SELECT id FROM ludicloud.teams_users WHERE uid = ${uid} OR lid = ${uid}) AS users_teams USING (id)
+  ) AS "0"`.findOneValue();
+};
+
+// True when both users are members (same id AND same lid row) of a team
+// whose options allow members to see each other's 'profile_details'.
+export const isTeamMemberReader = async ({ sql, uid }, entity) => {
+  if (!uid || !entity?.uid) return false;
+
+  return await sql`SELECT EXISTS(
+    SELECT 1
+    FROM (
+      SELECT id, lid FROM ludicloud.teams_users WHERE uid = ${uid}
+      INTERSECT
+      SELECT id, lid FROM ludicloud.teams_users WHERE uid = ${entity.uid}
+    ) AS users
+    JOIN ludicloud.teams AS teams USING (id)
+    WHERE teams.options->'member_see_each_other' ? 'profile_details') AS "0"`.findOneValue();
+};
+
+// True when the current user directly leads the entity's user in a team
+// whose options contain 'leader_can_modify'.
+export const isTeamLeaderWriter = async ({ sql, uid: lid }, entity) => {
+  if (!lid || !entity?.uid) return false;
+
+  return await sql`SELECT EXISTS (
+    SELECT 1 FROM (SELECT id FROM ludicloud.teams_users WHERE uid = ${entity.uid} AND lid = ${lid}) AS teams_users
+    JOIN ludicloud.teams AS teams ON(teams_users.id = teams.id AND teams.options ? 'leader_can_modify')) AS "0"`.findOneValue();
+};
+
+// True when the current user leads anyone in any team.
+export const isUserTeamLeader = async ({ sql, uid }) =>
+  await sql`SELECT EXISTS (
+    SELECT 1 FROM ludicloud.teams_users WHERE lid = ${uid}
+  ) AS "0"`.findOneValue();
diff --git a/src/runtime/security/service.js b/src/runtime/security/service.js
new file mode 100644
index 0000000..6b28f4f
--- /dev/null
+++ b/src/runtime/security/service.js
@@ -0,0 +1,78 @@
+import { log } from '../utils/process.js';
+import { getExpected } from './error.js';
+import { permissions } from './access.js';
+import { alwaysFalse } from '../utils/native.js';
+import {
+ defaultRoles,
+ disabledRoles,
+ factoryCustomTeamsLead,
+ factoryCustomTeamsMember,
+ getResolveMethod,
+} from './resolve.js';
+
+// Reloads every registered permission's ACL from the database, rewriting
+// each compiled acl array IN PLACE so already-bound checkAccess closures
+// (see factoryResolve) pick up the new rules without re-registration.
+const setPermissions = async ({ sql }) => {
+  const rows = await sql`
+    SELECT
+      permission,
+      permissions.enabled,
+      array_agg(DISTINCT roles.role) FILTER(WHERE roles.enabled) AS "enabledRoles",
+      array_agg(DISTINCT teams.team_id) FILTER(WHERE teams.is_team_lead) AS "enabledTeamsLead",
+      array_agg(DISTINCT teams.team_id) FILTER(WHERE teams.is_team_member) AS "enabledTeamsMember"
+    FROM ludicloud.permissions AS permissions
+    LEFT JOIN ludicloud.permissions_roles AS roles USING(permission)
+    LEFT JOIN ludicloud.permissions_teams AS teams USING(permission)
+    WHERE permission = ANY(${Object.keys(permissions)}::text[])
+    GROUP BY permission`;
+
+  for (const row of rows) {
+    // NOTE(review): permissions entries here presumably carry { acl, rules }
+    // descriptors — confirm against the registration in access.js.
+    let { acl, rules } = permissions[row.permission];
+
+    if (row.enabled) {
+      let roles = row.enabledRoles ?? [];
+
+      // Copy-on-extend so the base `rules` array is never mutated.
+      if (row.enabledTeamsLead) {
+        rules = [...rules, factoryCustomTeamsLead(row.enabledTeamsLead)];
+      }
+
+      if (row.enabledTeamsMember) {
+        rules = [...rules, factoryCustomTeamsMember(row.enabledTeamsMember)];
+      }
+
+      acl.length = 0;
+      acl.push(...roles.map(getResolveMethod), ...rules);
+      acl.expected = getExpected(acl);
+    } else {
+      // Disabled permission: deny everyone and report why.
+      acl.length = 0;
+      acl.push(alwaysFalse);
+      acl.expected = { enabled: true };
+    }
+  }
+};
+
+// Refreshes the shared defaultRoles/disabledRoles lists in place.
+const setRoles = async ({ sql }) => {
+  const roles = await sql`
+    SELECT
+      json_agg(role) FILTER (WHERE enabled IS FALSE) AS disabled,
+      json_agg(role) FILTER (WHERE enabled AND is_default) AS defaults
+    FROM ludicloud.roles`.findOne();
+
+  defaultRoles.length = 0;
+  disabledRoles.length = 0;
+
+  if (roles.defaults?.length)
+    for (const role of roles.defaults) defaultRoles.push(role);
+
+  if (roles.disabled?.length)
+    for (const role of roles.disabled) disabledRoles.push(role);
+};
+
+// Loads roles and permissions once at startup, then keeps them fresh via
+// Postgres LISTEN notifications. Refresh errors are logged, not fatal.
+export const securityService = async context => {
+  const handlerRoles = () => setRoles(context).catch(log.error);
+  const handlerPermissions = () => setPermissions(context).catch(log.error);
+
+  await handlerRoles();
+  await handlerPermissions();
+
+  await context.db.listen('roles', handlerRoles);
+  await context.db.listen('permissions', handlerPermissions);
+};
diff --git a/src/runtime/security/utils.js b/src/runtime/security/utils.js
new file mode 100644
index 0000000..f176c55
--- /dev/null
+++ b/src/runtime/security/utils.js
@@ -0,0 +1,5 @@
+import * as allRoles from './roles.js';
+
+// Flat list of every role constant declared in roles.js.
+const roles = Object.values(allRoles);
+
+// True when `role` is one of the application's declared role identifiers.
+export const isRole = role => roles.includes(role);
diff --git a/src/runtime/server/app.js b/src/runtime/server/app.js
new file mode 100644
index 0000000..3e29f67
--- /dev/null
+++ b/src/runtime/server/app.js
@@ -0,0 +1,40 @@
+import {
+ App,
+ us_listen_socket_close,
+ LIBUS_LISTEN_EXCLUSIVE_PORT,
+} from 'uWebSockets.js';
+
+import { onAborted } from '#utils/process.js';
+import { green, red } from '#utils/console.js';
+
+import { port } from './constants.js';
+import { Context } from './context.js';
+import { bindEndpoints } from './router.js';
+import { loadServices } from './services.js';
+import { noop } from '#utils/native.js';
+
+export const startServer = async ({ services, websocket } = {}) => {
+ const server = App();
+ const context = Context.init({
+ server,
+ onOpenWebsocket: websocket?.onOpen ?? noop,
+ onCloseWebsocket: websocket?.onClose ?? noop,
+ });
+
+ await bindEndpoints(server);
+ await loadServices(context, services);
+
+ await new Promise((resolve, reject) => {
+ server.listen(port, LIBUS_LISTEN_EXCLUSIVE_PORT, token => {
+ if (token) {
+ onAborted(() => {
+ us_listen_socket_close(token);
+ });
+ console.info(green('Server listen on port ') + port);
+ resolve();
+ } else {
+ reject(new Error(red('Server listen on port ' + port)));
+ }
+ });
+ });
+};
diff --git a/src/runtime/server/constants.js b/src/runtime/server/constants.js
new file mode 100644
index 0000000..1263e82
--- /dev/null
+++ b/src/runtime/server/constants.js
@@ -0,0 +1,4 @@
+import { presets } from '../env.js';
+
+// Shared route registry: "method path" key -> route descriptor.
+export const routes = new Map();
+// Server presets with local-development fallbacks.
+export const { port = 3000, baseURI = '/api' } = presets.server;
diff --git a/src/runtime/server/context.js b/src/runtime/server/context.js
new file mode 100644
index 0000000..fd5582f
--- /dev/null
+++ b/src/runtime/server/context.js
@@ -0,0 +1,70 @@
+import { presets } from '#env';
+import { Cookies } from './cookies.js';
+import { noop } from '#utils/native.js';
+import { setDataBaseContext } from '#db/context.js';
+import { respond, respondStream } from './response.js';
+import { setSecurityContext } from '#security/context.js';
+import { factory } from '#db/sql/query.js';
+import {
+ sendMessageToChannel,
+ sendMessageToSocket,
+ sendMessageToUser,
+ subscribeToChannel,
+ unsubscribeFromChannel,
+} from './messenger.js';
+
+// Per-request context. Instance fields (sql, headers, cookies) are fresh per
+// request; everything else lives on the prototype as a shared default.
+export class Context {
+  sql = factory(this);
+
+  // Flat [name, value, name, value, ...] list of response headers.
+  headers = [];
+  cookies = new Cookies(this);
+
+  // Queues a response header (names normalized to lowercase).
+  set(name, value) {
+    this.headers.push(name.toLowerCase(), value);
+  }
+
+  // One-time app bootstrap: merges `params` (server, websocket hooks) INTO
+  // the prototype so every future instance inherits them, seeds language
+  // presets, and installs the db and security mixins. Returns the prototype
+  // itself as the app-wide base context.
+  static init(params) {
+    const context = Object.assign(this.prototype, params);
+
+    context.language = context.lang = presets.language;
+    context.defaultLanguage = presets.language;
+
+    context.languages = presets.languages;
+
+    setDataBaseContext(context);
+    setSecurityContext(context);
+
+    return context;
+  }
+}
+
+// Shared per-request defaults; instances shadow these when assigned.
+Context.prototype.type = '';
+Context.prototype.status = 204;
+Context.prototype.route = null;
+Context.prototype.body = null;
+Context.prototype.error = null;
+Context.prototype.stream = null;
+Context.prototype.payload = null;
+Context.prototype.service = null;
+Context.prototype.request = null;
+Context.prototype.response = null;
+
+Context.prototype.path = '';
+
+// Language resolution state (see server/handler.js setContext).
+Context.prototype.lang = '';
+Context.prototype.language = '';
+Context.prototype.isMultiLang = false;
+
+// Response/abort lifecycle hooks.
+Context.prototype.isAborted = false;
+Context.prototype.onAborted = noop;
+Context.prototype.respond = respond;
+Context.prototype.respondStream = respondStream;
+
+// WebSocket messaging helpers (bound in server/messenger.js).
+Context.prototype.websocket = null;
+Context.prototype.onOpenWebsocket = noop;
+Context.prototype.onCloseWebsocket = noop;
+Context.prototype.sendMessageToUser = sendMessageToUser;
+Context.prototype.sendMessageToSocket = sendMessageToSocket;
+Context.prototype.sendMessageToChannel = sendMessageToChannel;
+Context.prototype.subscribeToChannel = subscribeToChannel;
+Context.prototype.unsubscribeFromChannel = unsubscribeFromChannel;
diff --git a/src/runtime/server/cookies.js b/src/runtime/server/cookies.js
new file mode 100644
index 0000000..2e82fc0
--- /dev/null
+++ b/src/runtime/server/cookies.js
@@ -0,0 +1,42 @@
+import { nullObject } from '#utils/native.js';
+
+export class Cookies {
+ constructor(context) {
+ this.context = context;
+ }
+
+ get(name) {
+ return this.data[name];
+ }
+
+ set(name, value, options) {
+ this.data[name] &&= value;
+
+ value = name + '=' + value;
+ value += '; path=' + (options?.path || '/');
+
+ if (options?.maxAge != null) {
+ value += '; max-age=' + options.maxAge;
+ } else if (options?.expires) {
+ value += '; expires=' + options.expires.toGMTString();
+ }
+
+ if (options?.httpOnly) value += '; httponly';
+ if (options?.sameSite) value += '; samesite=' + options.sameSite;
+
+ this.context.set('set-cookie', value);
+ }
+
+ parse(text) {
+ if (text) {
+ this.data = Object.create(null);
+
+ for (let vars = text.split(';'), i = 0; i < vars.length; i++) {
+ const param = vars[i].split('=', 2);
+ this.data[param[0].trim()] = param[1].trim();
+ }
+ }
+ }
+}
+
+Cookies.prototype.data = nullObject;
diff --git a/src/runtime/server/handler.js b/src/runtime/server/handler.js
new file mode 100644
index 0000000..e5b2f60
--- /dev/null
+++ b/src/runtime/server/handler.js
@@ -0,0 +1,81 @@
+import { presets } from '#env';
+import { NotFound } from '../exceptions/NotFound.js';
+import { factoryAccess } from '../security/access.js';
+import { stringify } from '../utils/native.js';
+import { setValidate } from './validate.js';
+import { getMetaRespond } from './meta.js';
+
+// Default response envelope when a route declares no meta.
+const respondDefault = (context, payload, data, id) => ({ id, data });
+// Chooses the responder: meta-aware (adds access info) or the default one.
+const getRespond = ({ meta }) => {
+  return meta ? getMetaRespond(meta) : respondDefault;
+};
+
+// Resolves the request language from the path segment. A leading '~' marks
+// a multi-language request ("~en", or bare "~" which falls back to the
+// preset default). Unknown languages yield 404.
+export const setContext = context => {
+  const { lang } = context;
+  const isMultiLang = lang[0] === '~';
+  const language = isMultiLang ? lang.slice(1) || presets.language : lang;
+
+  if (!context.languages.includes(language)) {
+    throw new NotFound(`Not found language: ${lang}`);
+  }
+
+  context.lang = language;
+  context.language = language;
+  context.isMultiLang = isMultiLang;
+
+  return context;
+};
+
+// Route entry point (bound as route.method, `this` = route descriptor).
+// Validates the payload, enforces the ACL, runs the action, and serializes
+// the result onto the context.
+export async function method(context, payload, id) {
+  const { action, checkAccess } = this;
+
+  context.route = this;
+  context.payload = payload;
+
+  this.validate(payload);
+
+  // For safe (GET) methods the first pass must not throw: the decisive
+  // check may depend on the loaded entity, so access is re-checked against
+  // `data` after the action when the first pass did not grant.
+  const isAccess = await checkAccess(context, payload, this.isSafeMethod);
+  const data = await action(context, payload);
+  if (!isAccess) await checkAccess(context, data);
+
+  if (data != null) {
+    context.status = 200;
+
+    // Raw Buffer results pass through untouched (type may be preset).
+    if (data.constructor === Buffer) {
+      context.type ||= 'json';
+      context.body = data;
+    } else {
+      context.type = 'json';
+      context.body = stringify(await this.respond(context, payload, data, id));
+    }
+  } else if (context.body || context.stream) {
+    // The action wrote the response itself via body/stream side channels.
+    context.status = 200;
+  }
+
+  return context;
+}
+
+// Finalizes a route descriptor for HTTP method `name` ('get', 'post', ...):
+// wires in the shared `method` entry point, the responder, the compiled ACL,
+// and the payload validator. Throws at boot time on misconfiguration.
+export const makeHandler = (routes, name) => {
+  const route = routes[name];
+
+  if (!route) {
+    throw new Error(`Method "${name}" Not Implemented`);
+  }
+
+  if (typeof route.action !== 'function') {
+    throw new Error(`Endpoint action is not function`);
+  }
+
+  route.method = method;
+  route.methodName = name;
+
+  route.respond = getRespond(route);
+  route.checkAccess = factoryAccess(route.access);
+
+  // GET is "safe": its ACL may be deferred until the entity is loaded.
+  route.isSafeMethod = name === 'get';
+  route.isBodyParams = !!route.params?.body || !!route.params?.files;
+
+  setValidate(route);
+
+  return route;
+};
diff --git a/src/runtime/server/health.js b/src/runtime/server/health.js
new file mode 100644
index 0000000..2d61f6e
--- /dev/null
+++ b/src/runtime/server/health.js
@@ -0,0 +1,31 @@
+import { Context } from './context.js';
+
+// Shared prototype context: gives health checks access to the app-wide db.
+const context = Context.prototype;
+
+// Liveness probe: replies with the build version (empty string when unset).
+export const checkHealth = response => {
+  response.end(process.env.APP_BUILD_VERSION ?? '');
+};
+
+// Readiness probe: runs SELECT 1. Empty 200 on success; 500 + JSON error
+// details on failure. `isOpen` guards against writing to an aborted response.
+export const checkDatabaseHealth = async response => {
+  let isOpen = true;
+
+  response.onAborted(() => {
+    isOpen = false;
+  });
+
+  try {
+    await context.db.unsafe('SELECT 1');
+    if (isOpen) response.end();
+  } catch (error) {
+    if (isOpen) {
+      const body = JSON.stringify({
+        code: error?.code,
+        message: error?.message,
+      });
+
+      response.writeStatus('500');
+      response.writeHeader('content-type', 'application/json');
+      response.end(body);
+    }
+  }
+};
diff --git a/src/runtime/server/messenger.js b/src/runtime/server/messenger.js
new file mode 100644
index 0000000..ad4afeb
--- /dev/null
+++ b/src/runtime/server/messenger.js
@@ -0,0 +1,98 @@
+import { stringify } from '#utils/native.js';
+import { Exception } from '#exceptions/Exception.js';
+import { routes } from './constants.js';
+
+// uid -> Set<WebSocket> of sockets connected to THIS process.
+const clients = new Map();
+
+// Dispatches an inbound WS message ({ id, method, params }) to the matching
+// route. Errors are rendered onto context.body via Exception.respond; a
+// reply is sent back only when the client supplied a request id.
+export const onMessage = async (ws, { id, method, params }) => {
+  const { context } = ws;
+  const route = routes.get(method);
+
+  try {
+    if (route) {
+      await route.method(context, params, id);
+    } else {
+      context.body = stringify({
+        id,
+        status: 404,
+        errors: [{ message: `Not found method ${method}` }],
+      });
+    }
+  } catch (error) {
+    Exception.respond(context, error, id);
+  }
+
+  if (id) {
+    context.sendMessageToSocket(context.body);
+  }
+};
+
+// Sends a raw message on this context's socket (corked for batching).
+// No-op once the connection is aborted.
+export function sendMessageToSocket(message) {
+  if (this.isAborted === false)
+    this.websocket.cork(() => {
+      this.websocket.send(message);
+    });
+}
+
+// Fans an RPC-style { method, params } notification out to every socket of
+// `uid` connected to this process.
+export function sendMessageToUser(uid, method, params) {
+  if (clients.has(uid)) {
+    const message = stringify({ method, params });
+
+    for (const ws of clients.get(uid)) {
+      ws.context.sendMessageToSocket(message);
+    }
+  }
+}
+
+// Publishes a { method, params } notification to a pub/sub channel.
+export function sendMessageToChannel(name, method, params) {
+  this.server.publish(name, stringify({ method, params }));
+}
+
+// Channel subscription helpers; no-ops after the connection aborts.
+export function subscribeToChannel(name) {
+  if (this.isAborted === false) this.websocket.subscribe(name);
+}
+
+export function unsubscribeFromChannel(name) {
+  if (this.isAborted === false) this.websocket.unsubscribe(name);
+}
+
+// WS open hook: registers the socket under its user's uid and runs the
+// app-provided onOpenWebsocket callback; a non-null return value is sent
+// back to the client as the first message. Callback errors are logged only.
+export const onOpen = async ws => {
+  const { context } = ws;
+  const { uid } = context;
+
+  context.websocket = ws;
+
+  if (clients.has(uid)) clients.get(uid).add(ws);
+  else clients.set(uid, new Set().add(ws));
+
+  try {
+    const response = await context.onOpenWebsocket(context);
+
+    if (response != null) {
+      context.sendMessageToSocket(stringify(response));
+    }
+  } catch (error) {
+    console.error(error);
+  }
+};
+
+// WS close hook: unregisters the socket (dropping the uid entry when it was
+// the last one) and runs the app-provided onCloseWebsocket callback.
+export const onClose = async ws => {
+  const { context } = ws;
+  const { uid } = context;
+
+  context.websocket = null;
+  context.isAborted = true;
+
+  if (clients.has(uid)) {
+    const sockets = clients.get(uid);
+
+    if (sockets.size === 1) clients.delete(uid);
+    else if (sockets.size > 1) clients.get(uid).delete(ws);
+
+    try {
+      await context.onCloseWebsocket(context);
+    } catch (error) {
+      console.error(error);
+    }
+  }
+};
diff --git a/src/runtime/server/meta.js b/src/runtime/server/meta.js
new file mode 100644
index 0000000..4036716
--- /dev/null
+++ b/src/runtime/server/meta.js
@@ -0,0 +1,63 @@
+import { isArray } from '#utils/native.js';
+
+// Joins a namespace and a field name into a flat access key ("ns_name").
+const getAccessName = (ns, name) => (ns ? ns + '_' + name : name);
+
+// Adds each { name: permissionFn } pair to `map`, grouping keys that share
+// the same permission function so it is evaluated only once per request.
+const setMapPermissions = (map, permissions, ns = '') => {
+  if (permissions)
+    for (const name of Object.keys(permissions)) {
+      const permission = permissions[name];
+      const key = getAccessName(ns, name);
+
+      if (map.has(permission)) map.get(permission).push(key);
+      else map.set(permission, [key]);
+    }
+};
+
+// Recursively collects a model's access map: its own access, its rules'
+// access, and its relations' models — all flattened under namespaced keys.
+const makeMapPermissions = (model, map, ns = '') => {
+  setMapPermissions(map, model.access, ns);
+
+  if (model.rules)
+    for (const name of Object.keys(model.rules))
+      setMapPermissions(map, model.rules[name].access, getAccessName(ns, name));
+
+  if (model.relations)
+    for (const name of Object.keys(model.relations))
+      makeMapPermissions(
+        model.relations[name].model,
+        map,
+        getAccessName(ns, name)
+      );
+
+  return map;
+};
+
+// Evaluates each grouped permission once against `entity` and fans the
+// result out to all keys sharing that permission. Sequential on purpose:
+// the checks may hit the database.
+const respondAccess = async (context, entity, map) => {
+  const access = {};
+
+  for (const [permission, keys] of map) {
+    const result = await permission(context, entity, true);
+    for (const key of keys) access[key] = result;
+  }
+
+  return access;
+};
+
+// Builds a meta-aware responder that augments the { id, data } envelope with
+// meta.access. For list responses the checks run against the request payload
+// (there is no single entity); for single entities, against the data itself.
+// Returns undefined when the route's meta declares no access map.
+export const getMetaRespond = meta => {
+  if (meta.access) {
+    const map = makeMapPermissions(meta.access, new Map());
+
+    const respond = async (context, payload, data, id) => ({
+      id,
+      data,
+      meta: {
+        access: await respondAccess(
+          context,
+          isArray(data) ? payload : data,
+          map
+        ),
+      },
+    });
+
+    return respond;
+  }
+};
diff --git a/src/runtime/server/openapi.js b/src/runtime/server/openapi.js
new file mode 100644
index 0000000..bc1f4d0
--- /dev/null
+++ b/src/runtime/server/openapi.js
@@ -0,0 +1,236 @@
+import { presets } from '#env';
+import { toCamelCase } from '#utils/string.js';
+import { baseURI, routes } from './constants.js';
+import { getRuleAsObject } from './validate.js';
+
+// Validator types with a direct OpenAPI equivalent; anything else is
+// rendered as a string with `format` carrying the original type name.
+const mapTypes = {
+  string: 'string',
+  number: 'number',
+  boolean: 'boolean',
+  object: 'object',
+  array: 'array',
+};
+
+// Converts a validator rule into an OpenAPI schema fragment (one level of
+// `items` for arrays; items may be a rule object or a bare type name).
+const getTypeSchema = ({ type, items }) => ({
+  type: mapTypes[type] ?? 'string',
+  format: mapTypes[type] ? undefined : type,
+  items: items && getTypeSchema({ type: items.type ?? items }),
+});
+
+// Reducer: builds the OpenAPI `examples` map from an enum's values.
+const toExampleValue = (object, value) => {
+  object[value] = { value, summary: value, type: 'string', format: 'string' };
+  return object;
+};
+
+// Renders one path/query validator rule as an OpenAPI parameter object.
+// Structured rules (object/array/tuple) are expressed via `content` so the
+// JSON schema survives; enums get an examples map; everything else is a
+// plain typed parameter.
+const getParameter = (rule, key, name) => {
+  switch (rule.type) {
+    case 'object':
+    case 'array':
+    case 'tuple': {
+      let { type, items, props } = rule;
+
+      if (items) {
+        //let examples = items.values?.reduce(toExampleValue, {});
+        items = getTypeSchema({ type: items.type ?? items });
+      }
+
+      return {
+        name,
+        in: key,
+        content: {
+          'application/json': {
+            schema: { type, items, properties: props },
+          },
+        },
+        required: !rule.optional,
+      };
+    }
+    case 'enum':
+      return {
+        name,
+        in: key,
+        required: !rule.optional,
+        schema: getTypeSchema(rule),
+        examples: rule.values.reduce(toExampleValue, {}),
+      };
+    default:
+      return {
+        name,
+        in: key,
+        required: !rule.optional,
+        schema: getTypeSchema(rule),
+      };
+  }
+};
+
+// Builds an operationId like "getUsersDetails" from method + path segments.
+const getOperationId = (method, dirName, fileName) =>
+  method + toCamelCase(dirName) + (fileName ? toCamelCase(fileName) : '');
+
+// Generates the OpenAPI 3.0 document from the runtime route registry.
+// The `{language}` server variable mirrors the path-based language
+// resolution ('~' = multi-language) handled by setContext.
+const makeOpenAPI = () => {
+  const paths = {};
+  const data = {
+    openapi: '3.0.0',
+    info: {
+      title: presets.app.id,
+      version: process.env.APP_BUILD_VERSION ?? '',
+    },
+    servers: [
+      {
+        url: baseURI + '/{language}',
+        variables: {
+          language: {
+            enum: ['~', ...presets.languages],
+            default: 'en',
+          },
+        },
+      },
+    ],
+    paths,
+  };
+
+  // Shared default response; routes with a binary `response` override it.
+  const responses = {
+    200: {
+      description: 'Successful Response',
+      content: { 'application/json': {} },
+    },
+  };
+
+  for (const route of routes.values()) {
+    let path = '/' + route.basePath;
+    let { description = '' } = route;
+
+    const dirs = route.basePath.split('/');
+    const operationId = getOperationId(route.methodName, dirs[1], dirs[2]);
+
+    // Path params are appended in declaration order as /{a}/{b}.
+    if (route.params?.path) {
+      path += '/{' + Object.keys(route.params.path).join('}/{') + '}';
+    }
+
+    // let args = route.params
+    //   ? '{ ' +
+    //     Object.keys({
+    //       ...route.params.path,
+    //       ...route.params.query,
+    //       ...route.params.body,
+    //       ...route.params.files,
+    //     }).join(', ') +
+    //     ' }'
+    //   : '';
+
+    // description +=
+    //   '```\n' + dirs[0] + '.' + operationId + '(' + args + ')\n```';
+
+    paths[path] ??= {};
+
+    const api = {
+      tags: [dirs[0]],
+      description,
+      parameters: [],
+      responses: route.response
+        ? {
+            200: {
+              description: 'Successful Response',
+              content: {
+                [route.response.type]: {
+                  schema: {
+                    type: 'string',
+                    format: 'binary',
+                  },
+                },
+              },
+            },
+          }
+        : responses,
+      operationId,
+    };
+
+    paths[path][route.methodName] = api;
+
+    if (route.params) {
+      for (const key of Object.keys(route.params)) {
+        for (const name of Object.keys(route.params[key])) {
+          const rule = getRuleAsObject(route.params[key][name]);
+
+          switch (key) {
+            case 'path':
+            case 'query':
+              api.parameters.push(getParameter(rule, key, name));
+              break;
+
+            case 'body':
+            case 'files':
+              {
+                // Any file param switches the whole body to multipart.
+                const contentType = route.params.files
+                  ? 'multipart/form-data'
+                  : 'application/json';
+
+                api.requestBody ??= {
+                  content: {
+                    [contentType]: {
+                      schema: {
+                        type: 'object',
+                        required: [],
+                        properties: {},
+                      },
+                    },
+                  },
+                };
+
+                const { required, properties } =
+                  api.requestBody.content[contentType].schema;
+
+                if (!rule.optional) required.push(name);
+
+                properties[name] =
+                  key === 'files'
+                    ? {
+                        type: 'string',
+                        format: 'binary',
+                      }
+                    : getTypeSchema(rule);
+              }
+              break;
+          }
+        }
+      }
+    }
+  }
+
+  // Stable, alphabetical path order for reproducible documents.
+  data.paths = Object.fromEntries(
+    Object.entries(paths).sort((a, b) => a[0].localeCompare(b[0]))
+  );
+
+  return data;
+};
+
+// Serves the freshly generated OpenAPI document as JSON (never cached, so
+// it always reflects the current route registry).
+export const getOpenapiJSON = response => {
+  response
+    .writeHeader('cache-control', 'no-cache')
+    .writeHeader('content-type', 'application/json')
+    .end(JSON.stringify(makeOpenAPI()));
+};
+
+// Serves the Swagger UI host page.
+// NOTE(review): the HTML template below looks incomplete (no visible tags/
+// script includes) — possibly mangled in transit; verify the page renders.
+export const getOpenapiUI = response => {
+  response
+    .writeHeader('cache-control', 'no-cache')
+    .writeHeader('content-type', 'text/html; charset=utf-8')
+    .end(`
+
+
+  Swagger UI - ${presets.app.id}
+
+
+
+
+
+
+
+  `);
+};
diff --git a/src/runtime/server/payload.js b/src/runtime/server/payload.js
new file mode 100644
index 0000000..b6dc1e8
--- /dev/null
+++ b/src/runtime/server/payload.js
@@ -0,0 +1,129 @@
+import { getParts } from 'uWebSockets.js';
+import { makeHandler } from './handler.js';
+import { getMethodCast } from './validate.js';
+import { assign, parse, noop, idx } from '#utils/native.js';
+import { BadRequest } from '#exceptions/BadRequest.js';
+
+export const readBody = context =>
+ new Promise((resolve, reject) => {
+ let offset = 0;
+ const buffer = Buffer.allocUnsafe(context.request.length);
+
+ context.onAborted = reject;
+ context.response.onData((chunk, done) => {
+ buffer.set(Buffer.from(chunk), offset);
+ if (done) {
+ context.onAborted = noop;
+ resolve(buffer);
+ } else {
+ offset += chunk.byteLength;
+ }
+ });
+ });
+
+export const makePayload = (routes, name) => {
+ const route = routes[name];
+ const { params = {} } = route;
+
+ route.parameters = { path: null, query: null, body: null, files: null };
+
+ if (params.path) {
+ route.parameters.path = Object.keys(params.path).map((key, index) => ({
+ key,
+ index: index + 1,
+ cast: getMethodCast(params.path[key]),
+ }));
+ }
+
+ if (params.query) {
+ route.parameters.query = Object.keys(params.query).map(key => ({
+ key,
+ cast: getMethodCast(params.query[key]),
+ }));
+ }
+
+ if (params.body) {
+ route.parameters.body = Object.create(null);
+
+ for (const key of Object.keys(params.body)) {
+ route.parameters.body[key] = {
+ cast: getMethodCast(params.body[key]),
+ };
+ }
+ }
+
+ if (params.files) {
+ route.parameters.body ??= Object.create(null);
+
+ for (const key of Object.keys(params.files)) {
+ route.parameters.body[key] = {
+ cast: idx,
+ };
+ }
+ }
+
+ return makeHandler(routes, name);
+};
+
+export const getPayload = (route, request) => {
+ const payload = Object.create(null);
+ const { path, query } = route.parameters;
+
+ if (path !== null) {
+ for (let i = 0; i < path.length; i++) {
+ const { key, index, cast } = path[i];
+ payload[key] = cast(request.getParameter(index));
+ }
+ }
+
+ if (query !== null) {
+ for (let i = 0; i < query.length; i++) {
+ const { key, cast } = query[i];
+ const value = request.getQuery(key);
+
+ if (value !== '') payload[key] = cast(value);
+ }
+ }
+
+ return payload;
+};
+
+export const setPayloadFromBody = (route, context, payload, buffer) => {
+ const { type } = context.request;
+ const { body } = route.parameters;
+
+ if (type === 'application/json') {
+ try {
+ assign(payload, parse(buffer.toString()));
+ } catch (error) {
+ throw BadRequest.of(error);
+ }
+ } else if (type.startsWith('multipart/form-data')) {
+ const parts = getParts(buffer, type);
+
+ if (!parts) {
+ throw new BadRequest('Invalid multipart body');
+ }
+
+ for (const { name, type, data, filename } of parts)
+ if (body[name]) {
+ if (filename) {
+ const size = data.byteLength;
+ const buffer = Buffer.allocUnsafe(size);
+
+ buffer.set(Buffer.from(data));
+
+ payload[name] = { name: filename, type, size, buffer };
+ } else {
+ payload[name] = body[name].cast(Buffer.from(data).toString());
+ }
+ }
+ } else if (type === 'application/x-www-form-urlencoded') {
+ const entries = new URLSearchParams(buffer.toString()).entries();
+
+ for (const [name, value] of entries)
+ if (value && body[name]) payload[name] = body[name].cast(value);
+ } else {
+ throw new BadRequest('Invalid request content-type');
+ }
+};
diff --git a/src/runtime/server/request.js b/src/runtime/server/request.js
new file mode 100644
index 0000000..0935724
--- /dev/null
+++ b/src/runtime/server/request.js
@@ -0,0 +1,70 @@
+import { Context } from './context.js';
+import { setContext } from './handler.js';
+import { authorize } from '#security/auth/authorize.js';
+import { Exception } from '#exceptions/Exception.js';
+import {
+ makePayload,
+ getPayload,
+ readBody,
+ setPayloadFromBody,
+} from './payload.js';
+
+export const createContext = (request, response) => {
+ const context = new Context();
+
+ response.onAborted(() => {
+ context.isAborted = true;
+ context.response = null;
+ context.onAborted();
+ });
+
+ context.path = request.getUrl();
+ context.lang = request.getParameter(0);
+
+ context.response = response;
+ context.cookies.parse(request.getHeader('cookie'));
+
+ context.request = {
+ type: request.getHeader('content-type'),
+ length: +request.getHeader('content-length') || 0,
+ ip: (
+ request.getHeader('x-forwarded-for') ||
+ Buffer.from(response.getProxiedRemoteAddressAsText()).toString() ||
+ Buffer.from(response.getRemoteAddressAsText()).toString()
+ ).split(',')[0],
+ };
+
+ return context;
+};
+
+const handleRequest = async (route, request, response) => {
+ const context = createContext(request, response);
+
+ try {
+ setContext(context);
+ const payload = getPayload(route, request);
+
+ if (route.isBodyParams) {
+ setPayloadFromBody(route, context, payload, await readBody(context));
+ }
+
+ await authorize(context, route);
+ await route.method(context, payload);
+ } catch (error) {
+ Exception.respond(context, error);
+ }
+
+ if (context.isAborted === false) {
+ response.cork(() => {
+ context.respond();
+ });
+ }
+};
+
+export const factoryHandler = (routes, name) => {
+ const route = makePayload(routes, name);
+
+ return (response, request) => {
+ handleRequest(route, request, response);
+ };
+};
diff --git a/src/runtime/server/response.js b/src/runtime/server/response.js
new file mode 100644
index 0000000..adc5eed
--- /dev/null
+++ b/src/runtime/server/response.js
@@ -0,0 +1,69 @@
+import { createGzip, gzipSync } from 'zlib';
+
+export function respond() {
+ const { response, headers, body } = this;
+
+ if (this.status !== 200) {
+ response.writeStatus(this.status + '');
+ }
+
+ if (this.type) {
+ response.writeHeader(
+ 'content-type',
+ this.type === 'json' ? 'application/json' : this.type
+ );
+ }
+
+ for (let i = 0; i < headers.length; ) {
+ response.writeHeader(headers[i++], headers[i++]);
+ }
+
+ if (this.stream !== null) {
+ this.stream
+ .on('data', chunk => {
+ if (this.isAborted === false) {
+ response.write(chunk);
+ }
+ })
+ .on('error', error => {
+ console.error(error);
+ if (this.isAborted === false) {
+ response.close();
+ }
+ })
+ .on('close', () => {
+ if (this.isAborted === false) {
+ response.end();
+ }
+ });
+ } else if (body == null) {
+ response.end();
+ } else if (body.length > 2000) {
+ response.writeHeader('content-encoding', 'gzip');
+ response.end(gzipSync(body));
+ } else {
+ response.end(body);
+ }
+}
+
+export function respondStream({ stream, type, fileName, compress = false }) {
+ this.status = 200;
+ this.type = type || 'application/octet-stream';
+ this.set('cache-control', 'no-store, no-transform');
+
+ if (fileName) {
+ this.set(
+ 'content-disposition',
+ `attachment; filename="${encodeURIComponent(fileName)}"`
+ );
+ }
+
+ if (compress) {
+ this.set('content-encoding', 'gzip');
+ this.stream = stream.pipe(createGzip());
+ } else {
+ this.stream = stream;
+ }
+
+ return stream;
+}
diff --git a/src/runtime/server/router.js b/src/runtime/server/router.js
new file mode 100644
index 0000000..d8a4e4c
--- /dev/null
+++ b/src/runtime/server/router.js
@@ -0,0 +1,104 @@
+import { readdir } from 'fs/promises';
+import { baseURI, routes } from './constants.js';
+import { factoryHandler } from './request.js';
+import { CWD } from '../utils/location.js';
+import { keys, returnNullArray } from '#utils/native.js';
+import { websocket } from './websocket.js';
+import { getOpenapiUI, getOpenapiJSON } from './openapi.js';
+import { checkHealth, checkDatabaseHealth } from './health.js';
+
+export const root = CWD + '/src/app';
+
+const wft = { withFileTypes: true };
+
+const getBasePath = params => {
+ const path = params.appName + '/' + params.apiName;
+ return params.name === 'index' ? path : path + '/' + params.name;
+};
+
+const bindEndpoint =
+ params =>
+ ({ methods }) => {
+ if (!methods) return;
+
+ const basePath = getBasePath(params);
+ const rootPath = baseURI + '/:lang/' + basePath;
+
+ for (const name of keys(methods)) {
+ const route = methods[name];
+
+ route.basePath = basePath;
+ route.name = name.toUpperCase() + '/' + basePath;
+
+ routes.set(route.name, route);
+
+ const path = route.params?.path
+ ? rootPath + '/:' + keys(route.params.path).join('/:')
+ : rootPath;
+
+ //console.log(`${method}: ${path}`);
+ params.router[name === 'delete' ? 'del' : name](
+ path,
+ factoryHandler(methods, name)
+ );
+ }
+ };
+
+const loadApiFiles = async ({ router, promises }, appName, apiName) => {
+ const path = root + '/' + appName + '/' + apiName + '/api';
+
+ for (const { name } of await readdir(path, wft).catch(returnNullArray)) {
+ if (name.endsWith('.js'))
+ promises.push(
+ // eslint-disable-next-line node/no-unsupported-features/es-syntax
+ import('file://' + path + '/' + name).then(
+ bindEndpoint({ router, appName, apiName, name: name.slice(0, -3) })
+ )
+ );
+ }
+};
+
+const loadAppFolders = async (params, appName) => {
+ const path = root + '/' + appName;
+
+ for (const dirent of await readdir(path, wft))
+ if (dirent.isDirectory()) {
+ params.promises.push(loadApiFiles(params, appName, dirent.name));
+ }
+};
+
+export const bindEndpoints = async router => {
+ const promises = [];
+ const params = { router, promises };
+
+ router
+ .get(baseURI + '/docs', getOpenapiUI)
+ .get(baseURI + '/openapi.json', getOpenapiJSON)
+ .ws(baseURI + '/websocket/:lang', websocket)
+ .get(baseURI + '/health-check', checkHealth)
+ .get(baseURI + '/health-check-db', checkDatabaseHealth)
+ .any('/*', (response, request) => {
+ response
+ .writeStatus('501')
+ .writeHeader('cache-control', 'no-store')
+ .writeHeader('content-type', 'application/json')
+ .end(
+ JSON.stringify({
+ status: 501,
+ errors: [
+ {
+ message: request.getUrl().includes('//')
+ ? 'Double slashes in url'
+ : 'Not Implemented',
+ },
+ ],
+ })
+ );
+ });
+
+ for (const dirent of await readdir(root, wft))
+ if (dirent.isDirectory())
+ promises.push(loadAppFolders(params, dirent.name));
+
+ for (let i = 0; i < promises.length; i++) await promises[i];
+};
diff --git a/src/runtime/server/services.js b/src/runtime/server/services.js
new file mode 100644
index 0000000..baf32de
--- /dev/null
+++ b/src/runtime/server/services.js
@@ -0,0 +1,22 @@
+import { factory } from '#db/sql/query.js';
+import { securityService } from '#security/service.js';
+import { intlService } from '#utils/intl.js';
+
+const factoryContext = (context, service) => {
+ const ctx = Object.create(context);
+
+ ctx.sql = factory(ctx);
+ ctx.service = service;
+ ctx.state = Object.create(null);
+
+ return ctx;
+};
+
+export const loadServices = async (context, services = []) => {
+  await securityService(factoryContext(context, securityService));
+  await intlService(factoryContext(context, intlService));
+
+ for (const service of services) {
+ service(factoryContext(context, service))?.catch(console.error);
+ }
+};
diff --git a/src/runtime/server/validate.js b/src/runtime/server/validate.js
new file mode 100644
index 0000000..6c03953
--- /dev/null
+++ b/src/runtime/server/validate.js
@@ -0,0 +1,56 @@
+import { BadRequest } from '#exceptions/BadRequest.js';
+import { validate, validator } from '#utils/validate.js';
+import { idx, noop, parse, isString } from '../utils/native.js';
+
+export const setValidate = route => {
+ const schema = {
+ ...route.params?.path,
+ ...route.params?.query,
+ ...route.params?.body,
+ ...route.params?.files,
+ };
+
+ if (Object.keys(schema).length) {
+ schema.$$strict = true;
+ route.validate = validate;
+ route.isValidPayload = validator.compile(schema);
+ } else {
+ route.validate = noop;
+ }
+};
+
+const castBoolean = value =>
+ value === 'true' ? true : value === 'false' ? false : value;
+
+const castJSON = value => {
+ try {
+ return parse(value);
+ } catch (error) {
+ throw BadRequest.of(error);
+ }
+};
+
+export const getRuleAsObject = rule =>
+ isString(rule)
+ ? validator.parseShortHand(rule)
+ : rule.$$type
+ ? validator.parseShortHand(rule.$$type)
+ : rule;
+
+export const getMethodCast = rule => {
+ switch (getRuleAsObject(rule).type) {
+ case 'number':
+ return Number;
+
+ case 'boolean':
+ return castBoolean;
+
+ case 'object':
+ case 'array':
+ case 'tuple':
+ return castJSON;
+
+ default:
+ return idx;
+ }
+};
diff --git a/src/runtime/server/websocket.js b/src/runtime/server/websocket.js
new file mode 100644
index 0000000..fa1e582
--- /dev/null
+++ b/src/runtime/server/websocket.js
@@ -0,0 +1,63 @@
+import { SHARED_COMPRESSOR } from 'uWebSockets.js';
+import { authorize } from '#security/auth/authorize.js';
+import { parse } from '#utils/native.js';
+
+import { setContext } from './handler.js';
+import { createContext } from './request.js';
+import { onOpen, onMessage, onClose } from './messenger.js';
+
+export const websocket = {
+ idleTimeout: 960,
+ sendPingsAutomatically: true,
+ compression: SHARED_COMPRESSOR,
+ maxPayloadLength: 16 * 1024 * 1024,
+
+ upgrade: async (res, req, ctx) => {
+ const context = createContext(req, res);
+
+ const secWebSocketKey = req.getHeader('sec-websocket-key');
+ const secWebSocketProtocol = req.getHeader('sec-websocket-protocol');
+ const secWebSocketExtensions = req.getHeader('sec-websocket-extensions');
+
+ try {
+ await authorize(setContext(context));
+ } catch (error) {
+ context.uid = '';
+ context.body = error?.message;
+ context.status = error?.status;
+ }
+
+ if (context.isAborted === false) {
+ context.cookies = null;
+ context.response = null;
+
+ res.upgrade(
+ { context },
+ secWebSocketKey,
+ secWebSocketProtocol,
+ secWebSocketExtensions,
+ ctx
+ );
+ }
+ },
+
+ open: ws => {
+ if (ws.context.uid) {
+ onOpen(ws);
+ } else if (ws.context.body) {
+ ws.end(ws.context.status, ws.context.body);
+ } else {
+ ws.end(401);
+ }
+ },
+
+ message: (ws, message) => {
+ try {
+ onMessage(ws, parse(Buffer.from(message)));
+ } catch (error) {
+ ws.end(400, error?.message);
+ }
+ },
+
+ close: onClose,
+};
diff --git a/src/runtime/test/dev.js b/src/runtime/test/dev.js
new file mode 100644
index 0000000..04571b9
--- /dev/null
+++ b/src/runtime/test/dev.js
@@ -0,0 +1,25 @@
+import { migrate } from '#db/migrate/run.js';
+import { loadServices } from '#server/services.js';
+import { TREE } from './internals/tree.js';
+import { loadTest } from './internals/loader.js';
+import { runTests } from './internals/actions.js';
+import { createContext } from './internals/context.js';
+import { reportErrorEndTests } from './internals/report.js';
+import { confirmCommit } from './internals/process.js';
+
+export const runOneTestFile = async (path, name) => {
+ const context = await createContext({
+ isSafePoint: false,
+ isRunAllTests: false,
+ });
+ // && process.pid - process.ppid === 1
+ await loadTest(path, name);
+ if (context.isMigrate) {
+ await migrate();
+ }
+
+ await loadServices(context);
+ await context
+ .transaction(() => runTests(context, TREE).then(confirmCommit))
+ .catch(reportErrorEndTests);
+};
diff --git a/src/runtime/test/internals/actions.js b/src/runtime/test/internals/actions.js
new file mode 100644
index 0000000..f341f1f
--- /dev/null
+++ b/src/runtime/test/internals/actions.js
@@ -0,0 +1,28 @@
+import { reportPass, reportSkipped, reportFail } from './report.js';
+
+export const runTests = async (context, tree) => {
+ for (const [test, node] of tree) {
+ if (test.skipped) {
+ reportSkipped(test, node);
+ continue;
+ }
+
+ if (context.isSafePoint)
+ await context.db.unsafe(`SAVEPOINT "${node.index}"`);
+
+ try {
+ test.result = await test.run();
+
+ reportPass(test, node);
+
+ if (node.children.size) {
+ await runTests(context, node.children);
+ }
+ } catch (error) {
+ reportFail(test, error);
+ }
+
+ if (context.isSafePoint)
+ await context.db.unsafe(`ROLLBACK TO SAVEPOINT "${node.index}"`);
+ }
+};
diff --git a/src/runtime/test/internals/class.js b/src/runtime/test/internals/class.js
new file mode 100644
index 0000000..ead16aa
--- /dev/null
+++ b/src/runtime/test/internals/class.js
@@ -0,0 +1,26 @@
+import { Context } from './context.js';
+import { LIST, makeParent, getParentResult } from './tree.js';
+
+export class Test {
+ count = 0;
+ stack = 0;
+ result = null;
+ skipped = false;
+ index = LIST.push(this);
+
+ constructor(describe, options, assert) {
+ this.assert = assert;
+ this.describe = describe;
+ this.context = options.context;
+ this.parent = makeParent(options);
+
+ if (options.skipped) this.skipped = true;
+ }
+
+ async run() {
+ return await this.assert(
+ new Context(this.context),
+ getParentResult(this.parent)
+ );
+ }
+}
diff --git a/src/runtime/test/internals/context.js b/src/runtime/test/internals/context.js
new file mode 100644
index 0000000..69565f2
--- /dev/null
+++ b/src/runtime/test/internals/context.js
@@ -0,0 +1,44 @@
+import { presets } from '#env';
+import { randomUUID } from 'crypto';
+import { setContext } from '#server/handler.js';
+import { DEFAULT_USER } from '#security/auth/user.js';
+import { Context as ContextServer } from '#server/context.js';
+import { getBranchName } from './process.js';
+
+export class Context extends ContextServer {
+ constructor({ user = DEFAULT_USER, lang = presets.language, request } = {}) {
+ super();
+
+ this.lang = lang;
+ this.user = user;
+ this.uid = user.uid;
+ this.request = request;
+
+ this.expected = {};
+
+
+ setContext(this);
+ }
+}
+
+export const context = Context.prototype;
+
+context.isRunAllTests = false;
+
+export const createContext = async options => {
+ presets.app.isTesting = true;
+
+ options.isSafePoint ??= true;
+ options.isMigrate = !process.env.DB_NAME;
+
+ if (options.isMigrate) {
+ if (options.isRunAllTests) {
+ presets.db.database = randomUUID();
+ } else {
+ presets.db.database = await getBranchName();
+ }
+ process.env.DB_NAME = presets.db.database;
+ }
+
+ return Context.init(options);
+};
diff --git a/src/runtime/test/internals/loader.js b/src/runtime/test/internals/loader.js
new file mode 100644
index 0000000..ef5e5c8
--- /dev/null
+++ b/src/runtime/test/internals/loader.js
@@ -0,0 +1,57 @@
+import { readdir } from 'fs/promises';
+import { root } from '#server/router.js';
+import { returnNullArray } from '#utils/native.js';
+import { LIST, makeTree } from './tree.js';
+import { Test } from './class.js';
+
+const wft = { withFileTypes: true };
+
+const loadFiles = async (promises, path) => {
+ for (const dirent of await readdir(path, wft).catch(returnNullArray)) {
+ if (dirent.isDirectory())
+ promises.push(loadFiles(promises, path + '/' + dirent.name));
+ else if (dirent.name.endsWith('.js'))
+ // eslint-disable-next-line node/no-unsupported-features/es-syntax
+ promises.push(import('file://' + path + '/' + dirent.name));
+ }
+};
+
+const loadAppSubfolder = async (promises, path) => {
+ for (const dirent of await readdir(path, wft))
+ if (dirent.isDirectory()) {
+ promises.push(
+ loadFiles(promises, path + '/' + dirent.name + '/api'),
+ loadFiles(promises, path + '/' + dirent.name + '/tests')
+ );
+ }
+};
+
+export const loadTests = async () => {
+ const promises = [];
+
+ for (const dirent of await readdir(root, wft))
+ if (dirent.isDirectory())
+ promises.push(loadAppSubfolder(promises, root + '/' + dirent.name));
+
+ for (let i = 0; i < promises.length; i++) await promises[i];
+
+ makeTree();
+};
+
+export const loadTest = async (path, name) => {
+ // eslint-disable-next-line node/no-unsupported-features/es-syntax
+ const module = await import('file://' + root + '/' + path + '.js');
+
+ const tests = new Set(name ? [module[name]] : Object.values(module));
+
+ for (const test of tests)
+ if (test instanceof Test) {
+ test.skipped = false;
+ for (const parent of test.parent.tests) tests.add(parent);
+ }
+
+ for (let i = LIST.length; i--; )
+ if (tests.has(LIST[i]) === false) LIST.splice(i, 1);
+
+ makeTree();
+};
diff --git a/src/runtime/test/internals/payload.js b/src/runtime/test/internals/payload.js
new file mode 100644
index 0000000..b9f4bc1
--- /dev/null
+++ b/src/runtime/test/internals/payload.js
@@ -0,0 +1,20 @@
+import { resolve } from 'path';
+import { fileURLToPath } from 'url';
+import { readFileSync } from 'fs';
+
+const types = {
+ csv: 'text/csv',
+ tsv: 'text/tsv',
+ xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+};
+
+export const file = ({ url }, path, options) => {
+ path = resolve(fileURLToPath(url.slice(0, url.lastIndexOf('/'))), path);
+
+ const buffer = readFileSync(path, options);
+ const name = path.slice(path.lastIndexOf('/') + 1);
+ const size = buffer.byteLength ?? buffer.length;
+ const type = types[path.slice(path.lastIndexOf('.') + 1)] ?? '';
+
+ return { buffer, size, type, name, path };
+};
diff --git a/src/runtime/test/internals/process.js b/src/runtime/test/internals/process.js
new file mode 100644
index 0000000..ee28031
--- /dev/null
+++ b/src/runtime/test/internals/process.js
@@ -0,0 +1,28 @@
+import { baseURI } from '#utils/location.js';
+import { exec } from 'child_process';
+
+export const getBranchName = () => ({
+ then(resolve, reject) {
+ exec('git branch --show-current', (error, stdout, stderr) => {
+ if (error) reject(stderr.trim());
+ else resolve(stdout.trim());
+ });
+ },
+});
+
+export const confirmCommit = () => ({
+ then(resolve, reject) {
+ console.log('\nCommit and start server? (Yes/No)');
+
+ process.stdin.once('data', data => {
+ if (data.toString().trim().toLowerCase()[0] === 'y') {
+ // eslint-disable-next-line node/no-unsupported-features/es-syntax
+ import(baseURI + 'bin/app.js');
+ resolve();
+ } else {
+ console.log('Rollback');
+ reject(null);
+ }
+ });
+ },
+});
diff --git a/src/runtime/test/internals/report.js b/src/runtime/test/internals/report.js
new file mode 100644
index 0000000..6afe512
--- /dev/null
+++ b/src/runtime/test/internals/report.js
@@ -0,0 +1,99 @@
+import { ERROR_FORMATTED, ERROR_REPORTED } from '#utils/native.js';
+import { bold, red, green, blue, grey } from '#utils/console.js';
+import { baseURI } from '#utils/location.js';
+import { LIST, INDEX } from './tree.js';
+
+const filterStack = line => line.includes(baseURI);
+const mapStack = line => line.slice(line.indexOf('file://'), -1);
+
+export const reportStartTests = () => {
+ console.log(green(bold('\nTest start \n')));
+};
+
+export const reportEndTests = () => {
+ console.log(green(bold('\nTest end')));
+};
+
+export const reportFail = (test, error) => {
+ if (!error || error[ERROR_REPORTED]) {
+ return;
+ }
+
+ console.error(red(bold('✕ FAIL: ')) + test.describe);
+ console.log();
+ console.group();
+
+ if (error[ERROR_FORMATTED]) {
+ delete error[ERROR_FORMATTED];
+ console.error(error);
+ } else {
+ const message = error?.message || String(error);
+ const stack = error?.stack || '';
+
+ if (error?.status) {
+ console.error(red('Status ' + error.status + ' ' + message));
+ } else {
+ console.error(message);
+ }
+
+ if (error?.errors) {
+ if (Array.isArray(error.errors))
+ error.errors.forEach(error => console.error(error));
+ else console.error(error.errors);
+ }
+
+ error[ERROR_FORMATTED] = true;
+
+ console.error(
+ blue(stack.split('\n').filter(filterStack).map(mapStack).join('\n'))
+ );
+ }
+
+ console.groupEnd();
+ console.log();
+ process.exitCode = 1;
+ error[ERROR_REPORTED] = true;
+};
+
+export const reportAborted = ({ message }) => {
+ console.log(red(bold(message)));
+ process.exitCode = 1;
+};
+
+export const reportPass = (test, node) => {
+ if (node.target === test) {
+ console.log(green(bold('✓ PASS: ')) + test.describe);
+ }
+};
+
+export const reportSkipped = (test, node) => {
+ if (node.target === test) {
+ console.log(grey(bold('- SKIP: ')) + test.describe);
+ }
+};
+
+export const reportErrorEndTests = error => {
+ if (error && !error[ERROR_REPORTED]) console.error(error);
+};
+
+export const printTestTree = (tree, indents = '') => {
+ let index = 0;
+ let size = tree.size;
+
+ if (indents === '') {
+ console.log('Total tests ' + bold(LIST.length));
+ }
+
+ for (const [test, node] of tree) {
+ let isLast = ++index === size;
+ let num = node.index.toString().padStart(INDEX.toString().length, ' ');
+
+ console.log(
+ (test.stack > 1 ? red(num) : num) +
+ ' ' +
+ grey(indents + (isLast ? '└──' : '├──')) +
+ test.describe
+ );
+ printTestTree(node.children, indents + (isLast ? ' ' : '│ '));
+ }
+};
diff --git a/src/runtime/test/internals/request.js b/src/runtime/test/internals/request.js
new file mode 100644
index 0000000..6c32781
--- /dev/null
+++ b/src/runtime/test/internals/request.js
@@ -0,0 +1,28 @@
+import { factoryResponse } from './response.js';
+import { makeHandler } from '../../server/handler.js';
+import { Exception } from '#exceptions/Exception.js';
+
+const factoryError = context => error =>
+ factoryResponse(Exception.respond(context, error));
+
+function expected(data) {
+ if (data) this.context.expected = data;
+ return this;
+}
+
+export const factoryRequest = (methods, name) => {
+ const route = makeHandler(methods, name);
+
+ return (context, payload = {}) => {
+ context = Object.create(context);
+
+ const promise = route
+ .method(context, Object.assign(Object.create(null), payload))
+ .then(factoryResponse, factoryError(context));
+
+ promise.context = context;
+ promise.expected = expected;
+
+ return promise;
+ };
+};
diff --git a/src/runtime/test/internals/response.js b/src/runtime/test/internals/response.js
new file mode 100644
index 0000000..3f30e65
--- /dev/null
+++ b/src/runtime/test/internals/response.js
@@ -0,0 +1,30 @@
+import { objectContaining } from '#utils/assert.js';
+
+export const factoryResponse = context => {
+ const { status, type, expected } = context;
+
+ const isJson = type === 'json';
+ const body = isJson ? JSON.parse(context.body) : context.body;
+
+ if (!expected.status) {
+ expected.status = status < 300 ? status : 200;
+ }
+
+ if (status === expected.status) {
+ const response = {
+ status,
+ type,
+ body,
+ headers: context.headers,
+ data: isJson ? body.data : null,
+ meta: isJson ? body.meta : null,
+ };
+ objectContaining(response, expected);
+ } else if (context.error) {
+ throw context.error;
+ } else {
+ objectContaining({ status }, { status: expected.status });
+ }
+
+ return isJson ? body.data : body;
+};
diff --git a/src/runtime/test/internals/tree.js b/src/runtime/test/internals/tree.js
new file mode 100644
index 0000000..06ca37f
--- /dev/null
+++ b/src/runtime/test/internals/tree.js
@@ -0,0 +1,85 @@
+export let INDEX = 0;
+
+export const LIST = [];
+export const TREE = new Map();
+
+const createNode = (target = null) => ({
+ target,
+ index: ++INDEX,
+ children: new Map(),
+});
+
+export const makeParent = ({ parents = {} }) => {
+ const path = [];
+ const names = Object.keys(parents);
+ const tests = Object.values(parents);
+
+ for (let i = 0; i < tests.length; i++) {
+ const test = tests[i];
+ const parentPath = test.parent.path;
+
+ test.count++;
+
+ for (let p = 0; p < parentPath.length; p++)
+ if (path.includes(parentPath[p]) === false) path.push(parentPath[p]);
+
+ if (path.includes(test) === false) path.push(test);
+ }
+
+ return { deep: path.length, path, names, tests };
+};
+
+const sortList = (a, b) =>
+ a.parent.deep === b.parent.deep
+ ? a.count === b.count
+ ? a.index - b.index
+ : b.count - a.count
+ : b.parent.deep - a.parent.deep;
+
+const sortPath = (a, b) =>
+ a.parent.path.includes(b)
+ ? 1
+ : a.count === b.count
+ ? a.index - b.index
+ : b.count - a.count;
+
+const appendNode = test => {
+ if (test.stack) return;
+
+ let tree = TREE;
+ const { path } = test.parent;
+
+ if (test.parent.tests.length) {
+ path.sort(sortPath);
+
+ for (let node, i = 0; i < path.length; i++) {
+ const key = path[i];
+
+ if (tree.has(key)) {
+ node = tree.get(key);
+ } else {
+ node = createNode(++key.stack === 1 ? key : null);
+ tree.set(key, node);
+ }
+
+ tree = node.children;
+ }
+ }
+
+ test.stack++;
+ tree.set(test, createNode(test));
+
+ //console.log([...path, test].map(test => test.index).join(' -> '));
+};
+
+export const makeTree = () => {
+ LIST.sort(sortList);
+
+ for (let i = 0; i < LIST.length; i++) appendNode(LIST[i]);
+};
+
+export const getParentResult = ({ tests, names }) => {
+ const result = {};
+ for (let i = 0; i < tests.length; i++) result[names[i]] = tests[i].result;
+ return result;
+};
diff --git a/src/runtime/test/run.js b/src/runtime/test/run.js
new file mode 100644
index 0000000..2356b22
--- /dev/null
+++ b/src/runtime/test/run.js
@@ -0,0 +1,42 @@
+import { migrate } from '#db/migrate/run.js';
+import { loadServices } from '#server/services.js';
+import { TREE } from './internals/tree.js';
+import { loadTests } from './internals/loader.js';
+import { runTests } from './internals/actions.js';
+import { createContext } from './internals/context.js';
+import {
+ printTestTree,
+ reportEndTests,
+ reportStartTests,
+ reportErrorEndTests,
+} from './internals/report.js';
+
+export const printAllTestTree = async () => {
+ await loadTests();
+ printTestTree(TREE);
+ console.log('');
+};
+
+export const runAllTestFiles = async () => {
+ const context = await createContext({
+ isRunAllTests: true,
+ });
+
+ try {
+ await loadTests();
+
+ if (context.isMigrate) {
+ await migrate();
+ }
+
+ reportStartTests();
+
+ await loadServices(context);
+ await context.transaction(runTests, TREE).catch(reportErrorEndTests);
+ } finally {
+ await context.db.end();
+ await migrate.context.dropDatabase();
+ }
+
+ reportEndTests();
+};
diff --git a/src/runtime/test/use.js b/src/runtime/test/use.js
new file mode 100644
index 0000000..863c787
--- /dev/null
+++ b/src/runtime/test/use.js
@@ -0,0 +1,37 @@
+import { presets } from '#env';
+import { Test } from './internals/class.js';
+import { factoryRequest } from './internals/request.js';
+
+export { objectContaining as expected } from '#utils/assert.js';
+
+export { file } from './internals/payload.js';
+
+export const test = (describe, options, assert) =>
+ new Test(describe, options, assert);
+
+export const use = methods => ({
+ get get() {
+ return (methods.get.request ??= factoryRequest(methods, 'get'));
+ },
+ get post() {
+ return (methods.post.request ??= factoryRequest(methods, 'post'));
+ },
+ get put() {
+ return (methods.put.request ??= factoryRequest(methods, 'put'));
+ },
+ get patch() {
+ return (methods.patch.request ??= factoryRequest(methods, 'patch'));
+ },
+ get delete() {
+ return (methods.delete.request ??= factoryRequest(methods, 'delete'));
+ },
+});
+
+export const log = value => {
+ if (presets.app.isDevelopment)
+ console.dir(value, {
+ showHidden: false,
+ depth: null,
+ colors: true,
+ });
+};
diff --git a/tsconfig.base.json b/tsconfig.base.json
new file mode 100644
index 0000000..74db431
--- /dev/null
+++ b/tsconfig.base.json
@@ -0,0 +1,51 @@
+{
+ "display": "UAH",
+ "compilerOptions": {
+ "lib": ["ESNext", "WebWorker"],
+ "types": ["@uah/server"],
+ "strict": true,
+ "noEmit": true,
+ "allowJs": false,
+ "composite": false,
+ "incremental": true,
+ "isolatedModules": false,
+ "skipLibCheck": true,
+ "noEmitHelpers": true,
+ "noEmitOnError": false,
+ "removeComments": true,
+ "preserveSymlinks": true,
+ "strictNullChecks": true,
+ "resolveJsonModule": true,
+ "noResolve": false,
+ "allowUmdGlobalAccess": false,
+ "importHelpers": false,
+ "downlevelIteration": false,
+ "experimentalDecorators": false,
+ "allowUnusedLabels": false,
+ "allowUnreachableCode": false,
+ "noFallthroughCasesInSwitch": false,
+ "declaration": false,
+ "newLine": "lf",
+ "noUnusedLocals": true,
+ "alwaysStrict": false,
+ "esModuleInterop": true,
+ "noImplicitThis": true,
+ "noImplicitAny": true,
+ "noImplicitReturns": false,
+ "emitDeclarationOnly": false,
+ "jsx": "preserve",
+ "module": "ESNext",
+ "target": "ESNext",
+ "moduleDetection": "force",
+ "useDefineForClassFields": true,
+ "exactOptionalPropertyTypes": true,
+ "forceConsistentCasingInFileNames": true,
+ "assumeChangesOnlyAffectDirectDependencies": false,
+ "moduleResolution": "bundler",
+ "verbatimModuleSyntax": false,
+ "allowImportingTsExtensions": true,
+ "resolvePackageJsonExports": true,
+ "resolvePackageJsonImports": true,
+ "allowArbitraryExtensions": false
+ }
+}