From 5dd6fd51bfc456b346d76a0e8117e75e877b4fc2 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Wed, 29 Apr 2026 12:29:09 +0200 Subject: [PATCH 1/4] chore: work on dev env --- .github/workflows/reuse-quality.yml | 52 ++- .gitignore | 4 + .zellij.kdl | 10 +- AGENTS.md | 113 ++++++ CLAUDE.md | 1 + api/config/development.mjs | 16 +- api/config/test.mjs | 15 - api/package.json | 2 +- api/src/app.ts | 11 +- api/src/config.ts | 8 +- api/src/misc/routers/test-env.ts | 107 ++++++ dev/delete-worktree.sh | 29 ++ dev/init-env.sh | 28 ++ dev/resources/nginx.conf | 108 ------ dev/resources/nginx.conf.template | 69 ++++ dev/resources/organizations.json | 26 ++ dev/resources/users.json | 72 ++++ dev/status.sh | 100 +++++ dev/worktree.sh | 47 +++ docker-compose.yml | 62 ++-- package-lock.json | 106 ++++++ package.json | 23 +- playwright.config.ts | 46 +++ test-it/01-plugins-registry.ts | 25 -- test-it/02-plugins.ts | 183 ---------- test-it/03-processings.ts | 343 ------------------ test-it/04-permissions.ts | 198 ---------- test-it/utils/index.ts | 51 --- tests/features/plugins/install.api.spec.ts | 137 +++++++ tests/features/plugins/registry.api.spec.ts | 21 ++ .../processings/lifecycle.api.spec.ts | 293 +++++++++++++++ .../processings/permissions.api.spec.ts | 188 ++++++++++ .../fixtures}/processing-hello-world.tgz | Bin tests/state-setup.ts | 27 ++ tests/state-teardown.ts | 14 + tests/support/axios.ts | 58 +++ tsconfig.json | 2 +- tsconfig.test.json | 7 - worker/config/development.mjs | 19 +- worker/config/test.mjs | 18 - worker/package.json | 2 +- worker/src/config.ts | 7 +- worker/src/worker.ts | 3 +- 43 files changed, 1633 insertions(+), 1018 deletions(-) create mode 100644 AGENTS.md create mode 100644 CLAUDE.md delete mode 100644 api/config/test.mjs create mode 100644 api/src/misc/routers/test-env.ts create mode 100755 dev/delete-worktree.sh create mode 100755 dev/init-env.sh delete mode 100644 dev/resources/nginx.conf create mode 100644 dev/resources/nginx.conf.template 
create mode 100755 dev/status.sh create mode 100755 dev/worktree.sh create mode 100644 playwright.config.ts delete mode 100644 test-it/01-plugins-registry.ts delete mode 100644 test-it/02-plugins.ts delete mode 100644 test-it/03-processings.ts delete mode 100644 test-it/04-permissions.ts delete mode 100644 test-it/utils/index.ts create mode 100644 tests/features/plugins/install.api.spec.ts create mode 100644 tests/features/plugins/registry.api.spec.ts create mode 100644 tests/features/processings/lifecycle.api.spec.ts create mode 100644 tests/features/processings/permissions.api.spec.ts rename {test-it/utils => tests/fixtures}/processing-hello-world.tgz (100%) create mode 100644 tests/state-setup.ts create mode 100644 tests/state-teardown.ts create mode 100644 tests/support/axios.ts delete mode 100644 tsconfig.test.json delete mode 100644 worker/config/test.mjs diff --git a/.github/workflows/reuse-quality.yml b/.github/workflows/reuse-quality.yml index b3dc8a85..6dfd9e4e 100644 --- a/.github/workflows/reuse-quality.yml +++ b/.github/workflows/reuse-quality.yml @@ -18,9 +18,53 @@ jobs: - name: Install dependencies run: npm ci - - name: Run quality checks - run: npm run quality + - name: Lint + run: npm run lint - - name: Docker logs in case of failure + - name: Build types + run: npm run build-types + + - name: Type check + run: npm run check-types + + - name: Build UI + run: npm -w ui run build + + - name: Init .env + run: ./dev/init-env.sh + + - name: Use localhost as DEV_HOST in CI + run: sed -i 's/^DEV_HOST=.*/DEV_HOST=localhost/' .env + + - name: Start docker compose services + run: docker compose --profile dev up -d --wait + + - name: Start dev API + run: npx dotenv -- npm run dev-api & + + - name: Start dev worker + run: npx dotenv -- npm run dev-worker & + + - name: Wait for dev API to be ready + run: | + set -a; source .env; set +a + for i in {1..30}; do + curl -sf "http://localhost:${DEV_API_PORT}/api/v1/_ping" && exit 0 + sleep 2 + done + echo "dev API 
did not become ready" >&2 + exit 1 + + - name: Run tests + run: npm test + + - name: Audit + run: npm audit --omit=dev --audit-level=critical + + - name: Docker compose logs on failure if: failure() - run: docker compose logs data-fair \ No newline at end of file + run: docker compose --profile dev logs + + - name: Stop docker compose services + if: always() + run: docker compose --profile dev down diff --git a/.gitignore b/.gitignore index c4f49af3..4490df16 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,9 @@ data/ +dev/logs/ node_modules/ **/config/local-* .config/ .type/ +.env +playwright-report/ +test-results/ diff --git a/.zellij.kdl b/.zellij.kdl index a2b16158..c8895f74 100644 --- a/.zellij.kdl +++ b/.zellij.kdl @@ -7,26 +7,26 @@ layout { } pane name="deps" { command "${DEV_SHELL}" - args "-ic" "npm run dev-deps && watch -n 4 \"docker compose ps --all --format 'table {{.Name}}\t{{.Status}}'\"" + args "-ic" "npm run dev-deps" } } pane { split_direction "vertical" pane name="ui" { command "${DEV_SHELL}" - args "-ic" "nvm use > /dev/null 2>&1 && npm -w ui run dev" + args "-ic" "nvm use > /dev/null 2>&1 && npm run dev-ui" } pane name="api" { command "${DEV_SHELL}" - args "-ic" "nvm use > /dev/null 2>&1 && npm -w api run dev" + args "-ic" "nvm use > /dev/null 2>&1 && npm run dev-api" } pane name="worker" { command "${DEV_SHELL}" - args "-ic" "nvm use > /dev/null 2>&1 && npm -w worker run dev" + args "-ic" "nvm use > /dev/null 2>&1 && npm run dev-worker" } } pane size=1 borderless=true { command "bash" - args "-ic" "echo -n -e \"Dev server available at \\e[1;96mhttp://localhost:5600\\033[0m\"" + args "-ic" "echo -n -e \"Dev server available at \\e[1;96mhttp://${DEV_HOST}:${NGINX_PORT1}/processings\\033[0m\"" } } diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..5cc9ce2f --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,113 @@ +# Data Fair Processings — Agent Guidelines + +## Project Overview + +`@data-fair/processings` is a sister service of 
`data-fair`. It runs scheduled and on-demand +processings (NPM-installable plugins) that ingest, transform and publish datasets into a +running `data-fair` instance. The project is a monorepo with four workspaces: + +- `api/` — Express REST API +- `worker/` — Polling worker that runs the processings tasks in child processes +- `ui/` — Vue 3 + Vuetify SPA (built with Vite) +- `shared/` — TypeScript types and helpers shared between api, worker and ui + +## Dev environment + +The dev environment is managed by **zellij** (terminal multiplexer) and **docker compose**. +**Never start, stop, or restart dev processes yourself** — the user manages them through +zellij panes (`npm run dev-zellij`). + +The dev environment also serves as the test environment: the Playwright test suite hits the +running dev API, dev worker and dev UI. There is no separate test server. + +Each git worktree gets its own `.env` (random ports + `.localhost` subdomain) so +multiple worktrees coexist without port collisions. Use the worktree scripts: + +```bash +./dev/worktree.sh feat-xyz # create ../processings_feat-xyz with a fresh .env +./dev/delete-worktree.sh feat-xyz # tear down docker compose and remove the worktree +``` + +### Checking status + +```bash +bash dev/status.sh +``` + +Shows the health of all services (nginx, dev API, dev UI, dev worker, simple-directory, +data-fair upstream, events, openapi-viewer, mongo, elasticsearch) and lists log files +with sizes and timestamps. + +### Log files + +All dev processes write to `dev/logs/`: +- `dev-api.log` — API server +- `dev-worker.log` — worker process +- `dev-ui.log` — UI dev server (Vite) +- `docker-compose.log` — all docker compose services + +### Troubleshooting + +1. Run `bash dev/status.sh` to identify which services are down +2. Read the relevant log file in `dev/logs/` for error details +3. 
Report findings to the user — do not attempt to fix infrastructure issues yourself + +### Port assignments + +Port numbers and the `DEV_HOST` subdomain live in `.env` (gitignored, generated by +`dev/init-env.sh`). Do not modify port assignments by hand. + +### Test users + +Test fixtures live in `dev/resources/users.json` and `dev/resources/organizations.json`. +All test users and orgs are prefixed with `test_` (e.g. `test_admin1@test.com`, +`test_org1`). The cleanup endpoint `DELETE /api/v1/test-env` only purges documents +whose `owner.id` matches `^test_` so that interactive dev work under non-prefixed +accounts (e.g. `superadmin`, `albanm`) survives test runs. + +Special accounts: +- `test_superadmin@test.com` / password `superpasswd` — superadmin +- all other `test_*@test.com` — password `passwd` + +### Test-env API + +When `NODE_ENV=development` the API exposes `/api/v1/test-env`: +- `DELETE /` — delete all `test_*`-owned processings, runs and limits +- `GET /pending-tasks` — list runs in `triggered`/`scheduled`/`running` status +- `GET /raw-processing/:id`, `GET /raw-run/:id` — raw mongo docs +- `POST /set-env`, `POST /set-config` — runtime overrides for testing +- `DELETE /plugins` — wipe the installed plugins directory + +These endpoints are used by the Playwright support helpers in `tests/support/axios.ts`. + +### Testing + +```bash +npm test # all tests +npm run test-unit # unit tests only +npm run test-api # API tests only +npm run test-e2e # e2e tests only +npx playwright test path/to/file # specific file +``` + +Tests are organized under `tests/features//.{unit,api,e2e}.spec.ts`. +The `state-setup` project pings `/api/v1/test-env/pending-tasks` and tails the dev API +and worker logs into the test reporter output. + +The full test suite is long — when iterating on changes always run only the related +test cases. The full suite runs on `git push` via husky. 
+ +### Linting & Type Checking + +```bash +npm run lint # ESLint (root + ui workspace) +npm run lint-fix # auto-fix +npm run check-types # TypeScript +``` + +### Building + +```bash +npm run build-types # generate type definitions for shared schemas +npm -w ui run build # build the Vue SPA +``` diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..43c994c2 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +@AGENTS.md diff --git a/api/config/development.mjs b/api/config/development.mjs index ee27bada..d89fdd38 100644 --- a/api/config/development.mjs +++ b/api/config/development.mjs @@ -1,13 +1,19 @@ +const apiPort = parseInt(process.env.DEV_API_PORT ?? '8082') +const mongoPort = process.env.MONGO_PORT ?? '27017' +const sdPort = process.env.SD_PORT ?? '8080' +const eventsPort = process.env.EVENTS_PORT ?? '8083' +const observerPort = parseInt(process.env.DEV_API_OBSERVER_PORT ?? '9092') + export default { cipherPassword: 'dev', dataDir: '../data/development', - mongoUrl: 'mongodb://localhost:27017/data-fair-processings-development', + mongoUrl: `mongodb://localhost:${mongoPort}/data-fair-processings-development`, observer: { - port: 9092 + port: observerPort }, - port: 8082, - privateDirectoryUrl: 'http://localhost:8080', - privateEventsUrl: 'http://localhost:8083', + port: apiPort, + privateDirectoryUrl: `http://localhost:${sdPort}`, + privateEventsUrl: `http://localhost:${eventsPort}`, secretKeys: { identities: 'secret-identities', events: 'secret-events' diff --git a/api/config/test.mjs b/api/config/test.mjs deleted file mode 100644 index 0c4481ef..00000000 --- a/api/config/test.mjs +++ /dev/null @@ -1,15 +0,0 @@ -export default { - cipherPassword: 'test', - dataDir: './data/test', - mongoUrl: 'mongodb://localhost:27017/data-fair-processings-test', - observer: { - port: 9092 - }, - port: 8082, - privateDirectoryUrl: 'http://localhost:8080', - privateEventsUrl: 'http://localhost:8083', - secretKeys: { - identities: 'secret-identities', - events: 
'secret-events' - } -} diff --git a/api/package.json b/api/package.json index 5c329f74..71d0ee60 100644 --- a/api/package.json +++ b/api/package.json @@ -3,7 +3,7 @@ "main": "index.ts", "type": "module", "scripts": { - "dev": "NODE_ENV=development DEBUG=upgrade node --watch index.ts" + "dev": "NODE_ENV=development DEBUG=upgrade node --env-file-if-exists=../.env --watch index.ts" }, "imports": { "#config": "./src/config.ts", diff --git a/api/src/app.ts b/api/src/app.ts index 4f11bdd0..b9491735 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -23,6 +23,10 @@ app.set('json spaces', 2) app.use(express.json()) app.use(express.urlencoded({ extended: true })) +app.get('/api/v1/_ping', (req, res) => { + res.send('ok') +}) + app.use('/api/identities', identitiesRouter) app.use('/api/v1/plugins-registry', pluginsRegistryRouter) app.use('/api/v1/plugins', pluginsRouter) @@ -31,7 +35,12 @@ app.use('/api/v1/runs', runsRouter) app.use('/api/v1/limits', limitsRouter) app.use('/api/v1/admin', adminRouter) -if (process.env.NODE_ENV !== 'test') { +if (process.env.NODE_ENV === 'development') { + app.use('/api/v1/test-env', (await import('./misc/routers/test-env.ts')).default) +} + +if (process.env.NODE_ENV !== 'development') { + // in development the UI is served by the Vite dev server through nginx const cspDirectives = { ...defaultNonceCSPDirectives } // necessary to use vjsf without pre-compilation cspDirectives['script-src'] = "'unsafe-eval' " + defaultNonceCSPDirectives['script-src'] diff --git a/api/src/config.ts b/api/src/config.ts index 0d64b8d1..b134db30 100644 --- a/api/src/config.ts +++ b/api/src/config.ts @@ -2,12 +2,10 @@ import type { ApiConfig } from '../config/type/index.ts' import { assertValid } from '../config/type/index.ts' import config from 'config' -// we reload the config instead of using the singleton from the config module for testing purposes -// @ts-ignore -const apiConfig = process.env.NODE_ENV === 'test' ? 
config.util.loadFileConfigs(process.env.NODE_CONFIG_DIR, { skipConfigSources: true }) : config -assertValid(apiConfig, { lang: 'en', name: 'config', internal: true }) +assertValid(config, { lang: 'en', name: 'config', internal: true }) -export default apiConfig as ApiConfig +const apiConfig = config as unknown as ApiConfig +export default apiConfig export const uiConfig = { pluginCategories: apiConfig.pluginCategories, diff --git a/api/src/misc/routers/test-env.ts b/api/src/misc/routers/test-env.ts new file mode 100644 index 00000000..6684d526 --- /dev/null +++ b/api/src/misc/routers/test-env.ts @@ -0,0 +1,107 @@ +import express from 'express' +import fs from 'fs-extra' +import path from 'node:path' +import mongo from '#mongo' +import config from '#config' + +const router = express.Router() + +// Cleanup test_* owned data from mongo + plugins on disk +router.delete('/', async (req, res, next) => { + try { + const testOwnerFilter = { 'owner.id': { $regex: /^test_/ } } + const testIdFilter = { id: { $regex: /^test_/ } } + + await Promise.all([ + mongo.processings.deleteMany(testOwnerFilter), + mongo.runs.deleteMany(testOwnerFilter), + mongo.limits.deleteMany(testIdFilter), + mongo.db.collection('locks').deleteMany({}) + ]) + + res.json({ ok: true }) + } catch (err) { + next(err) + } +}) + +// Return the list of runs that are not yet completed (used by tests to wait for worker idle) +router.get('/pending-tasks', async (req, res, next) => { + try { + const runs = await mongo.runs.find( + { status: { $in: ['triggered', 'scheduled', 'running'] } }, + { projection: { _id: 1, status: 1, 'processing._id': 1, 'processing.title': 1 } } + ).toArray() + const grouped: Record = { triggered: [], scheduled: [], running: [] } + for (const run of runs) grouped[run.status].push(run) + res.json(grouped) + } catch (err) { + next(err) + } +}) + +// Return the raw MongoDB document for a processing +router.get('/raw-processing/:id', async (req, res, next) => { + try { + const processing = 
await mongo.processings.findOne({ _id: req.params.id }) + if (!processing) return res.status(404).json({ error: 'processing not found' }) + res.json(processing) + } catch (err) { + next(err) + } +}) + +// Return the raw MongoDB document for a run +router.get('/raw-run/:id', async (req, res, next) => { + try { + const run = await mongo.runs.findOne({ _id: req.params.id }) + if (!run) return res.status(404).json({ error: 'run not found' }) + res.json(run) + } catch (err) { + next(err) + } +}) + +// Set an environment variable in the main process (for testing) +router.post('/set-env', (req, res, next) => { + try { + const { key, value } = req.body + if (value === undefined || value === null) { + delete process.env[key] + } else { + process.env[key] = value + } + res.json({ ok: true }) + } catch (err) { + next(err) + } +}) + +// Set a config value at runtime (for testing) +router.post('/set-config', (req, res, next) => { + try { + const { path: configPath, value } = req.body + const parts = configPath.split('.') + let obj: any = config + for (let i = 0; i < parts.length - 1; i++) { + obj = obj[parts[i]] + } + obj[parts[parts.length - 1]] = value + res.json({ ok: true }) + } catch (err) { + next(err) + } +}) + +// Wipe the installed plugins directory (used between test runs) +router.delete('/plugins', async (req, res, next) => { + try { + const pluginsDir = path.resolve(config.dataDir, 'plugins') + await fs.emptyDir(pluginsDir) + res.json({ ok: true }) + } catch (err) { + next(err) + } +}) + +export default router diff --git a/dev/delete-worktree.sh b/dev/delete-worktree.sh new file mode 100755 index 00000000..3a4db0f8 --- /dev/null +++ b/dev/delete-worktree.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +BRANCH_NAME=$1 + +if [ -z "$BRANCH_NAME" ]; then + echo "Error: Please provide a branch name." + echo "Usage: ./dev/delete-worktree.sh feat-xyz" + exit 1 +fi + +REPO_NAME=$(basename "$PWD") +TARGET_DIR="../${REPO_NAME}_${BRANCH_NAME}" + +if [ ! 
-d "$TARGET_DIR" ]; then + echo "Error: Worktree directory $TARGET_DIR does not exist." + exit 1 +fi + +echo "Stopping docker compose services in $TARGET_DIR" +cd "$TARGET_DIR" +docker compose --profile dev down + +echo "Removing git worktree at $TARGET_DIR" +cd "$OLDPWD" +git worktree remove "$TARGET_DIR" + +echo "-----------------------------------------------" +echo "✅ Worktree $BRANCH_NAME deleted!" +echo "-----------------------------------------------" diff --git a/dev/init-env.sh b/dev/init-env.sh new file mode 100755 index 00000000..04fc895c --- /dev/null +++ b/dev/init-env.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +RANDOM_NB=$((1024 + RANDOM % 48000)) +echo "Use random base port $RANDOM_NB" + +BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null | sed 's/[^a-zA-Z0-9-]/-/g') +DEV_HOST="${BRANCH:-df}.localhost" + +cat < ".env" +DEV_HOST=${DEV_HOST} + +NGINX_PORT1=$((RANDOM_NB)) +NGINX_PORT2=$((RANDOM_NB + 1)) + +DEV_API_PORT=$((RANDOM_NB + 10)) +DEV_UI_PORT=$((RANDOM_NB + 11)) +DEV_WORKER_PORT=$((RANDOM_NB + 12)) +DEV_API_OBSERVER_PORT=$((RANDOM_NB + 13)) +DEV_WORKER_OBSERVER_PORT=$((RANDOM_NB + 14)) + +MONGO_PORT=$((RANDOM_NB + 20)) +ES_PORT=$((RANDOM_NB + 21)) + +SD_PORT=$((RANDOM_NB + 30)) +EVENTS_PORT=$((RANDOM_NB + 31)) +OAV_PORT=$((RANDOM_NB + 32)) +DF_PORT=$((RANDOM_NB + 33)) +EOF diff --git a/dev/resources/nginx.conf b/dev/resources/nginx.conf deleted file mode 100644 index e53f3211..00000000 --- a/dev/resources/nginx.conf +++ /dev/null @@ -1,108 +0,0 @@ -# nginx configuration file for Data Fair development and test environment -user nginx; -worker_processes auto; - -error_log /var/log/nginx/error.log notice; -pid /var/run/nginx.pid; - -events { - worker_connections 1024; -} - -http { - include /etc/nginx/mime.types; - default_type application/octet-stream; - - # usual access logs - log_format main '$remote_addr - $remote_user [$time_local] "$request" ' - '$status $body_bytes_sent "$http_referer" ' - '"$http_user_agent" "$http_x_forwarded_for"'; - - # use 
header origin if referer is empty - map $http_referer $reqref { - default $http_referer; - "" $http_origin; - } - - # custom logs destined to our metrics daemon - log_format metrics escape=json '["$host","$reqref",$request_time,$bytes_sent,$status,"$upstream_http_x_owner","$cookie_id_token","$cookie_id_token_org","$http_x_apikey","$http_x_account","$http_x_processing","$upstream_cache_status","$upstream_http_x_resource","$upstream_http_x_operation","$gzip_ratio"]'; - - sendfile on; - #tcp_nopush on; - - keepalive_timeout 65; - - gzip on; - gzip_types application/atom+xml application/javascript application/x-javascript application/json application/rss+xml application/vnd.ms-fontobject application/x-font-ttf application/x-web-app-manifest+json application/xhtml+xml application/xml font/opentype image/svg+xml image/x-icon text/css text/plain text/x-component text/csv application/geo+json application/x-ndjson application/schema+json application/tableschema+json; - - map $http_upgrade $connection_upgrade { - default upgrade; - '' close; - } - - server { - listen 5600; - server_name _; - - access_log /var/log/nginx/access.log main; - - # logs written on a unix socket for our daemon - # you can listen directly on this socket to debug: - # umask 0 - # nc -vkluU dev/data/metrics.log.sock - access_log syslog:server=unix:/data/metrics.log.sock,tag=df,nohostname metrics if=$upstream_http_x_operation; - - # Transmit host, protocol and user ip, we use it for routing, rate limiting, etc. 
- proxy_set_header Host $http_host; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header X-Forwarded-Host $http_host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Client-IP $remote_addr; - - # web socket support - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection "Upgrade"; - - # hmr - proxy_read_timeout 86400; - - location = / { - return 302 /processings/dev; - } - - location = /processings/ { - return 302 /processings/dev; - } - - location /processings/api/ { - proxy_pass http://localhost:8082; - } - - location ~ /processings/(.*)-ui-config.js { - proxy_pass http://localhost:8082; - } - location /processings/ { - # port 3039 to use vite dev server - # port 8082 to use built application - proxy_pass http://localhost:3039; - } - - location /simple-directory/ { - proxy_pass http://localhost:8080; - } - - location /data-fair { - rewrite ^/data-fair/(.*) /$1 break; - proxy_pass http://localhost:8081/; - } - - location /openapi-viewer { - proxy_pass http://localhost:8084; - } - - location /events/ { - proxy_pass http://localhost:8083; - } - } -} diff --git a/dev/resources/nginx.conf.template b/dev/resources/nginx.conf.template new file mode 100644 index 00000000..e29c20ca --- /dev/null +++ b/dev/resources/nginx.conf.template @@ -0,0 +1,69 @@ +# nginx configuration file for processings development and test environment + +server { + listen ${NGINX_PORT1}; + server_name ${DEV_HOST}; + + # Transmit host, protocol and user ip, we use it for routing, rate limiting, etc. 
+ proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $http_host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Client-IP $remote_addr; + + # web socket support + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + + # accept some occasional long queries + HMR long poll from the Vite dev server + proxy_read_timeout 86400; + # body size limits are implemented by services themselves + client_max_body_size 0; + # direct streaming of uploads + proxy_request_buffering off; + # direct streaming of downloads + proxy_buffering off; + # allow large response headers (setCookies with large session token) + proxy_buffer_size 32k; + proxy_buffers 4 32k; + + location = / { + return 302 /processings/; + } + + location = /processings/ { + return 302 /processings/dev; + } + + # API routes served by API server + location /processings/api/ { + proxy_pass http://localhost:${DEV_API_PORT}; + } + + # ui-config served by API + location ~ /processings/(.*)-ui-config.js { + proxy_pass http://localhost:${DEV_API_PORT}; + } + + # UI served by Vite dev server + location /processings/ { + proxy_pass http://localhost:${DEV_UI_PORT}; + } + + location /simple-directory/ { + proxy_pass http://localhost:${SD_PORT}/simple-directory/; + } + + location /data-fair/ { + proxy_pass http://localhost:${DF_PORT}/data-fair/; + } + + location /openapi-viewer/ { + proxy_pass http://localhost:${OAV_PORT}/; + } + + location /events/ { + proxy_pass http://localhost:${EVENTS_PORT}/; + } +} diff --git a/dev/resources/organizations.json b/dev/resources/organizations.json index 8ba1b63e..1c457f10 100644 --- a/dev/resources/organizations.json +++ b/dev/resources/organizations.json @@ -180,5 +180,31 @@ "name": "Fivechat" } ] + }, + { + "id": "test_org1", + "name": "Test Org 1", + "departments": [ + { "id": "dep1", "name": "department 1" }, + { 
"id": "dep2", "name": "department 2" } + ], + "partners": [ + { "id": "test_org2", "name": "Test Org 2" } + ], + "members": [ + { "id": "test_admin1", "role": "admin" }, + { "id": "test_user1", "role": "user" }, + { "id": "test_contrib1", "role": "contrib" }, + { "id": "test_dep_admin", "role": "admin", "department": "dep1" }, + { "id": "test_user2", "role": "user", "department": "dep1" } + ] + }, + { + "id": "test_org2", + "name": "Test Org 2", + "members": [ + { "id": "test_user2", "role": "admin" }, + { "id": "test_user1", "role": "user" } + ] } ] diff --git a/dev/resources/users.json b/dev/resources/users.json index b378e52b..f57dcee6 100644 --- a/dev/resources/users.json +++ b/dev/resources/users.json @@ -136,5 +136,77 @@ "password": { "clear": "superpasswd" } + }, + { + "id": "test_user1", + "firstName": "Test User1", + "lastName": "", + "email": "test_user1@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_user2", + "firstName": "Test User2", + "lastName": "", + "email": "test_user2@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_contrib1", + "firstName": "Test Contrib1", + "lastName": "", + "email": "test_contrib1@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_admin1", + "firstName": "Test Admin1", + "lastName": "", + "email": "test_admin1@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_alone", + "firstName": "Test Alone", + "lastName": "", + "email": "test_alone@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_dep_admin", + "firstName": "Test Dep", + "lastName": "Admin", + "email": "test_dep_admin@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_outsider", + "firstName": "Test", + "lastName": "Outsider", + "email": "test_outsider@test.com", + "password": { + "clear": "passwd" + } + }, + { + "id": "test_superadmin", + "firstName": "Test Super", + "lastName": "Admin", + "email": "test_superadmin@test.com", + 
"password": { + "clear": "superpasswd" + } } ] diff --git a/dev/status.sh b/dev/status.sh new file mode 100755 index 00000000..ef47fd36 --- /dev/null +++ b/dev/status.sh @@ -0,0 +1,100 @@ +#!/usr/bin/env bash +# Check the status of all dev environment services. +# Read-only — never starts, stops, or restarts anything. +# Safe to run from sandbox (only needs curl and .env). + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(dirname "$SCRIPT_DIR")" + +# Load port configuration +if [ -f "$PROJECT_DIR/.env" ]; then + set -a + source "$PROJECT_DIR/.env" + set +a +else + echo "ERROR: .env file not found at $PROJECT_DIR/.env" + exit 1 +fi + +NGINX1="http://${DEV_HOST}:${NGINX_PORT1}" + +# Colors (disabled if not a terminal) +if [ -t 1 ]; then + GREEN='\033[0;32m' + RED='\033[0;31m' + YELLOW='\033[0;33m' + BOLD='\033[1m' + RESET='\033[0m' +else + GREEN='' RED='' YELLOW='' BOLD='' RESET='' +fi + +check_http() { + local name="$1" url="$2" + local http_code + http_code=$(curl -s -L --max-time 2 -o /dev/null -w "%{http_code}" "$url" 2>&1) || http_code="000" + if [ "$http_code" = "000" ]; then + printf "${RED}%-20s DOWN %s (connection refused)${RESET}\n" "$name" "$url" + elif [ "$http_code" -ge 200 ] && [ "$http_code" -lt 400 ]; then + printf "${GREEN}%-20s UP %s${RESET}\n" "$name" "$url" + else + printf "${YELLOW}%-20s ERROR %s (HTTP %s)${RESET}\n" "$name" "$url" "$http_code" + fi +} + +check_tcp() { + local name="$1" host="$2" port="$3" + if (echo > /dev/tcp/"$host"/"$port") 2>/dev/null; then + printf "${GREEN}%-20s UP %s:%s${RESET}\n" "$name" "$host" "$port" + else + printf "${RED}%-20s DOWN %s:%s${RESET}\n" "$name" "$host" "$port" + fi +} + +echo -e "${BOLD}Dev environment status${RESET}" +echo "" + +# --- Nginx (gateway to everything) --- +echo -e "${BOLD}Nginx proxy:${RESET}" +check_http "nginx (port1)" "$NGINX1" +check_tcp "nginx (port2)" "localhost" "${NGINX_PORT2}" +echo "" + +# --- Dev processes --- +echo -e "${BOLD}Dev 
processes:${RESET}" +check_http "dev-api" "http://localhost:${DEV_API_PORT}/api/v1/_ping" +check_http "dev-ui" "http://localhost:${DEV_UI_PORT}" +check_tcp "dev-worker-observer" "localhost" "${DEV_WORKER_OBSERVER_PORT}" +echo "" + +# --- Docker compose services --- +echo -e "${BOLD}Docker compose services:${RESET}" +check_http "simple-directory" "${NGINX1}/simple-directory/" +check_http "data-fair" "${NGINX1}/data-fair/" +check_http "events" "${NGINX1}/events/" +check_http "openapi-viewer" "${NGINX1}/openapi-viewer/" +check_tcp "mongo" "localhost" "${MONGO_PORT}" +check_tcp "elasticsearch" "localhost" "${ES_PORT}" +echo "" + +# --- Docker compose status (if docker/podman available) --- +if command -v docker &> /dev/null && docker compose version &> /dev/null; then + echo -e "${BOLD}Container details:${RESET}" + (cd "$PROJECT_DIR" && docker compose ps --format "table {{.Name}}\t{{.Status}}\t{{.Ports}}" 2>/dev/null) || echo "(docker compose not available)" + echo "" +fi + +# --- Log files --- +echo -e "${BOLD}Log files:${RESET}" +found_logs=false +for log in "$PROJECT_DIR"/dev/logs/*.log; do + [ -f "$log" ] || continue + found_logs=true + name=$(basename "$log") + size=$(wc -c < "$log" 2>/dev/null || echo 0) + mod=$(date -r "$log" "+%H:%M:%S" 2>/dev/null || echo "unknown") + printf " %-25s %6s bytes (last modified: %s)\n" "$name" "$size" "$mod" +done +$found_logs || echo " (no log files found)" diff --git a/dev/worktree.sh b/dev/worktree.sh new file mode 100755 index 00000000..4ac8e08c --- /dev/null +++ b/dev/worktree.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +BRANCH_NAME=$1 + +if [ -z "$BRANCH_NAME" ]; then + echo "Error: Please provide a branch name." 
+ echo "Usage: ./dev/worktree.sh feat-xyz" + exit 1 +fi + +SOURCE_BRANCH=$(git branch --show-current) +REPO_NAME=$(basename "$PWD") +TARGET_DIR="../${REPO_NAME}_${BRANCH_NAME}" + +echo "Creating worktree at $TARGET_DIR from branch $SOURCE_BRANCH" +git worktree add -b "$BRANCH_NAME" "$TARGET_DIR" $SOURCE_BRANCH + +cd $TARGET_DIR + +if [ -d "$OLDPWD/.claude" ]; then + echo "Copy local Claude settings" + mkdir -p .claude + for f in "$OLDPWD/.claude"/*.json; do + [ -f "$f" ] && cp "$f" .claude/ + done +fi + +echo "Create .env file" +./dev/init-env.sh + +echo "npm ci" +npm ci + +echo "npm run build-types" +npm run build-types + +echo "npm -w ui run build" +npm -w ui run build + +echo "-----------------------------------------------" +echo "✅ Setup Complete!" +echo "Location: $TARGET_DIR" +echo "Branch: $BRANCH_NAME" +echo "-----------------------------------------------" +echo "Next step:" +echo " cd $TARGET_DIR" +echo "" diff --git a/docker-compose.yml b/docker-compose.yml index 7d1499db..e3dfa046 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,10 +4,22 @@ services: # reverse proxy for the whole environment ##### nginx: + profiles: + - dev image: nginx:1.29.4-alpine network_mode: host + environment: + - DEV_HOST=${DEV_HOST} + - NGINX_PORT1=${NGINX_PORT1} + - NGINX_PORT2=${NGINX_PORT2} + - DEV_API_PORT=${DEV_API_PORT} + - DEV_UI_PORT=${DEV_UI_PORT} + - SD_PORT=${SD_PORT} + - DF_PORT=${DF_PORT} + - EVENTS_PORT=${EVENTS_PORT} + - OAV_PORT=${OAV_PORT} volumes: - - ./dev/resources/nginx.conf:/etc/nginx/nginx.conf:ro + - ./dev/resources/nginx.conf.template:/etc/nginx/templates/default.conf.template:ro ##### # related services from the data-fair stack @@ -16,18 +28,18 @@ services: simple-directory: profiles: - dev - - test image: ghcr.io/data-fair/simple-directory:8 network_mode: host depends_on: - mongo environment: - DEBUG=session + - PORT=${SD_PORT} - CONTACT=contact@test.com - - ADMINS=["superadmin@test.com"] - - 
PUBLIC_URL=http://localhost:5600/simple-directory + - ADMINS=["superadmin@test.com","test_superadmin@test.com"] + - PUBLIC_URL=http://${DEV_HOST}:${NGINX_PORT1}/simple-directory - MAILDEV_ACTIVE=true - - MONGO_URL=mongodb://localhost:27017/simple-directory + - MONGO_URL=mongodb://localhost:${MONGO_PORT}/simple-directory - STORAGE_TYPE=file - AUTHRATELIMIT_ATTEMPTS=500 - AUTHRATELIMIT_DURATION=30 @@ -42,7 +54,6 @@ services: data-fair: profiles: - dev - - test image: ghcr.io/data-fair/data-fair:5 restart: on-failure:10 network_mode: host @@ -52,19 +63,19 @@ services: elasticsearch: condition: service_healthy environment: - - PORT=8081 - - MONGO_URL=mongodb://localhost:27017/data-fair - - DIRECTORY_URL=http://localhost:5600/simple-directory - - PRIVATE_EVENTS_URL=http://localhost:8083 - - PRIVATE_DIRECTORY_URL=http://localhost:8080 - - PUBLIC_URL=http://localhost:5600/data-fair - - WS_PUBLIC_URL=ws://localhost:5600/data-fair + - PORT=${DF_PORT} + - MONGO_URL=mongodb://localhost:${MONGO_PORT}/data-fair + - ES_HOST=http://localhost:${ES_PORT} + - DIRECTORY_URL=http://${DEV_HOST}:${NGINX_PORT1}/simple-directory + - PRIVATE_EVENTS_URL=http://localhost:${EVENTS_PORT} + - PRIVATE_DIRECTORY_URL=http://localhost:${SD_PORT} + - PUBLIC_URL=http://${DEV_HOST}:${NGINX_PORT1}/data-fair + - WS_PUBLIC_URL=ws://${DEV_HOST}:${NGINX_PORT1}/data-fair - OBSERVER_ACTIVE=false - - - PRIVATE_PROCESSINGS_URL=http://localhost:8082 + - PRIVATE_PROCESSINGS_URL=http://localhost:${DEV_API_PORT} - SECRET_EVENTS=secret-events - - EXTRA_ADMIN_NAV_ITEMS=[{"id":"processingsAdminDoc","href":"http://localhost:5600/openapi-viewer?urlType=processingsAdmin","icon":"mdi-cog-transfer-outline","title":"API Traitements périodiques"}] - - OPENAPI_VIEWER_URL=http://localhost:5600/openapi-viewer/ + - EXTRA_ADMIN_NAV_ITEMS=[{"id":"processingsAdminDoc","href":"http://${DEV_HOST}:${NGINX_PORT1}/openapi-viewer?urlType=processingsAdmin","icon":"mdi-cog-transfer-outline","title":"API Traitements périodiques"}] + - 
OPENAPI_VIEWER_URL=http://${DEV_HOST}:${NGINX_PORT1}/openapi-viewer/ - OPENAPI_VIEWER_V2=true ##### @@ -77,9 +88,9 @@ services: image: ghcr.io/data-fair/events:main network_mode: host environment: - - PORT=8083 - - MONGO_URL=mongodb://localhost:27017/data-fair-events - - PRIVATE_DIRECTORY_URL=http://localhost:5600/simple-directory + - PORT=${EVENTS_PORT} + - MONGO_URL=mongodb://localhost:${MONGO_PORT}/data-fair-events + - PRIVATE_DIRECTORY_URL=http://${DEV_HOST}:${NGINX_PORT1}/simple-directory - SECRET_IDENTITIES=secret-identities - SECRET_EVENTS=secret-events - SECRET_SENDMAILS=secret-sendmails @@ -89,10 +100,11 @@ services: profiles: - dev image: ghcr.io/data-fair/openapi-viewer:master + network_mode: host environment: - - PORT=8084 + - PORT=${OAV_PORT} - USE_SIMPLE_DIRECTORY=true - - ALLOWED_URLS={"processings":"http://localhost:5600/processings/api/v1/api-docs.json","processingsId":"http://localhost:5600/processings/api/v1/processings/{id}/api-docs.json","processingsAdmin":"http://localhost:5600/processings/api/v1/admin/api-docs.json"} + - ALLOWED_URLS={"processings":"http://${DEV_HOST}:${NGINX_PORT1}/processings/api/v1/api-docs.json","processingsId":"http://${DEV_HOST}:${NGINX_PORT1}/processings/api/v1/processings/{id}/api-docs.json","processingsAdmin":"http://${DEV_HOST}:${NGINX_PORT1}/processings/api/v1/admin/api-docs.json"} ##### # db and search engine @@ -101,10 +113,9 @@ services: elasticsearch: profiles: - dev - - test image: ghcr.io/data-fair/elasticsearch:7.x ports: - - 9200:9200 + - ${ES_PORT}:9200 environment: - discovery.type=single-node - xpack.security.enabled=false @@ -122,10 +133,9 @@ services: mongo: profiles: - dev - - test image: mongo:4.2 ports: - - 27017:27017 + - ${MONGO_PORT}:27017 volumes: - mongo-data:/data/db diff --git a/package-lock.json b/package-lock.json index fb7233e4..c29efe79 100644 --- a/package-lock.json +++ b/package-lock.json @@ -22,13 +22,17 @@ "@data-fair/lib-common-types": "^1.10.6", "@data-fair/lib-node": "^2.12.1", 
"@data-fair/lib-utils": "^1.10.1", + "@playwright/test": "^1.52.0", "@types/config": "^3.3.5", "@types/debug": "^4.1.12", "@types/semver": "^7.7.1", "commitlint": "^20.5.0", + "dotenv": "^16.4.5", + "dotenv-cli": "^7.4.4", "eslint": "^9.35.0", "eslint-plugin-vue": "^10.4.0", "eslint-plugin-vuetify": "^2.7.2", + "form-data": "^4.0.0", "husky": "^9.1.7", "neostandard": "^0.12.2", "tough-cookie": "^5.0.0", @@ -2191,6 +2195,22 @@ "url": "https://opencollective.com/parcel" } }, + "node_modules/@playwright/test": { + "version": "1.59.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.59.1.tgz", + "integrity": "sha512-PG6q63nQg5c9rIi4/Z5lR5IVF7yU5MqmKaPOe0HSc0O2cX1fPi96sUQu5j7eo4gKCkB2AnNGoWt7y4/Xx3Kcqg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "playwright": "1.59.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@rolldown/binding-android-arm64": { "version": "1.0.0-rc.15", "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.15.tgz", @@ -4798,6 +4818,45 @@ "node": ">=8" } }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dotenv-cli": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/dotenv-cli/-/dotenv-cli-7.4.4.tgz", + "integrity": "sha512-XkBYCG0tPIes+YZr4SpfFv76SQrV/LeCE8CI7JSEMi3VR9MvTihCGTOtbIexD6i2mXF+6px7trb1imVCXSNMDw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.6", + "dotenv": "^16.3.0", + "dotenv-expand": "^10.0.0", + "minimist": "^1.2.6" + }, + "bin": { + "dotenv": "cli.js" + } + }, + "node_modules/dotenv-expand": { + 
"version": "10.0.0", + "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-10.0.0.tgz", + "integrity": "sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -8673,6 +8732,53 @@ "pathe": "^2.0.3" } }, + "node_modules/playwright": { + "version": "1.59.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.59.1.tgz", + "integrity": "sha512-C8oWjPR3F81yljW9o5OxcWzfh6avkVwDD2VYdwIGqTkl+OGFISgypqzfu7dOe4QNLL2aqcWBmI3PMtLIK233lw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "playwright-core": "1.59.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.59.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.59.1.tgz", + "integrity": "sha512-HBV/RJg81z5BiiZ9yPzIiClYV/QMsDCKUyogwH9p3MCP6IYjUFu/MActgYAvK0oWyV9NlwM3GLBjADyWgydVyg==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/playwright/node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/possible-typed-array-names": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", diff --git a/package.json 
b/package.json index f769ac18..50f98f64 100644 --- a/package.json +++ b/package.json @@ -6,20 +6,21 @@ "scripts": { "build-types": "df-build-types ./", "check-types": "tsc", - "dev-api": "npm -w api run dev", - "dev-deps": "docker compose --profile dev up -d --wait", - "dev-ui": "npm -w ui run dev", - "dev-worker": "npm -w worker run dev", - "dev-zellij": "export DEV_SHELL=$(basename \"$SHELL\") && zellij --layout .zellij.kdl && rm data/zellij-deps-ready", + "dev-api": "mkdir -p dev/logs && npm -w api run dev 2>&1 | tee dev/logs/dev-api.log", + "dev-deps": "mkdir -p dev/logs && docker compose --profile dev up -d --wait && docker compose --profile dev logs -f 2>&1 | tee dev/logs/docker-compose.log", + "dev-ui": "mkdir -p dev/logs && npm -w ui run dev 2>&1 | tee dev/logs/dev-ui.log", + "dev-worker": "mkdir -p dev/logs && npm -w worker run dev 2>&1 | tee dev/logs/dev-worker.log", + "dev-zellij": "export DEV_SHELL=$(basename \"$SHELL\") && dotenv -- zellij --layout .zellij.kdl", "lint": "eslint . && npm -w ui run lint", "lint-fix": "eslint --fix . 
&& npm -w ui run lint-fix", "prepare": "husky || true", "stop-dev-deps": "docker compose --profile dev stop", - "test-base": "NODE_ENV=test EVENTS_LOG_LEVEL=alert node --disable-warning=ExperimentalWarning --test-force-exit --test-concurrency=1 --test", - "test-only": "npm run test-base -- --test-only test-it/*.ts", - "test": "npm run test-base test-it/*.ts", + "test": "playwright test --max-failures=1", + "test-unit": "playwright test --project unit --max-failures=1", + "test-api": "playwright test --project api --max-failures=1", + "test-e2e": "playwright test --project e2e --max-failures=1", "test-images": "docker compose --profile dev --profile testImages up -d --build --wait", - "quality": "npm run dev-deps && npm run lint && npm run build-types && npm run check-types && npm -w ui run build && npm run test && npm audit --omit=dev --audit-level=critical" + "quality": "npm run lint && npm run build-types && npm run check-types && npm -w ui run build && npm test && npm audit --omit=dev --audit-level=critical" }, "repository": { "type": "git", @@ -48,13 +49,17 @@ "@data-fair/lib-common-types": "^1.10.6", "@data-fair/lib-node": "^2.12.1", "@data-fair/lib-utils": "^1.10.1", + "@playwright/test": "^1.52.0", "@types/config": "^3.3.5", "@types/debug": "^4.1.12", "@types/semver": "^7.7.1", "commitlint": "^20.5.0", + "dotenv": "^16.4.5", + "dotenv-cli": "^7.4.4", "eslint": "^9.35.0", "eslint-plugin-vue": "^10.4.0", "eslint-plugin-vuetify": "^2.7.2", + "form-data": "^4.0.0", "husky": "^9.1.7", "neostandard": "^0.12.2", "tough-cookie": "^5.0.0", diff --git a/playwright.config.ts b/playwright.config.ts new file mode 100644 index 00000000..34c035b5 --- /dev/null +++ b/playwright.config.ts @@ -0,0 +1,46 @@ +import { defineConfig, devices } from '@playwright/test' +import 'dotenv/config' + +export default defineConfig({ + testDir: './tests', + workers: 1, + fullyParallel: false, + forbidOnly: !!process.env.CI, + retries: process.env.CI ? 
1 : 0, + reporter: 'dot', + timeout: 60_000, + expect: { timeout: 5_000 }, + + use: { + baseURL: `http://${process.env.DEV_HOST}:${process.env.NGINX_PORT1}/processings`, + actionTimeout: 5_000, + navigationTimeout: 5_000, + }, + + projects: [ + { + name: 'state-setup', + testMatch: /state-setup\.ts/, + teardown: 'state-teardown' + }, + { + name: 'state-teardown', + testMatch: /state-teardown\.ts/, + }, + { + name: 'unit', + testMatch: /.*\.unit\.spec\.ts/, + }, + { + name: 'api', + testMatch: /.*\.api\.spec\.ts/, + dependencies: ['state-setup'], + }, + { + name: 'e2e', + testMatch: /.*\.e2e\.spec\.ts/, + dependencies: ['state-setup'], + use: { ...devices['Desktop Chrome'] }, + }, + ], +}) diff --git a/test-it/01-plugins-registry.ts b/test-it/01-plugins-registry.ts deleted file mode 100644 index 21f49c2d..00000000 --- a/test-it/01-plugins-registry.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { strict as assert } from 'node:assert' -import { it, describe, before, after } from 'node:test' -import { axios, startApiServer, stopApiServer } from './utils/index.ts' - -const axAno = axios() - -describe('plugin-registry', () => { - before(startApiServer) - after(stopApiServer) - - it('should search for plugins (just lastest) on npmjs', async () => { - const res = await axAno.get('/api/v1/plugins-registry', { params: { q: 'hello-world' } }) - const hwProcessingPackages = res.data.results.filter(p => p.name === '@data-fair/processing-hello-world') - assert.equal(hwProcessingPackages.length, 1) - assert.equal(res.data.results[0].distTag, 'latest') - }) - - it('should search for plugins and all their distTag versions on npmjs', async () => { - const res = await axAno.get('/api/v1/plugins-registry', { params: { q: 'hello-world', showAll: 'true' } }) - const hwProcessingPackages = res.data.results.filter(p => p.name === '@data-fair/processing-hello-world') - assert.equal(hwProcessingPackages.length, 2) - assert(['latest', 'test'].includes(res.data.results[0].distTag)) - 
assert(['test', 'latest'].includes(res.data.results[1].distTag)) - }) -}) diff --git a/test-it/02-plugins.ts b/test-it/02-plugins.ts deleted file mode 100644 index 0664ece5..00000000 --- a/test-it/02-plugins.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { strict as assert } from 'node:assert' -import { it, describe, before, after } from 'node:test' -import { axios, axiosAuth, clean, startApiServer, stopApiServer } from './utils/index.ts' - -const axAno = axios() -const superadmin = await axiosAuth('superadmin@test.com') -const dmeadus = await axiosAuth('dmeadus0@answers.com') -const dmeadusOrg = await axiosAuth({ email: 'dmeadus0@answers.com', org: 'KWqAGZ4mG' }) - -// Hello World project : https://github.com/data-fair//processing-hello-world -const plugin = { - name: '@data-fair/processing-hello-world', - version: '1.2.2', - distTag: 'latest', -} -const pluginId = '@data-fair-processing-hello-world-1' - -describe('plugin', () => { - before(startApiServer) - before(clean) - after(clean) - after(stopApiServer) - - it('should install a plugin from npm', async () => { - const res = await superadmin.post('/api/v1/plugins', { - name: plugin.name, - version: '0.13.0', // Previous version to test update - distTag: 'latest' - }) - assert.equal(res.data.name, plugin.name, 'Plugin name should match') - assert.equal(res.data.id, '@data-fair-processing-hello-world-0', 'Plugin ID should match') - assert.equal(res.data.version, '0.13.0', 'Plugin version should match') - - // Only superadmin can install plugins - await assert.rejects( - dmeadusOrg.post('/api/v1/plugins'), - (err: { status: number }) => err.status === 403, - 'Only superadmin can install plugins' - ) - }) - - it('should install a plugin from tarball', async () => { - const FormData = (await import('form-data')).default - const fs = await import('fs') - const path = await import('path') - - const tarballPath = path.join(import.meta.dirname, 'utils', 'processing-hello-world.tgz') - const formData = new FormData() - 
formData.append('file', fs.createReadStream(tarballPath)) - - const res = await superadmin.post('/api/v1/plugins', formData, { - headers: formData.getHeaders() - }) - - assert.equal(res.data.name, plugin.name, 'Plugin name should match') - assert.equal(pluginId, res.data.id, 'Plugin ID should match') - assert.equal(res.data.version, '1.2.2', 'Plugin version should match tarball version') - - // Only superadmin can install plugins - await assert.rejects( - dmeadusOrg.post('/api/v1/plugins'), - (err: { status: number }) => err.status === 403, - 'Only superadmin can install plugins' - ) - }) - - it('should update a plugin', async () => { - const res = await superadmin.post('/api/v1/plugins', plugin) - assert.equal(res.data.name, plugin.name, 'Plugin name should match') - assert.equal(pluginId, res.data.id, 'Plugin ID should match') - assert.equal(res.data.version, plugin.version, 'Plugin version should match') - - // Only superadmin can update plugins - await assert.rejects( - dmeadusOrg.post('/api/v1/plugins'), - (err: { status: number }) => err.status === 403, - 'Only superadmin can update plugins' - ) - }) - - it('should list installed plugins', async () => { - // List plugins as superadmin - let res = await superadmin.get('/api/v1/plugins') - assert.equal(res.data.count, 2, 'Superadmin should see the installed plugin') - assert.equal(res.data.results.length, 2) - assert.equal(res.data.results[0].name, plugin.name) - - // List plugins as admin (privateAccess filter is required) - await assert.rejects( - dmeadusOrg.get('/api/v1/plugins'), - (err: { status: number }) => err.status === 400, - 'Admin should get error when privateAccess filter is missing' - ) - - res = await dmeadusOrg.get('/api/v1/plugins?privateAccess=organization:KWqAGZ4mG') - assert.equal(res.data.count, 0, 'Admin should see no plugins without access') - assert.equal(res.data.results.length, 0) - }) - - it('should get specific plugin', async () => { - // Get a plugin that not exists (should fail) - 
await assert.rejects( - superadmin.get('/api/v1/plugins/does-not-exist'), - (err: { status: number }) => err.status === 404, - 'Should not find a plugin that does not exist' - ) - - // Get plugin as superadmin - const res = await superadmin.get('/api/v1/plugins/' + pluginId) - assert.equal(res.data.name, plugin.name, 'Superadmin should get the plugin details') - assert.equal(res.data.id, pluginId) - assert.equal(res.data.version, plugin.version) - - // Get plugin as admin (should fail without access) - // await assert.rejects( - // dmeadusOrg.get('/api/v1/plugins/' + pluginId), - // (err: { status: number }) => err.status === 403, - // 'Admin should not get the plugin details without access' - // ) - }) - - it('should manage plugin access permissions', async () => { - // Make the plugin private with specific access to admin only - await superadmin.put(`/api/v1/plugins/${pluginId}/access`, { - public: false, - privateAccess: [{ type: 'organization', id: 'KWqAGZ4mG' }] - }) - - // Admin should still see the plugin - let res = await dmeadusOrg.get('/api/v1/plugins?privateAccess=organization:KWqAGZ4mG') - assert.equal(res.data.results.length, 1, 'Admin should see plugin with private access') - - // User should not see the plugin anymore - res = await dmeadus.get('/api/v1/plugins?privateAccess=user:dmeadus0') - assert.equal(res.data.results.length, 0, 'User should not see plugin without access') - - // Make the plugin public - await superadmin.put(`/api/v1/plugins/${pluginId}/access`, { public: true }) - - // Now admin and user should be able to see the plugin - res = await dmeadusOrg.get('/api/v1/plugins?privateAccess=organization:KWqAGZ4mG') - assert.equal(res.data.results.length, 1, 'Admin should see public plugin') - - res = await dmeadus.get('/api/v1/plugins?privateAccess=user:dmeadus0') - assert.equal(res.data.results.length, 1, 'User should see public plugin') - }) - - it('should delete a plugin', async () => { - // Check that the plugin is installed before 
deletion - let res = await superadmin.get('/api/v1/plugins') - assert.equal(res.data.count, 2, 'There should be one plugin installed before deletion') - - // Only superadmin can delete plugins - await assert.rejects( - axAno.delete(`/api/v1/plugins/${pluginId}`), - (err: { status: number }) => err.status === 401, - 'Only superadmin can delete plugins, not anonymous user' - ) - await assert.rejects( - dmeadusOrg.delete(`/api/v1/plugins/${pluginId}`), - (err: { status: number }) => err.status === 403, - 'Only superadmin can delete plugins, not admin' - ) - - // Delete the plugin - res = await superadmin.delete(`/api/v1/plugins/${pluginId}`) - assert.equal(res.status, 204, 'Plugin should be deleted successfully when called by superadmin') - - // Check that the plugin is deleted - await assert.rejects( - superadmin.get('/api/v1/plugins/' + pluginId), - (err: { status: number }) => err.status === 404, - 'Plugin is found after deletion, should not be found' - ) - - // Try to delete again (should fail) - await assert.rejects( - superadmin.delete(`/api/v1/plugins/${pluginId}`), - (err: { status: number }) => err.status === 404, - 'Should not be able to delete a plugin that does not exist anymore' - ) - }) -}) diff --git a/test-it/03-processings.ts b/test-it/03-processings.ts deleted file mode 100644 index 6ac0bcd0..00000000 --- a/test-it/03-processings.ts +++ /dev/null @@ -1,343 +0,0 @@ -import { strict as assert } from 'node:assert' -import { it, describe, before, beforeEach, after } from 'node:test' -import { axiosAuth, clean, startApiServer, startWorkerServer, stopApiServer, stopWorkerServer } from './utils/index.ts' -import * as testSpies from '@data-fair/lib-node/test-spies.js' - -testSpies.registerModuleHooks() - -const superadmin = await axiosAuth('superadmin@test.com') -const hlalonde = await axiosAuth({ email: 'hlalonde3@desdev.cn', org: 'KWqAGZ4mG', dep: 'dep1' }) - -let plugin -const createTestPlugin = async () => { - plugin = (await 
superadmin.post('/api/v1/plugins', { - name: '@data-fair/processing-hello-world', - version: '1.2.2', - distTag: 'latest', - description: 'Minimal plugin for data-fair-processings. Create one-line datasets on demand.' - })).data - await superadmin.put(`/api/v1/plugins/${plugin.id}/access`, { public: true }) -} - -describe('processing', () => { - before(startApiServer) - before(startWorkerServer) - beforeEach(clean) - beforeEach(createTestPlugin) - after(stopApiServer) - after(stopWorkerServer) - - it('should create a new processing, activate it and run it', async () => { - let processing = (await superadmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id - })).data - assert.ok(processing._id) - assert.deepEqual(processing.scheduling, []) - assert.ok(!processing.webhookKey) - - const processings = (await superadmin.get('/api/v1/processings')).data - assert.equal(processings.count, 1) - assert.equal(processings.results[0]._id, processing._id) - assert.ok(!processings.results[0].webhookKey) - - // no run at first - let runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 0) - - // active but without scheduling = still no run - await superadmin.patch(`/api/v1/processings/${processing._id}`, { - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing' - } - }) - runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 0) - - // active and with scheduling = a scheduled run - await superadmin.patch(`/api/v1/processings/${processing._id}`, { - scheduling: [{ type: 'monthly', dayOfWeek: '*', dayOfMonth: 1, month: '*', hour: 0, minute: 0 }] - }) - runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - 
assert.equal(runs.results[0].status, 'scheduled') - - await superadmin.patch(`/api/v1/processings/${processing._id}`, { scheduling: [] }) - await Promise.all([ - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isRunning', 10000) - ]) - runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - assert.equal(runs.results[0].status, 'running') - - // nothing, failure is normal we have no api key - const [topicEvent] = await Promise.all([ - testSpies.waitFor('pushEvent', 10000), - testSpies.waitFor('isFailure', 11000) - ]) - assert.equal(topicEvent, `processings:processing-finish-error:${processing._id}`) - - const run = (await superadmin.get('/api/v1/runs/' + runs.results[0]._id)).data - assert.equal(run.status, 'error') - assert.equal(run.log[2].type, 'step') - assert.equal(run.log[3].type, 'error') - - processing = (await superadmin.get(`/api/v1/processings/${processing._id}`)).data - assert.ok(processing.lastRun) - assert.equal(processing.lastRun.status, 'error') - assert.ok(!processing.webhookKey) - }) - - it('should kill a long run with SIGTERM', async () => { - const processing = (await superadmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id, - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - message: 'Hello world test processing long', - delay: 4 - } - })).data - - await Promise.all([ - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isRunning', 10000) // We wait for the run to be triggered - ]) - const runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - let run = runs.results[0] - assert.equal(run.status, 'running') - await superadmin.post(`/api/v1/runs/${run._id}/_kill`) - run = (await 
superadmin.get(`/api/v1/runs/${run._id}`)).data - assert.equal(run.status, 'kill') - await testSpies.waitFor('isKilled', 10000) - run = (await superadmin.get(`/api/v1/runs/${run._id}`)).data - assert.equal(run.status, 'killed') - assert.equal(run.log.length, 6) - - // limits were updated - const limits = (await superadmin.get('/api/v1/limits/user/superadmin')).data - assert.ok(limits.processings_seconds.consumption >= 1) - assert.equal(limits.processings_seconds.limit, -1) - }) - - it('should kill a long run with SIGTERM and wait for grace period', async () => { - const processing = (await superadmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id, - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - message: 'Hello world test processing long', - delay: 10000, - ignoreStop: true - } - })).data - - await Promise.all([ - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isRunning', 10000) // We wait for the run to be triggered - ]) - await new Promise(resolve => setTimeout(resolve, 1000)) - const runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - let run = runs.results[0] - assert.equal(run.status, 'running') - await superadmin.post(`/api/v1/runs/${run._id}/_kill`) - run = (await superadmin.get(`/api/v1/runs/${run._id}`)).data - assert.equal(run.status, 'kill') - await testSpies.waitFor('isKilled', 10000) - run = (await superadmin.get(`/api/v1/runs/${run._id}`)).data - assert.equal(run.status, 'killed') - assert.equal(run.log.length, 4) - }) - - it('should fail a run if processings_seconds limit is exceeded', async () => { - await superadmin.post('/api/v1/limits/user/superadmin', { - processings_seconds: { limit: 1 }, - lastUpdate: new Date().toISOString() - }) - - const processing = (await superadmin.post('/api/v1/processings', { - 
title: 'Hello processing', - plugin: plugin.id, - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - message: 'Hello world test processing long', - delay: 1, - ignoreStop: true - } - })).data - - await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`) - await testSpies.waitFor('isFailure', 10000) - - let limits = (await superadmin.get('/api/v1/limits/user/superadmin')).data - const consumption = limits.processings_seconds.consumption - assert.ok(consumption >= 1) - - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`) - await testSpies.waitFor('processingsSecondsExceeded', 10000) - limits = (await superadmin.get('/api/v1/limits/user/superadmin')).data - assert.equal(limits.processings_seconds.consumption, consumption) - }) - - it('should manage a processing as a department admin', async () => { - const processing = (await hlalonde.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id - })).data - - const processings = (await hlalonde.get('/api/v1/processings')).data - assert.equal(processings.count, 1) - assert.equal(processings.results[0]._id, processing._id) - await hlalonde.patch(`/api/v1/processings/${processing._id}`, { - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing' - } - }) - - await Promise.all([ - hlalonde.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isFailure', 10000) - ]) - - const runs = (await hlalonde.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - // failure is normal we have no api key - assert.equal(runs.results[0].status, 'error') - }) - - it('should config a new processing, with a secret field', async () => { - const processing = (await 
superadmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id - })).data - assert.ok(processing._id) - - // configure the processing - const patchRes = await superadmin.patch(`/api/v1/processings/${processing._id}`, { - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing', - secretField: 'my secret value' - } - }) - assert.equal(patchRes.data.config.secretField, '********') - - const getRes = await superadmin.get(`/api/v1/processings/${processing._id}`) - assert.equal(getRes.data.config.secretField, '********') - - // Patch the processing to edit the secret field - const patchRes2 = await superadmin.patch(`/api/v1/processings/${processing._id}`, { - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing', - secretField: 'my new secret value' - } - }) - assert.equal(patchRes2.data.config.secretField, '********') - - // trigger the processing - await Promise.all([ - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isRunning', 10000) - ]) - - // nothing, failure is normal we have no api key - const [topicEvent] = await Promise.all([ - testSpies.waitFor('pushEvent', 10000), - testSpies.waitFor('isFailure', 11000) - ]) - assert.equal(topicEvent, `processings:processing-finish-error:${processing._id}`) - - const runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - const run = (await superadmin.get('/api/v1/runs/' + runs.results[0]._id)).data - assert.equal(run.status, 'error') - assert.equal(run.log[1].type, 'info') - assert.equal(run.log[1].extra.secrets.secretField, 'my new secret value') - }) - - it('should patch config with secrets', async () => { - // 
create a new processing with a secret field - const processing = (await superadmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id, - active: true, - config: { - datasetMode: 'create', - dataset: { title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing', - secretField: 'my secret value' - } - })).data - assert.equal(processing.config.secretField, '********') - - const getRes = await superadmin.get(`/api/v1/processings/${processing._id}`) - assert.equal(getRes.data.config.secretField, '********') - - // Patch the processing without editing the secret field - let patchRes = await superadmin.patch(`/api/v1/processings/${processing._id}`, { - config: { - datasetMode: 'create', - dataset: { title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing (edited)', - secretField: '********' - } - }) - assert.equal(patchRes.data.config.secretField, '********') - - // trigger the processing - await Promise.all([ - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isFailure', 15000) - ]) - - // get the last run to check if the plugin has the uncrypted secret - const runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1, 'There should be one run') - const run = (await superadmin.get('/api/v1/runs/' + runs.results[0]._id)).data - assert.equal(run.status, 'error') - assert.equal(run.log[1].extra.secrets.secretField, 'my secret value', 'The secret field should be uncrypted when passed to the plugin') - - // patch the config to unset the secret field - patchRes = await superadmin.patch(`/api/v1/processings/${processing._id}`, { - config: { - datasetMode: 'create', - dataset: { title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing (edited)', - secretField: '' - } - }) - assert.equal(patchRes.data.config.secretField, 
'') - - // trigger the processing - await Promise.all([ - superadmin.post(`/api/v1/processings/${processing._id}/_trigger`), - testSpies.waitFor('isFailure', 15000) - ]) - }) -}) diff --git a/test-it/04-permissions.ts b/test-it/04-permissions.ts deleted file mode 100644 index 20781662..00000000 --- a/test-it/04-permissions.ts +++ /dev/null @@ -1,198 +0,0 @@ -import { strict as assert } from 'node:assert' -import { it, describe, before, beforeEach, after } from 'node:test' -import { axiosAuth, clean, startApiServer, stopApiServer } from './utils/index.ts' - -const superadmin = await axiosAuth('superadmin@test.com') -const cdurning2 = await axiosAuth('cdurning2@desdev.cn') -const dmeadus = await axiosAuth('dmeadus0@answers.com') -const admin1Koumoul = await axiosAuth({ email: 'admin1@test.com', org: 'koumoul' }) -const contrib1Koumoul = await axiosAuth({ email: 'contrib1@test.com', org: 'koumoul' }) -const user1Koumoul = await axiosAuth({ email: 'user1@test.com', org: 'koumoul' }) -const dmeadusFivechat = await axiosAuth({ email: 'dmeadus0@answers.com', org: 'KWqAGZ4mG' }) -const dmeadusKoumoul = await axiosAuth({ email: 'dmeadus0@answers.com', org: 'koumoul' }) -const depAdmin = await axiosAuth({ email: 'hlalonde3@desdev.cn', org: 'KWqAGZ4mG', dep: 'dep1' }) - -let plugin -const createTestPlugin = async () => { - plugin = (await superadmin.post('/api/v1/plugins', { - name: '@data-fair/processing-hello-world', - version: '0.12.2', - distTag: 'latest', - description: 'Minimal plugin for data-fair-processings. Create one-line datasets on demand.' 
- })).data - await superadmin.put(`/api/v1/plugins/${plugin.id}/access`, { public: true }) -} - -describe('processing', () => { - before(startApiServer) - beforeEach(clean) - beforeEach(createTestPlugin) - after(stopApiServer) - - it('should create a new processing and work on it as admin of an organization', async function () { - // create a processing and a scheduled run - const processing = (await admin1Koumoul.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id - })).data - assert.ok(processing._id) - - await admin1Koumoul.patch(`/api/v1/processings/${processing._id}`, { - active: true, - config: { - datasetMode: 'create', - dataset: { id: 'hello-world-test-processings', title: 'Hello world test processing' }, - overwrite: false, - message: 'Hello world test processing' - }, - scheduling: [{ type: 'monthly', dayOfWeek: '*', dayOfMonth: 1, month: '*', hour: 0, minute: 0 }] - }) - // list permission for admins and contribs in orga - assert.equal((await admin1Koumoul.get('/api/v1/processings')).data.count, 1) - assert.equal((await contrib1Koumoul.get('/api/v1/processings')).data.count, 1) - assert.equal((await user1Koumoul.get('/api/v1/processings')).data.count, 0) - - // read permission for admins and contribs in orga - const admin1Processing = (await admin1Koumoul.get(`/api/v1/processings/${processing._id}`)).data - assert.ok(admin1Processing) - assert.equal(admin1Processing.userProfile, 'admin') - const user1Processing = (await contrib1Koumoul.get(`/api/v1/processings/${processing._id}`)).data - assert.ok(user1Processing) - assert.equal(user1Processing.userProfile, 'read') - await assert.rejects(user1Koumoul.get(`/api/v1/processings/${processing._id}`), { status: 403 }) - - // read runs permission for admins and contribs in orga - const runs = (await admin1Koumoul.get('/api/v1/runs', { params: { processing: processing._id } })).data - assert.equal(runs.count, 1) - assert.equal(runs.results[0].status, 'scheduled') - assert.equal((await 
contrib1Koumoul.get('/api/v1/runs', { params: { processing: processing._id } })).data.count, 1) - // await assert.rejects(user1Koumoul.get('/api/v1/runs', { params: { processing: processing._id } }), { status: 403 }) - // write permission only for admin - await admin1Koumoul.patch(`/api/v1/processings/${processing._id}`, { title: 'test' }) - await assert.rejects(contrib1Koumoul.patch(`/api/v1/processings/${processing._id}`, { title: 'test' }), { status: 403 }) - await assert.rejects(user1Koumoul.patch(`/api/v1/processings/${processing._id}`, { title: 'test' }), { status: 403 }) - - // no permission at all for outsiders - assert.equal((await dmeadusFivechat.get('/api/v1/processings', { params: { owner: 'organization:koumoul' } })).data.count, 0) - assert.equal((await cdurning2.get('/api/v1/processings', { params: { owner: 'organization:koumoul' } })).data.count, 0) - await assert.rejects(dmeadusFivechat.get(`/api/v1/processings/${processing._id}`), { status: 403 }) - await assert.rejects(cdurning2.get(`/api/v1/processings/${processing._id}`), { status: 403 }) - // await assert.rejects(dmeadusFivechat.get('/api/v1/runs', { params: { processing: processing._id } }), { status: 403 }) - // await assert.rejects(cdurning2.get('/api/v1/runs', { params: { processing: processing._id } }), { status: 403 }) - - // add permission based on user email and partner org - await admin1Koumoul.patch(`/api/v1/processings/${processing._id}`, { - permissions: [{ - profile: 'read', - target: { type: 'userEmail', email: 'cdurning2@desdev.cn' } - }, { - profile: 'read', - target: { type: 'partner', organization: { name: 'Fivechat', id: 'KWqAGZ4mG' }, roles: ['admin'] } - }] - }) - // list permission ok with profile "read" - assert.equal((await dmeadusFivechat.get('/api/v1/processings', { params: { owner: 'organization:koumoul' } })).data.count, 1) - assert.equal((await cdurning2.get('/api/v1/processings', { params: { owner: 'organization:koumoul' } })).data.count, 1) - assert.equal((await 
dmeadus.get('/api/v1/processings', { params: { owner: 'organization:koumoul' } })).data.count, 0) - // read permission ok too - const dmeadusFivechatProcessing = (await dmeadusFivechat.get(`/api/v1/processings/${processing._id}`)).data - assert.ok(dmeadusFivechatProcessing) - assert.equal(dmeadusFivechatProcessing.userProfile, 'read') - const cdurning2Processing = (await cdurning2.get(`/api/v1/processings/${processing._id}`)).data - assert.ok(cdurning2Processing) - assert.equal(cdurning2Processing.userProfile, 'read') - // read runs ok too - assert.equal((await dmeadusFivechat.get('/api/v1/runs', { params: { processing: processing._id, owner: 'organization:koumoul' } })).data.count, 1) - assert.equal((await cdurning2.get('/api/v1/runs', { params: { processing: processing._id, owner: 'organization:koumoul' } })).data.count, 1) - assert.equal((await dmeadus.get('/api/v1/runs', { params: { processing: processing._id, owner: 'organization:koumoul' } })).data.count, 0) - // permission depends on active account (simple user from partner cannot read it) - await assert.rejects(dmeadus.get(`/api/v1/processings/${processing._id}`), { status: 403 }) - // still no write permissions - await assert.rejects(dmeadusFivechat.patch(`/api/v1/processings/${processing._id}`, { title: 'test' }), { status: 403 }) - await assert.rejects(cdurning2.patch(`/api/v1/processings/${processing._id}`, { title: 'test' }), { status: 403 }) - - await superadmin.delete(`/api/v1/plugins/${plugin.id}`) - }) - - it('should list processings with good permissions', async function () { - // Add 2 processings for 2 users/orgs - await admin1Koumoul.post('/api/v1/processings', { - title: 'Hello processing 1', - plugin: plugin.id - }) - await dmeadusFivechat.post('/api/v1/processings', { - title: 'Hello processing 2', - plugin: plugin.id - }) - - // list processings - assert.equal((await admin1Koumoul.get('/api/v1/processings?owner=organization:koumoul')).data.count, 1) // Because only one processing in koumoul 
and admin1 is admin of koumoul - assert.equal((await dmeadusFivechat.get('/api/v1/processings?owner=organization:KWqAGZ4mG')).data.count, 1) // Because only one processing in Fivechat and dmeadus is admin of Fivechat - assert.equal((await dmeadusKoumoul.get('/api/v1/processings?owner=organization:koumoul')).data.count, 1) // Because only one processing in koumoul and dmeadus is connected to the orga koumoul - assert.equal((await dmeadusKoumoul.get('/api/v1/processings?owner=organization:koumoul,organization:KWqAGZ4mG')).data.count, 1) // Lists the processings of koumoul, not fivechart, because dmeadus is connected to the orga koumoul - - assert.equal((await superadmin.get('/api/v1/processings?showAll=true')).data.count, 2) // Lists all the processings without permission filter - assert.equal((await superadmin.get('/api/v1/processings?showAll=true&owner=organization:koumoul')).data.results.length, 1) // Lists all the processings without permission filter but with owner filter - }) - - it('sould create processings as department admin', async function () { - // Create a processing as department admin in his department - const processing = (await depAdmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id, - owner: { - id: 'KWqAGZ4mG', - name: 'Fivechat', - type: 'organization', - department: 'dep1', - departmentName: 'department 1' - } - })).data - assert.ok(processing._id) - - // Create a processing as department admin in another department with permissions on the department (should fail) - await assert.rejects(depAdmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id, - owner: { - id: 'KWqAGZ4mG', - name: 'Fivechat', - type: 'organization', - department: 'dep2', - departmentName: 'department 2' - } - }), { status: 403 }) // Cannot create a processing in another department than the one he is connected to - - // Create a processing as department admin in the root organization - await 
assert.rejects(depAdmin.post('/api/v1/processings', { - title: 'Hello processing', - plugin: plugin.id, - owner: { - id: 'KWqAGZ4mG', - name: 'Fivechat', - type: 'organization' - } - }), { status: 403 }) // Cannot create a processing in the root organization when he is an admin of department - - // Change the owner of a processing to another organization witout permissions - await assert.rejects(depAdmin.patch(`/api/v1/processings/${processing._id}`, { - owner: { - id: 'koumoul', - name: 'Koumoul', - type: 'organization' - } - }), { status: 403 }) // Cannot change the owner to another organization than the one he is connected to - - // Change the owner of a processing to another department - const processing2 = (await depAdmin.patch(`/api/v1/processings/${processing._id}`, { - owner: { - id: 'KWqAGZ4mG', - name: 'Fivechat', - type: 'organization', - department: 'dep2', - departmentName: 'department 2' - } - })).data - assert.ok(processing2._id) - }) -}) diff --git a/test-it/utils/index.ts b/test-it/utils/index.ts deleted file mode 100644 index 536aab47..00000000 --- a/test-it/utils/index.ts +++ /dev/null @@ -1,51 +0,0 @@ -import type { AxiosAuthOptions } from '@data-fair/lib-node/axios-auth.js' -import { axiosBuilder } from '@data-fair/lib-node/axios.js' -import { axiosAuth as _axiosAuth } from '@data-fair/lib-node/axios-auth.js' -import fs from 'fs-extra' - -const directoryUrl = 'http://localhost:5600/simple-directory' - -const axiosOpts = { baseURL: 'http://localhost:5600/processings' } - -export const axios = (opts = {}) => axiosBuilder({ ...axiosOpts, ...opts }) - -export const axiosAuth = (opts: string | Omit) => { - opts = typeof opts === 'string' ? { email: opts } : opts - const password = opts.email === 'superadmin@test.com' ? 
'superpasswd' : 'passwd' - const adminMode = opts.email === 'superadmin@test.com' - return _axiosAuth({ ...opts, password, adminMode, axiosOpts, directoryUrl }) -} - -export const clean = async () => { - const mongo = (await import('../../api/src/mongo.ts')).default - for (const name of ['processings', 'runs', 'limits']) { - await mongo.db.collection(name).deleteMany({}) - } - await fs.emptyDir('./data/test/plugins') -} - -process.env.SUPPRESS_NO_CONFIG_WARNING = '1' - -export const startApiServer = async () => { - console.log('Starting API server...') - process.env.NODE_CONFIG_DIR = 'api/config/' - const apiServer = await import('../../api/src/server.ts') - await apiServer.start() -} - -export const stopApiServer = async () => { - const apiServer = await import('../../api/src/server.ts') - await apiServer.stop() -} - -export const startWorkerServer = async () => { - console.log('Starting worker server...') - process.env.NODE_CONFIG_DIR = 'worker/config/' - const workerServer = await import('../../worker/src/worker.ts') - await workerServer.start() -} - -export const stopWorkerServer = async () => { - const workerServer = await import('../../worker/src/worker.ts') - await workerServer.stop() -} diff --git a/tests/features/plugins/install.api.spec.ts b/tests/features/plugins/install.api.spec.ts new file mode 100644 index 00000000..8261005b --- /dev/null +++ b/tests/features/plugins/install.api.spec.ts @@ -0,0 +1,137 @@ +import { test, expect } from '@playwright/test' +import { axios, axiosAuth, clean } from '../../support/axios.ts' + +const axAno = axios() + +const plugin = { + name: '@data-fair/processing-hello-world', + version: '1.2.2', + distTag: 'latest', +} +const pluginId = '@data-fair-processing-hello-world-1' + +test.describe('plugin', () => { + test.beforeAll(clean) + test.afterAll(clean) + + test('should install a plugin from npm', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const adminTestOrg1 = await axiosAuth({ 
email: 'test_admin1@test.com', org: 'test_org1' }) + + const res = await superadmin.post('/api/v1/plugins', { + name: plugin.name, + version: '0.13.0', // Previous version to test update + distTag: 'latest' + }) + expect(res.data.name).toBe(plugin.name) + expect(res.data.id).toBe('@data-fair-processing-hello-world-0') + expect(res.data.version).toBe('0.13.0') + + // Only superadmin can install plugins + await expect(adminTestOrg1.post('/api/v1/plugins')).rejects.toMatchObject({ status: 403 }) + }) + + test('should install a plugin from tarball', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const adminTestOrg1 = await axiosAuth({ email: 'test_admin1@test.com', org: 'test_org1' }) + const FormData = (await import('form-data')).default + const fs = await import('fs') + const path = await import('path') + + const tarballPath = path.join(import.meta.dirname, '..', '..', 'fixtures', 'processing-hello-world.tgz') + const formData = new FormData() + formData.append('file', fs.createReadStream(tarballPath)) + + const res = await superadmin.post('/api/v1/plugins', formData, { + headers: formData.getHeaders() + }) + + expect(res.data.name).toBe(plugin.name) + expect(res.data.id).toBe(pluginId) + expect(res.data.version).toBe('1.2.2') + + await expect(adminTestOrg1.post('/api/v1/plugins')).rejects.toMatchObject({ status: 403 }) + }) + + test('should update a plugin', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const adminTestOrg1 = await axiosAuth({ email: 'test_admin1@test.com', org: 'test_org1' }) + + const res = await superadmin.post('/api/v1/plugins', plugin) + expect(res.data.name).toBe(plugin.name) + expect(res.data.id).toBe(pluginId) + expect(res.data.version).toBe(plugin.version) + + await expect(adminTestOrg1.post('/api/v1/plugins')).rejects.toMatchObject({ status: 403 }) + }) + + test('should list installed plugins', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') 
+ const adminTestOrg1 = await axiosAuth({ email: 'test_admin1@test.com', org: 'test_org1' }) + + let res = await superadmin.get('/api/v1/plugins') + expect(res.data.count).toBe(2) + expect(res.data.results.length).toBe(2) + expect(res.data.results[0].name).toBe(plugin.name) + + await expect(adminTestOrg1.get('/api/v1/plugins')).rejects.toMatchObject({ status: 400 }) + + res = await adminTestOrg1.get('/api/v1/plugins?privateAccess=organization:test_org1') + expect(res.data.count).toBe(0) + expect(res.data.results.length).toBe(0) + }) + + test('should get specific plugin', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + + await expect(superadmin.get('/api/v1/plugins/does-not-exist')).rejects.toMatchObject({ status: 404 }) + + const res = await superadmin.get('/api/v1/plugins/' + pluginId) + expect(res.data.name).toBe(plugin.name) + expect(res.data.id).toBe(pluginId) + expect(res.data.version).toBe(plugin.version) + }) + + test('should manage plugin access permissions', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const adminTestOrg1 = await axiosAuth({ email: 'test_admin1@test.com', org: 'test_org1' }) + const aloneUser = await axiosAuth('test_alone@test.com') + + // make the plugin private with specific access to admin only + await superadmin.put(`/api/v1/plugins/${pluginId}/access`, { + public: false, + privateAccess: [{ type: 'organization', id: 'test_org1' }] + }) + + let res = await adminTestOrg1.get('/api/v1/plugins?privateAccess=organization:test_org1') + expect(res.data.results.length).toBe(1) + + res = await aloneUser.get('/api/v1/plugins?privateAccess=user:test_alone') + expect(res.data.results.length).toBe(0) + + await superadmin.put(`/api/v1/plugins/${pluginId}/access`, { public: true }) + + res = await adminTestOrg1.get('/api/v1/plugins?privateAccess=organization:test_org1') + expect(res.data.results.length).toBe(1) + + res = await 
aloneUser.get('/api/v1/plugins?privateAccess=user:test_alone') + expect(res.data.results.length).toBe(1) + }) + + test('should delete a plugin', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const adminTestOrg1 = await axiosAuth({ email: 'test_admin1@test.com', org: 'test_org1' }) + + let res = await superadmin.get('/api/v1/plugins') + expect(res.data.count).toBe(2) + + await expect(axAno.delete(`/api/v1/plugins/${pluginId}`)).rejects.toMatchObject({ status: 401 }) + await expect(adminTestOrg1.delete(`/api/v1/plugins/${pluginId}`)).rejects.toMatchObject({ status: 403 }) + + res = await superadmin.delete(`/api/v1/plugins/${pluginId}`) + expect(res.status).toBe(204) + + await expect(superadmin.get('/api/v1/plugins/' + pluginId)).rejects.toMatchObject({ status: 404 }) + await expect(superadmin.delete(`/api/v1/plugins/${pluginId}`)).rejects.toMatchObject({ status: 404 }) + }) +}) diff --git a/tests/features/plugins/registry.api.spec.ts b/tests/features/plugins/registry.api.spec.ts new file mode 100644 index 00000000..9591949d --- /dev/null +++ b/tests/features/plugins/registry.api.spec.ts @@ -0,0 +1,21 @@ +import { test, expect } from '@playwright/test' +import { axios } from '../../support/axios.ts' + +const axAno = axios() + +test.describe('plugin-registry', () => { + test('should search for plugins (just latest) on npmjs', async () => { + const res = await axAno.get('/api/v1/plugins-registry', { params: { q: 'hello-world' } }) + const hwProcessingPackages = res.data.results.filter((p: { name: string }) => p.name === '@data-fair/processing-hello-world') + expect(hwProcessingPackages.length).toBe(1) + expect(res.data.results[0].distTag).toBe('latest') + }) + + test('should search for plugins and all their distTag versions on npmjs', async () => { + const res = await axAno.get('/api/v1/plugins-registry', { params: { q: 'hello-world', showAll: 'true' } }) + const hwProcessingPackages = res.data.results.filter((p: { name: string }) => 
p.name === '@data-fair/processing-hello-world') + expect(hwProcessingPackages.length).toBe(2) + expect(['latest', 'test']).toContain(res.data.results[0].distTag) + expect(['latest', 'test']).toContain(res.data.results[1].distTag) + }) +}) diff --git a/tests/features/processings/lifecycle.api.spec.ts b/tests/features/processings/lifecycle.api.spec.ts new file mode 100644 index 00000000..74635804 --- /dev/null +++ b/tests/features/processings/lifecycle.api.spec.ts @@ -0,0 +1,293 @@ +import { test, expect } from '@playwright/test' +import { axiosAuth, clean, waitForRunStatus } from '../../support/axios.ts' + +const installTestPlugin = async (superadmin: any) => { + const plugin = (await superadmin.post('/api/v1/plugins', { + name: '@data-fair/processing-hello-world', + version: '1.2.2', + distTag: 'latest', + description: 'Minimal plugin for data-fair-processings. Create one-line datasets on demand.' + })).data + await superadmin.put(`/api/v1/plugins/${plugin.id}/access`, { public: true }) + return plugin +} + +test.describe('processing', () => { + test.beforeEach(clean) + test.afterAll(clean) + + test('should create a new processing, activate it and run it', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + let processing = (await superadmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' } + })).data + expect(processing._id).toBeTruthy() + expect(processing.scheduling).toEqual([]) + expect(processing.webhookKey).toBeFalsy() + + const processings = (await superadmin.get('/api/v1/processings?showAll=true&owner=user:test_superadmin')).data + expect(processings.results.find((p: any) => p._id === processing._id)).toBeTruthy() + + let runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data + expect(runs.count).toBe(0) + + await 
superadmin.patch(`/api/v1/processings/${processing._id}`, { + active: true, + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing' + } + }) + runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data + expect(runs.count).toBe(0) + + await superadmin.patch(`/api/v1/processings/${processing._id}`, { + scheduling: [{ type: 'monthly', dayOfWeek: '*', dayOfMonth: 1, month: '*', hour: 0, minute: 0 }] + }) + runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data + expect(runs.count).toBe(1) + expect(runs.results[0].status).toBe('scheduled') + + await superadmin.patch(`/api/v1/processings/${processing._id}`, { scheduling: [] }) + const triggered = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + const finishedRun = await waitForRunStatus(triggered._id, 'finished', 30_000) + expect(finishedRun.status).toBe('finished') + + runs = (await superadmin.get('/api/v1/runs', { params: { processing: processing._id } })).data + expect(runs.count).toBe(1) + + const run = (await superadmin.get('/api/v1/runs/' + runs.results[0]._id)).data + expect(run.status).toBe('finished') + expect(run.log.length).toBeGreaterThan(0) + expect(run.log.some((l: any) => l.type === 'step')).toBe(true) + + processing = (await superadmin.get(`/api/v1/processings/${processing._id}`)).data + expect(processing.lastRun).toBeTruthy() + expect(processing.lastRun.status).toBe('finished') + }) + + test('should kill a long run with SIGTERM', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + const processing = (await superadmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' }, + active: 
true, + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + message: 'Hello world test processing long', + delay: 4 + } + })).data + + const triggered = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(triggered._id, 'running') + await superadmin.post(`/api/v1/runs/${triggered._id}/_kill`) + const killedRun = await waitForRunStatus(triggered._id, 'killed') + expect(killedRun.log.length).toBe(6) + + const limits = (await superadmin.get('/api/v1/limits/user/test_superadmin')).data + expect(limits.processings_seconds.consumption).toBeGreaterThanOrEqual(1) + expect(limits.processings_seconds.limit).toBe(-1) + }) + + test('should kill a long run with SIGTERM and wait for grace period', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + const processing = (await superadmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' }, + active: true, + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + message: 'Hello world test processing long', + delay: 10000, + ignoreStop: true + } + })).data + + const triggered = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(triggered._id, 'running') + await new Promise(resolve => setTimeout(resolve, 1000)) + await superadmin.post(`/api/v1/runs/${triggered._id}/_kill`) + const killedRun = await waitForRunStatus(triggered._id, 'killed', 30_000) + expect(killedRun.log.length).toBe(4) + }) + + test('should fail a run if processings_seconds limit is exceeded', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + 
await superadmin.post('/api/v1/limits/user/test_superadmin', { + processings_seconds: { limit: 1 }, + lastUpdate: new Date().toISOString() + }) + + const processing = (await superadmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' }, + active: true, + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + message: 'Hello world test processing long', + delay: 1, + ignoreStop: true + } + })).data + + const firstRun = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(firstRun._id, 'finished', 30_000) + + let limits = (await superadmin.get('/api/v1/limits/user/test_superadmin')).data + const consumption = limits.processings_seconds.consumption + expect(consumption).toBeGreaterThanOrEqual(1) + + const secondRun = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(secondRun._id, 'error', 15_000) + limits = (await superadmin.get('/api/v1/limits/user/test_superadmin')).data + expect(limits.processings_seconds.consumption).toBe(consumption) + }) + + test('should manage a processing as a department admin', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + const depAdmin = await axiosAuth({ email: 'test_dep_admin@test.com', org: 'test_org1', dep: 'dep1' }) + + const processing = (await depAdmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id + })).data + + const processings = (await depAdmin.get('/api/v1/processings')).data + expect(processings.count).toBe(1) + expect(processings.results[0]._id).toBe(processing._id) + + await depAdmin.patch(`/api/v1/processings/${processing._id}`, { + active: true, + config: { + datasetMode: 'create', + dataset: { id: 
'test_hello-world-test-processings', title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing' + } + }) + + const triggered = (await depAdmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(triggered._id, 'finished', 30_000) + + const runs = (await depAdmin.get('/api/v1/runs', { params: { processing: processing._id } })).data + expect(runs.count).toBe(1) + expect(runs.results[0].status).toBe('finished') + }) + + test('should config a new processing with a secret field', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + const processing = (await superadmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' } + })).data + expect(processing._id).toBeTruthy() + + const patchRes = await superadmin.patch(`/api/v1/processings/${processing._id}`, { + active: true, + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing', + secretField: 'my secret value' + } + }) + expect(patchRes.data.config.secretField).toBe('********') + + const getRes = await superadmin.get(`/api/v1/processings/${processing._id}`) + expect(getRes.data.config.secretField).toBe('********') + + await superadmin.patch(`/api/v1/processings/${processing._id}`, { + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing', + secretField: 'my new secret value' + } + }) + + const triggered = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(triggered._id, 'finished', 30_000) + + const run = (await superadmin.get('/api/v1/runs/' + 
triggered._id)).data + expect(run.status).toBe('finished') + expect(run.log[1].type).toBe('info') + expect(run.log[1].extra.secrets.secretField).toBe('my new secret value') + }) + + test('should patch config with secrets', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + const processing = (await superadmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' }, + active: true, + config: { + datasetMode: 'create', + dataset: { title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing', + secretField: 'my secret value' + } + })).data + expect(processing.config.secretField).toBe('********') + + let patchRes = await superadmin.patch(`/api/v1/processings/${processing._id}`, { + config: { + datasetMode: 'create', + dataset: { title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing (edited)', + secretField: '********' + } + }) + expect(patchRes.data.config.secretField).toBe('********') + + const triggered = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(triggered._id, 'finished', 30_000) + + const run = (await superadmin.get('/api/v1/runs/' + triggered._id)).data + expect(run.status).toBe('finished') + expect(run.log[1].extra.secrets.secretField).toBe('my secret value') + + patchRes = await superadmin.patch(`/api/v1/processings/${processing._id}`, { + config: { + datasetMode: 'create', + dataset: { title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing (edited)', + secretField: '' + } + }) + expect(patchRes.data.config.secretField).toBe('') + + const lastTrigger = (await superadmin.post(`/api/v1/processings/${processing._id}/_trigger`)).data + await waitForRunStatus(lastTrigger._id, 'finished', 30_000) 
+ }) +}) diff --git a/tests/features/processings/permissions.api.spec.ts b/tests/features/processings/permissions.api.spec.ts new file mode 100644 index 00000000..ce3f98e3 --- /dev/null +++ b/tests/features/processings/permissions.api.spec.ts @@ -0,0 +1,188 @@ +import { test, expect } from '@playwright/test' +import { axiosAuth, clean } from '../../support/axios.ts' + +const installTestPlugin = async (superadmin: any) => { + const plugin = (await superadmin.post('/api/v1/plugins', { + name: '@data-fair/processing-hello-world', + version: '0.12.2', + distTag: 'latest', + description: 'Minimal plugin for data-fair-processings. Create one-line datasets on demand.' + })).data + await superadmin.put(`/api/v1/plugins/${plugin.id}/access`, { public: true }) + return plugin +} + +test.describe('processing permissions', () => { + test.beforeEach(clean) + test.afterAll(clean) + + test('should create a processing and check permissions across roles', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + const adminTestOrg1 = await axiosAuth({ email: 'test_admin1@test.com', org: 'test_org1' }) + const contribTestOrg1 = await axiosAuth({ email: 'test_contrib1@test.com', org: 'test_org1' }) + const userTestOrg1 = await axiosAuth({ email: 'test_user1@test.com', org: 'test_org1' }) + const aloneOutsider = await axiosAuth('test_alone@test.com') + const partnerAdmin = await axiosAuth({ email: 'test_user2@test.com', org: 'test_org2' }) + + const processing = (await adminTestOrg1.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id + })).data + + await adminTestOrg1.patch(`/api/v1/processings/${processing._id}`, { + active: true, + config: { + datasetMode: 'create', + dataset: { id: 'test_hello-world-test-processings', title: 'Hello world test processing' }, + overwrite: false, + message: 'Hello world test processing' + }, + scheduling: [{ type: 'monthly', dayOfWeek: '*', 
dayOfMonth: 1, month: '*', hour: 0, minute: 0 }] + }) + + // list permission for admins and contribs in org + expect((await adminTestOrg1.get('/api/v1/processings')).data.count).toBe(1) + expect((await contribTestOrg1.get('/api/v1/processings')).data.count).toBe(1) + expect((await userTestOrg1.get('/api/v1/processings')).data.count).toBe(0) + + // read permission for admins and contribs in org + const adminProcessing = (await adminTestOrg1.get(`/api/v1/processings/${processing._id}`)).data + expect(adminProcessing.userProfile).toBe('admin') + const contribProcessing = (await contribTestOrg1.get(`/api/v1/processings/${processing._id}`)).data + expect(contribProcessing.userProfile).toBe('read') + await expect(userTestOrg1.get(`/api/v1/processings/${processing._id}`)).rejects.toMatchObject({ status: 403 }) + + // read runs permission for admins and contribs in org + const runs = (await adminTestOrg1.get('/api/v1/runs', { params: { processing: processing._id } })).data + expect(runs.count).toBe(1) + expect(runs.results[0].status).toBe('scheduled') + expect((await contribTestOrg1.get('/api/v1/runs', { params: { processing: processing._id } })).data.count).toBe(1) + + // write permission only for admin + await adminTestOrg1.patch(`/api/v1/processings/${processing._id}`, { title: 'test' }) + await expect(contribTestOrg1.patch(`/api/v1/processings/${processing._id}`, { title: 'test' })).rejects.toMatchObject({ status: 403 }) + await expect(userTestOrg1.patch(`/api/v1/processings/${processing._id}`, { title: 'test' })).rejects.toMatchObject({ status: 403 }) + + // no permission at all for outsiders + expect((await partnerAdmin.get('/api/v1/processings', { params: { owner: 'organization:test_org1' } })).data.count).toBe(0) + expect((await aloneOutsider.get('/api/v1/processings', { params: { owner: 'organization:test_org1' } })).data.count).toBe(0) + await expect(partnerAdmin.get(`/api/v1/processings/${processing._id}`)).rejects.toMatchObject({ status: 403 }) + await 
expect(aloneOutsider.get(`/api/v1/processings/${processing._id}`)).rejects.toMatchObject({ status: 403 }) + + // grant permission based on user email and partner org + await adminTestOrg1.patch(`/api/v1/processings/${processing._id}`, { + permissions: [{ + profile: 'read', + target: { type: 'userEmail', email: 'test_alone@test.com' } + }, { + profile: 'read', + target: { type: 'partner', organization: { name: 'Test Org 2', id: 'test_org2' }, roles: ['admin'] } + }] + }) + + expect((await partnerAdmin.get('/api/v1/processings', { params: { owner: 'organization:test_org1' } })).data.count).toBe(1) + expect((await aloneOutsider.get('/api/v1/processings', { params: { owner: 'organization:test_org1' } })).data.count).toBe(1) + const partnerProcessing = (await partnerAdmin.get(`/api/v1/processings/${processing._id}`)).data + expect(partnerProcessing.userProfile).toBe('read') + const alonePartnerProcessing = (await aloneOutsider.get(`/api/v1/processings/${processing._id}`)).data + expect(alonePartnerProcessing.userProfile).toBe('read') + expect((await partnerAdmin.get('/api/v1/runs', { params: { processing: processing._id, owner: 'organization:test_org1' } })).data.count).toBe(1) + expect((await aloneOutsider.get('/api/v1/runs', { params: { processing: processing._id, owner: 'organization:test_org1' } })).data.count).toBe(1) + + // still no write permissions for the granted readers + await expect(partnerAdmin.patch(`/api/v1/processings/${processing._id}`, { title: 'test' })).rejects.toMatchObject({ status: 403 }) + await expect(aloneOutsider.patch(`/api/v1/processings/${processing._id}`, { title: 'test' })).rejects.toMatchObject({ status: 403 }) + + await superadmin.delete(`/api/v1/plugins/${plugin.id}`) + }) + + test('should list processings with proper org isolation', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + + const adminTestOrg1 = await axiosAuth({ email: 
'test_admin1@test.com', org: 'test_org1' }) + const partnerAdmin = await axiosAuth({ email: 'test_user2@test.com', org: 'test_org2' }) + + await adminTestOrg1.post('/api/v1/processings', { + title: 'Hello processing 1', + plugin: plugin.id + }) + await partnerAdmin.post('/api/v1/processings', { + title: 'Hello processing 2', + plugin: plugin.id + }) + + expect((await adminTestOrg1.get('/api/v1/processings?owner=organization:test_org1')).data.count).toBe(1) + expect((await partnerAdmin.get('/api/v1/processings?owner=organization:test_org2')).data.count).toBe(1) + + expect((await superadmin.get('/api/v1/processings?showAll=true')).data.count).toBeGreaterThanOrEqual(2) + expect((await superadmin.get('/api/v1/processings?showAll=true&owner=organization:test_org1')).data.results.length).toBe(1) + }) + + test('should manage processings as a department admin', async () => { + const superadmin = await axiosAuth('test_superadmin@test.com') + const plugin = await installTestPlugin(superadmin) + const depAdmin = await axiosAuth({ email: 'test_dep_admin@test.com', org: 'test_org1', dep: 'dep1' }) + + // create a processing in his department + const processing = (await depAdmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { + id: 'test_org1', + name: 'Test Org 1', + type: 'organization', + department: 'dep1', + departmentName: 'department 1' + } + })).data + expect(processing._id).toBeTruthy() + + // cannot create in another department + await expect(depAdmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { + id: 'test_org1', + name: 'Test Org 1', + type: 'organization', + department: 'dep2', + departmentName: 'department 2' + } + })).rejects.toMatchObject({ status: 403 }) + + // cannot create in the root organization (no department) + await expect(depAdmin.post('/api/v1/processings', { + title: 'Hello processing', + plugin: plugin.id, + owner: { + id: 'test_org1', + name: 'Test Org 1', + 
type: 'organization' + } + })).rejects.toMatchObject({ status: 403 }) + + // cannot change owner to a different organization + await expect(depAdmin.patch(`/api/v1/processings/${processing._id}`, { + owner: { + id: 'test_org2', + name: 'Test Org 2', + type: 'organization' + } + })).rejects.toMatchObject({ status: 403 }) + + // can change owner to another department in the same org (admin of dep1 can move to dep2 only if also admin there) + // here the dep_admin is only admin in dep1, so this still fails + await expect(depAdmin.patch(`/api/v1/processings/${processing._id}`, { + owner: { + id: 'test_org1', + name: 'Test Org 1', + type: 'organization', + department: 'dep2', + departmentName: 'department 2' + } + })).rejects.toMatchObject({ status: 403 }) + }) +}) diff --git a/test-it/utils/processing-hello-world.tgz b/tests/fixtures/processing-hello-world.tgz similarity index 100% rename from test-it/utils/processing-hello-world.tgz rename to tests/fixtures/processing-hello-world.tgz diff --git a/tests/state-setup.ts b/tests/state-setup.ts new file mode 100644 index 00000000..742db2c4 --- /dev/null +++ b/tests/state-setup.ts @@ -0,0 +1,27 @@ +import assert from 'node:assert/strict' +import { spawn } from 'node:child_process' +import { axiosBuilder } from '@data-fair/lib-node/axios.js' +import { test as setup } from '@playwright/test' +import { apiUrl } from './support/axios.ts' + +const ax = axiosBuilder() + +setup('Stateful tests setup', async () => { + // Check that the dev API server is up + await assert.doesNotReject( + ax.get(`${apiUrl}/api/v1/test-env/pending-tasks`), + `Dev API server seems to be unavailable at ${apiUrl}. +If you are an agent do not try to start it. 
Instead check for a startup failure at the end of dev/logs/dev-api.log and report this problem to your user.` + ) + + // More visible dev server logs straight in the test output + try { + const { existsSync, mkdirSync } = await import('node:fs') + if (!existsSync('dev/logs')) mkdirSync('dev/logs', { recursive: true }) + const tailApi = spawn('tail', ['-n', '0', '-f', 'dev/logs/dev-api.log'], { stdio: 'inherit', detached: true }) + const tailWorker = spawn('tail', ['-n', '0', '-f', 'dev/logs/dev-worker.log'], { stdio: 'inherit', detached: true }) + process.env.TAIL_PIDS = [tailApi.pid, tailWorker.pid].filter(Boolean).join(',') + } catch { + // log tailing is optional + } +}) diff --git a/tests/state-teardown.ts b/tests/state-teardown.ts new file mode 100644 index 00000000..663f2756 --- /dev/null +++ b/tests/state-teardown.ts @@ -0,0 +1,14 @@ +import { test as teardown } from '@playwright/test' + +teardown('Stateful tests teardown', () => { + const pids = process.env.TAIL_PIDS + if (pids) { + for (const pid of pids.split(',')) { + try { + process.kill(parseInt(pid)) + } catch { + // process may have already exited + } + } + } +}) diff --git a/tests/support/axios.ts b/tests/support/axios.ts new file mode 100644 index 00000000..dbf29bea --- /dev/null +++ b/tests/support/axios.ts @@ -0,0 +1,58 @@ +import type { AxiosAuthOptions } from '@data-fair/lib-node/axios-auth.js' +import { axiosBuilder } from '@data-fair/lib-node/axios.js' +import { axiosAuth as _axiosAuth } from '@data-fair/lib-node/axios-auth.js' + +/** + * Test users and orgs are defined in: + * - dev/resources/users.json — accounts test_user1, test_user2, test_contrib1, + * test_admin1, test_alone, test_superadmin (all use password 'passwd' except + * test_superadmin which uses 'superpasswd'). + * - dev/resources/organizations.json — test_org1, test_org2 with departments and roles. + * + * Loaded by simple-directory (STORAGE_TYPE=file in docker-compose.yml). 
+ */ + +export const directoryUrl = `http://${process.env.DEV_HOST}:${process.env.NGINX_PORT1}/simple-directory` +export const apiUrl = `http://localhost:${process.env.DEV_API_PORT}` +export const baseURL = `http://${process.env.DEV_HOST}:${process.env.NGINX_PORT1}/processings` + +const axiosOpts = { baseURL } + +export const axios = (opts = {}) => axiosBuilder({ ...axiosOpts, ...opts }) +export const anonymousAx = axios() + +export const axiosAuth = (opts: string | Omit<AxiosAuthOptions, 'password' | 'directoryUrl'>) => { + opts = typeof opts === 'string' ? { email: opts } : opts + const isSuperadmin = opts.email === 'test_superadmin@test.com' || opts.email === 'superadmin@test.com' + const password = isSuperadmin ? 'superpasswd' : 'passwd' + const adminMode = isSuperadmin + return _axiosAuth({ ...opts, password, adminMode, axiosOpts, directoryUrl }) +} + +export const waitForWorkerIdle = async (timeoutMs = 10_000): Promise<void> => { + const start = Date.now() + while (Date.now() - start < timeoutMs) { + const res = await anonymousAx.get(`${apiUrl}/api/v1/test-env/pending-tasks`) + if (res.data.triggered.length === 0 && res.data.running.length === 0) return + await new Promise(resolve => setTimeout(resolve, 200)) + } + throw new Error(`worker still has pending tasks after ${timeoutMs}ms`) +} + +export const clean = async () => { + await waitForWorkerIdle() + await anonymousAx.delete(`${apiUrl}/api/v1/test-env`) + await anonymousAx.delete(`${apiUrl}/api/v1/test-env/plugins`) +} + +/** Poll the test-env raw-run endpoint until status matches one of the given values. */ +export const waitForRunStatus = async (runId: string, status: string | string[], timeoutMs = 15_000) => { + const statuses = Array.isArray(status) ?
status : [status] + const start = Date.now() + while (Date.now() - start < timeoutMs) { + const res = await anonymousAx.get(`${apiUrl}/api/v1/test-env/raw-run/${runId}`).catch(() => null) + if (res && statuses.includes(res.data.status)) return res.data + await new Promise(resolve => setTimeout(resolve, 100)) + } + throw new Error(`run ${runId} did not reach status ${statuses.join('|')} within ${timeoutMs}ms`) +} diff --git a/tsconfig.json b/tsconfig.json index e0a0c886..a9bf9a49 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -14,5 +14,5 @@ "#api/type/*": ["./api/types/*"] } }, - "exclude": ["node_modules", ".type", "ui", "dev", "data", "test-it"] + "exclude": ["node_modules", ".type", "ui", "dev", "data", "tests"] } diff --git a/tsconfig.test.json b/tsconfig.test.json deleted file mode 100644 index da47df3d..00000000 --- a/tsconfig.test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "strict": false - }, - "include": ["test-it"] -} diff --git a/worker/config/development.mjs b/worker/config/development.mjs index 068b0085..7015cb5f 100644 --- a/worker/config/development.mjs +++ b/worker/config/development.mjs @@ -1,3 +1,8 @@ +const mongoPort = process.env.MONGO_PORT ?? '27017' +const eventsPort = process.env.EVENTS_PORT ?? '8083' +const dfPort = process.env.DF_PORT ?? '8081' +const observerPort = parseInt(process.env.DEV_WORKER_OBSERVER_PORT ?? 
'9091') + export default { cipherPassword: 'dev', dataDir: '../data/development', @@ -5,15 +10,19 @@ export default { dataFairAPIKey: '', // override in local-development.cjs maxFailures: 2, maxFailuresCooldown: 0.05, // 3 minutes - mongoUrl: 'mongodb://localhost:27017/data-fair-processings-development', + mongoUrl: `mongodb://localhost:${mongoPort}/data-fair-processings-development`, secretKeys: { events: 'secret-events' }, observer: { - port: 9091 + port: observerPort }, - privateDataFairUrl: 'http://localhost:8081', - privateEventsUrl: 'http://localhost:8083', + privateDataFairUrl: `http://localhost:${dfPort}`, + privateEventsUrl: `http://localhost:${eventsPort}`, runsRetention: 5, - upgradeRoot: '../' + upgradeRoot: '../', + worker: { + killInterval: 2000, + gracePeriod: 3000 + } } diff --git a/worker/config/test.mjs b/worker/config/test.mjs deleted file mode 100644 index 257d6e88..00000000 --- a/worker/config/test.mjs +++ /dev/null @@ -1,18 +0,0 @@ -export default { - cipherPassword: 'test', - dataDir: './data/test', - dataFairAdminMode: true, - dataFairAPIKey: 'dTpzdXBlcmFkbWluOjZEQ0NXY2ZrSHhVRVQxSzVudmNNg', - mongoUrl: 'mongodb://localhost:27017/data-fair-processings-test', - observer: { - port: 9091 - }, - privateDataFairUrl: 'http://localhost:8081', - worker: { - interval: 100, - killInterval: 2000, - concurrency: 1, - gracePeriod: 3000 - }, - upgradeRoot: './' -} diff --git a/worker/package.json b/worker/package.json index 33979155..7e8836f3 100644 --- a/worker/package.json +++ b/worker/package.json @@ -3,7 +3,7 @@ "main": "index.ts", "type": "module", "scripts": { - "dev": "NODE_ENV=development DEBUG=worker,upgrade* node --watch index.ts" + "dev": "NODE_ENV=development DEBUG=worker,upgrade* node --env-file-if-exists=../.env --watch index.ts" }, "imports": { "#config": "./src/config.ts", diff --git a/worker/src/config.ts b/worker/src/config.ts index 42115a64..77570431 100644 --- a/worker/src/config.ts +++ b/worker/src/config.ts @@ -2,9 +2,6 @@ import type 
{ WorkerConfig } from '../config/type/index.ts' import { assertValid } from '../config/type/index.ts' import config from 'config' -// we reload the config instead of using the singleton from the config module for testing purposes -// @ts-ignore -const workerConfig = process.env.NODE_ENV === 'test' ? config.util.loadFileConfigs(process.env.NODE_CONFIG_DIR, { skipConfigSources: true }) : config -assertValid(workerConfig, { lang: 'en', name: 'config', internal: true }) +assertValid(config, { lang: 'en', name: 'config', internal: true }) -export default workerConfig as WorkerConfig +export default config as unknown as WorkerConfig diff --git a/worker/src/worker.ts b/worker/src/worker.ts index c459ae15..79d559cb 100644 --- a/worker/src/worker.ts +++ b/worker/src/worker.ts @@ -208,8 +208,7 @@ async function iter (run: Run) { } // Run a task in a dedicated child process for extra resiliency to fatal memory exceptions - const path = process.env.NODE_ENV === 'test' ? './worker/src/task/index.ts' : './src/task/index.ts' - const child = spawn('node', ['--disable-warning=ExperimentalWarning', path, run._id, processing._id], { + const child = spawn('node', ['--disable-warning=ExperimentalWarning', './src/task/index.ts', run._id, processing._id], { env: process.env, stdio: ['ignore', 'pipe', 'pipe'] }) From 12394058c57a5f294b59cb49e7eefd122e6c03a6 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Wed, 29 Apr 2026 12:56:33 +0200 Subject: [PATCH 2/4] chore: extract pure functions for unit tests --- shared/runs.ts | 32 ++-- tests/features/api-utils/find.unit.spec.ts | 95 ++++++++++++ .../api-utils/permissions.unit.spec.ts | 140 ++++++++++++++++++ tests/features/shared/cipher.unit.spec.ts | 43 ++++++ tests/features/shared/runs.unit.spec.ts | 63 ++++++++ .../features/worker-utils/axios.unit.spec.ts | 92 ++++++++++++ .../limits-operations.unit.spec.ts | 28 ++++ .../worker-utils/runs-operations.unit.spec.ts | 61 ++++++++ .../worker-operations.unit.spec.ts | 32 ++++ 
worker/src/task/axios-errors.ts | 41 +++++ worker/src/task/axios.ts | 49 +----- worker/src/utils/limits-operations.ts | 17 +++ worker/src/utils/limits.ts | 8 +- worker/src/utils/runs-operations.ts | 38 +++++ worker/src/utils/runs.ts | 11 +- worker/src/utils/worker-operations.ts | 20 +++ worker/src/worker.ts | 14 +- 17 files changed, 702 insertions(+), 82 deletions(-) create mode 100644 tests/features/api-utils/find.unit.spec.ts create mode 100644 tests/features/api-utils/permissions.unit.spec.ts create mode 100644 tests/features/shared/cipher.unit.spec.ts create mode 100644 tests/features/shared/runs.unit.spec.ts create mode 100644 tests/features/worker-utils/axios.unit.spec.ts create mode 100644 tests/features/worker-utils/limits-operations.unit.spec.ts create mode 100644 tests/features/worker-utils/runs-operations.unit.spec.ts create mode 100644 tests/features/worker-utils/worker-operations.unit.spec.ts create mode 100644 worker/src/task/axios-errors.ts create mode 100644 worker/src/utils/limits-operations.ts create mode 100644 worker/src/utils/runs-operations.ts create mode 100644 worker/src/utils/worker-operations.ts diff --git a/shared/runs.ts b/shared/runs.ts index a2855ec4..dbaeecf9 100644 --- a/shared/runs.ts +++ b/shared/runs.ts @@ -19,6 +19,24 @@ export const toCRON = (scheduling: Scheduling): string => { return `${minute} ${hour} ${dayOfMonth} ${month} ${dayOfWeek}` } +/** + * Compute the earliest next-fire date across a list of scheduling rules. + * Returns null when the list is empty. Throws if any rule yields no next date. 
+ */ +export const nextScheduledDate = (schedulings: Scheduling[]): Date | null => { + let nextDate: Date | null = null + for (const scheduling of schedulings) { + const cron = toCRON(scheduling) + const job = new Cron(cron, { timezone: scheduling.timeZone || 'Europe/Paris' }) + const candidate = job.nextRun() + if (!candidate) { + throw new Error('No next date returned for processing scheduling ' + JSON.stringify(scheduling)) + } + if (!nextDate || candidate < nextDate) nextDate = candidate + } + return nextDate +} + export const createNext = async (db: Db, locks: Locks, processing: Processing, triggered: boolean = false, delaySeconds:number = 0): Promise => { const ack = await locks.acquire(processing._id + '/next-run') try { @@ -60,19 +78,7 @@ export const createNext = async (db: Db, locks: Locks, processing: Processing, t } await runsCollection.deleteMany({ 'processing._id': processing._id, status: 'scheduled' }) await processingsCollection.updateOne({ _id: run.processing._id }, { $unset: { nextRun: 1 } }) - let nextDate = null - for (const scheduling of processing.scheduling) { - const cron = toCRON(scheduling) - const job = new Cron(cron, { timezone: scheduling.timeZone || 'Europe/Paris' }) - const nextDateCandidate = job.nextRun() - if (!nextDateCandidate) { - throw new Error('No next date returned for processing scheduling ' + JSON.stringify(scheduling)) - } - if (!nextDate || nextDateCandidate < nextDate) { - nextDate = nextDateCandidate - } - } - + const nextDate = nextScheduledDate(processing.scheduling) if (nextDate) run.scheduledAt = nextDate.toISOString() } diff --git a/tests/features/api-utils/find.unit.spec.ts b/tests/features/api-utils/find.unit.spec.ts new file mode 100644 index 00000000..78cbbc28 --- /dev/null +++ b/tests/features/api-utils/find.unit.spec.ts @@ -0,0 +1,95 @@ +import { test, expect } from '@playwright/test' +import type { SessionStateAuthenticated } from '@data-fair/lib-express' +import findUtils from 
'../../../api/src/misc/utils/find.ts' + +const session = (overrides: any = {}): SessionStateAuthenticated => ({ + user: { id: 'u1', name: 'U1', email: 'u1@test.com', adminMode: false, organizations: [] }, + account: { type: 'user', id: 'u1', name: 'U1' }, + accountRole: 'admin', + ...overrides +}) as unknown as SessionStateAuthenticated + +test.describe('find.sort', () => { + test('returns empty object when no sort string', () => { + expect(findUtils.sort(undefined)).toEqual({}) + expect(findUtils.sort('')).toEqual({}) + }) + + test('parses single sort key', () => { + expect(findUtils.sort('title:1')).toEqual({ title: 1 }) + }) + + test('parses multiple sort keys with directions', () => { + expect(findUtils.sort('title:1,createdAt:-1')).toEqual({ title: 1, createdAt: -1 }) + }) +}) + +test.describe('find.pagination', () => { + test('defaults size=10 skip=0', () => { + expect(findUtils.pagination(undefined, undefined, undefined)).toEqual([10, 0]) + }) + + test('parses size and ignores invalid values', () => { + expect(findUtils.pagination('25', undefined, undefined)).toEqual([25, 0]) + expect(findUtils.pagination('not-a-number', undefined, undefined)).toEqual([10, 0]) + }) + + test('skip takes precedence over page', () => { + expect(findUtils.pagination('20', '3', '50')).toEqual([20, 50]) + }) + + test('falls back to page when skip is missing', () => { + expect(findUtils.pagination('20', '3', undefined)).toEqual([20, 40]) + }) + + test('treats invalid skip as 0 even with page set', () => { + // page is only used as a fallback when skip is undefined; invalid skip stays 0 + expect(findUtils.pagination('20', '3', 'NaN')).toEqual([20, 40]) + }) +}) + +test.describe('find.project', () => { + test('returns empty object when no select', () => { + expect(findUtils.project(undefined)).toEqual({}) + }) + + test('builds projection object with 1 for each field', () => { + expect(findUtils.project('title,owner')).toEqual({ title: 1, owner: 1 }) + }) +}) + 
+test.describe('find.query', () => { + test('full-text search via q', () => { + const q = findUtils.query({ q: 'foo' }, session()) + expect(q.$text).toEqual({ $search: 'foo' }) + }) + + test('owners default to current account', () => { + const q = findUtils.query({}, session()) + expect(q.$and).toBeDefined() + expect(q.$and[0].$or[0]).toMatchObject({ 'owner.type': 'user', 'owner.id': 'u1' }) + }) + + test('showAll requires admin mode', () => { + expect(() => findUtils.query({ showAll: 'true' }, session())).toThrow() + }) + + test('showAll allowed for super admin', () => { + const q = findUtils.query({ showAll: 'true' }, session({ user: { id: 'u1', name: 'U1', email: 'u1@test.com', adminMode: true, organizations: [] } })) + expect(q.$and).toBeUndefined() + }) + + test('explicit owner filter', () => { + const q = findUtils.query({ owner: 'organization:org1' }, session({ user: { id: 'u1', name: 'U1', email: 'u1@test.com', adminMode: true, organizations: [] } })) + expect(q.$and[0].$or[0]).toMatchObject({ 'owner.type': 'organization', 'owner.id': 'org1' }) + }) + + test('rejects unknown owner type', () => { + expect(() => findUtils.query({ owner: 'group:abc' }, session({ user: { id: 'u1', name: 'U1', email: 'u1@test.com', adminMode: true, organizations: [] } }))).toThrow() + }) + + test('applies fieldsMap with comma-separated values', () => { + const q = findUtils.query({ status: 'a,b' }, session(), { status: 'lastRun.status' }) + expect(q['lastRun.status']).toEqual({ $in: ['a', 'b'] }) + }) +}) diff --git a/tests/features/api-utils/permissions.unit.spec.ts b/tests/features/api-utils/permissions.unit.spec.ts new file mode 100644 index 00000000..7924af2b --- /dev/null +++ b/tests/features/api-utils/permissions.unit.spec.ts @@ -0,0 +1,140 @@ +import { test, expect } from '@playwright/test' +import type { SessionStateAuthenticated, Account } from '@data-fair/lib-express' +import permissions from '../../../api/src/misc/utils/permissions.ts' + +const ownerUser: Account = { 
type: 'user', id: 'u1', name: 'U1' } +const ownerOrg: Account = { type: 'organization', id: 'org1', name: 'Org1' } +const ownerOrgWithDept: Account = { type: 'organization', id: 'org1', name: 'Org1', department: 'd1' } + +const session = (overrides: any = {}): SessionStateAuthenticated => ({ + user: { id: 'u1', name: 'U1', email: 'u1@test.com', adminMode: false, organizations: [] }, + account: { type: 'user', id: 'u1', name: 'U1' }, + accountRole: 'admin', + ...overrides +}) as unknown as SessionStateAuthenticated + +test.describe('getOwnerRole', () => { + test('returns admin when user owns the resource', () => { + expect(permissions.getOwnerRole(session(), ownerUser)).toBe('admin') + }) + + test('returns null when account type/id mismatches', () => { + const s = session({ account: { type: 'user', id: 'u2', name: 'U2' } }) + expect(permissions.getOwnerRole(s, ownerUser)).toBeNull() + }) + + test('returns accountRole for matching organization', () => { + const s = session({ account: { type: 'organization', id: 'org1', name: 'Org1' }, accountRole: 'contrib' }) + expect(permissions.getOwnerRole(s, ownerOrg)).toBe('contrib') + }) + + test('null when org session has different department than owner', () => { + const s = session({ account: { type: 'organization', id: 'org1', name: 'Org1', department: 'd2' }, accountRole: 'admin' }) + expect(permissions.getOwnerRole(s, ownerOrgWithDept)).toBeNull() + }) +}) + +test.describe('isAdmin / isContrib / isMember', () => { + test('admin via ownership', () => { + expect(permissions.isAdmin(session(), ownerUser)).toBe(true) + }) + + test('admin via adminMode regardless of ownership', () => { + const s = session({ user: { id: 'u9', adminMode: true, email: 'su@x', organizations: [] }, account: { type: 'user', id: 'u9' } }) + expect(permissions.isAdmin(s, ownerUser)).toBe(true) + }) + + test('isContrib true for contrib role', () => { + const s = session({ account: { type: 'organization', id: 'org1' }, accountRole: 'contrib' }) + 
expect(permissions.isContrib(s, ownerOrg)).toBe(true) + expect(permissions.isAdmin(s, ownerOrg)).toBe(false) + }) + + test('isMember true for any role on owner', () => { + const s = session({ account: { type: 'organization', id: 'org1' }, accountRole: 'user' }) + expect(permissions.isMember(s, ownerOrg)).toBe(true) + expect(permissions.isContrib(s, ownerOrg)).toBe(false) + }) + + test('non-member returns false on all', () => { + const s = session({ account: { type: 'user', id: 'u2' } }) + expect(permissions.isAdmin(s, ownerUser)).toBe(false) + expect(permissions.isContrib(s, ownerUser)).toBe(false) + expect(permissions.isMember(s, ownerUser)).toBe(false) + }) +}) + +test.describe('getUserResourceProfile', () => { + test('admin owner -> admin', () => { + expect(permissions.getUserResourceProfile(ownerUser, [], session())).toBe('admin') + }) + + test('adminMode -> admin', () => { + const s = session({ user: { id: 'u9', adminMode: true, email: 'su@x', organizations: [] }, account: { type: 'user', id: 'u9' } }) + expect(permissions.getUserResourceProfile(ownerUser, [], s)).toBe('admin') + }) + + test('contrib org member -> read (no permission match)', () => { + const s = session({ account: { type: 'organization', id: 'org1' }, accountRole: 'contrib' }) + expect(permissions.getUserResourceProfile(ownerOrg, [], s)).toBe('read') + }) + + test('non-member matches userEmail permission -> exec', () => { + const s = session({ account: { type: 'user', id: 'u2' }, user: { id: 'u2', email: 'u2@test.com', adminMode: false, organizations: [] } }) + const perms = [{ profile: 'exec', target: { type: 'userEmail', email: 'u2@test.com' } }] as any + expect(permissions.getUserResourceProfile(ownerUser, perms, s)).toBe('exec') + }) + + test('non-member with no matching permission -> undefined', () => { + const s = session({ account: { type: 'user', id: 'u2' }, user: { id: 'u2', email: 'u2@test.com', adminMode: false, organizations: [] } }) + 
expect(permissions.getUserResourceProfile(ownerUser, [], s)).toBeUndefined() + }) + + test('partner permission grants read to org session', () => { + const s = session({ account: { type: 'organization', id: 'partner-org' }, accountRole: 'admin', user: { id: 'u3', email: 'u3@test.com', adminMode: false, organizations: [{ id: 'partner-org', role: 'admin' }] } }) + const perms = [{ profile: 'read', target: { type: 'partner', organization: { id: 'partner-org' }, roles: ['admin'] } }] as any + expect(permissions.getUserResourceProfile(ownerUser, perms, s)).toBe('read') + }) + + test('read profile beats no-match even when both exec/read present', () => { + const s = session({ account: { type: 'user', id: 'u2' }, user: { id: 'u2', email: 'u2@test.com', adminMode: false, organizations: [] } }) + const perms = [ + { profile: 'read', target: { type: 'userEmail', email: 'u2@test.com' } }, + { profile: 'exec', target: { type: 'userEmail', email: 'other@test.com' } } + ] as any + expect(permissions.getUserResourceProfile(ownerUser, perms, s)).toBe('read') + }) +}) + +test.describe('getOwnerPermissionFilter', () => { + test('admin gets unfiltered owner match', () => { + const filter = permissions.getOwnerPermissionFilter(session(), ownerUser) + expect(filter).toEqual({ 'owner.type': 'user', 'owner.id': 'u1' }) + expect((filter as any).permissions).toBeUndefined() + }) + + test('owner department included when set', () => { + const filter = permissions.getOwnerPermissionFilter( + session({ account: { type: 'organization', id: 'org1', department: 'd1' }, accountRole: 'admin' }), + ownerOrgWithDept + ) + expect(filter).toMatchObject({ 'owner.type': 'organization', 'owner.id': 'org1', 'owner.department': 'd1' }) + }) + + test('non-member adds permissions $elemMatch with userEmail target', () => { + const s = session({ account: { type: 'user', id: 'u2' }, user: { id: 'u2', email: 'u2@test.com', adminMode: false, organizations: [] } }) + const filter = 
permissions.getOwnerPermissionFilter(s, ownerUser) as any + expect(filter.permissions.$elemMatch.profile).toEqual({ $in: ['read', 'exec'] }) + expect(filter.permissions.$elemMatch.$or).toContainEqual({ 'target.type': 'userEmail', 'target.email': 'u2@test.com' }) + }) + + test('partner branch added when session account is org', () => { + const s = session({ account: { type: 'organization', id: 'partner-org' }, accountRole: 'admin', user: { id: 'u2', email: 'u2@test.com', adminMode: false, organizations: [{ id: 'partner-org', role: 'admin' }] } }) + const filter = permissions.getOwnerPermissionFilter(s, ownerUser) as any + expect(filter.permissions.$elemMatch.$or).toContainEqual({ + 'target.type': 'partner', + 'target.organization.id': 'partner-org', + 'target.roles': 'admin' + }) + }) +}) diff --git a/tests/features/shared/cipher.unit.spec.ts b/tests/features/shared/cipher.unit.spec.ts new file mode 100644 index 00000000..22f374da --- /dev/null +++ b/tests/features/shared/cipher.unit.spec.ts @@ -0,0 +1,43 @@ +import { test, expect } from '@playwright/test' +import { cipher, decipher } from '../../../shared/cipher.ts' + +test.describe('shared/cipher', () => { + test('round-trips plain strings', () => { + const ciphered = cipher('hello world', 'pwd') + expect(typeof ciphered).toBe('object') + expect(ciphered.alg).toBe('aes256') + expect(ciphered.iv).toMatch(/^[0-9a-f]{32}$/) + expect(ciphered.data).toMatch(/^[0-9a-f]+$/) + expect(decipher(ciphered, 'pwd')).toBe('hello world') + }) + + test('round-trips multi-byte UTF-8 strings', () => { + const value = 'éàü 漢字 🚀' + expect(decipher(cipher(value, 'pwd'), 'pwd')).toBe(value) + }) + + test('produces a fresh IV per call (non-deterministic ciphertext)', () => { + const a = cipher('same value', 'pwd') + const b = cipher('same value', 'pwd') + expect(typeof a === 'object' && typeof b === 'object').toBe(true) + if (typeof a === 'object' && typeof b === 'object') { + expect(a.iv).not.toBe(b.iv) + expect(a.data).not.toBe(b.data) 
+ } + }) + + test('cipher passes through already-ciphered content unchanged', () => { + const ciphered = cipher('hello', 'pwd') + const passthrough = cipher(ciphered, 'pwd') + expect(passthrough).toBe(ciphered) + }) + + test('decipher passes through plain string unchanged', () => { + expect(decipher('not-encrypted', 'pwd')).toBe('not-encrypted') + }) + + test('decipher fails when password does not match', () => { + const ciphered = cipher('secret', 'good-pwd') + expect(() => decipher(ciphered, 'bad-pwd')).toThrow() + }) +}) diff --git a/tests/features/shared/runs.unit.spec.ts b/tests/features/shared/runs.unit.spec.ts new file mode 100644 index 00000000..305b28a5 --- /dev/null +++ b/tests/features/shared/runs.unit.spec.ts @@ -0,0 +1,63 @@ +import { test, expect } from '@playwright/test' +import type { Scheduling } from '#api/types' +import { toCRON, nextScheduledDate } from '../../../shared/runs.ts' + +test.describe('shared/runs toCRON', () => { + test('formats a daily schedule', () => { + const s = { type: 'daily', month: '*', dayOfMonth: '*', dayOfWeek: '*', hour: 7, minute: 30 } as unknown as Scheduling + expect(toCRON(s)).toBe('30 7 * * *') + }) + + test('formats a weekly schedule', () => { + const s = { type: 'weekly', month: '*', dayOfMonth: '*', dayOfWeek: '1', hour: 9, minute: 0 } as unknown as Scheduling + expect(toCRON(s)).toBe('0 9 * * 1') + }) + + test('formats a monthly schedule with fixed day', () => { + const s = { type: 'monthly', month: '*', dayOfMonth: 15, dayOfWeek: '*', hour: 6, minute: 0, lastDayOfMonth: false } as unknown as Scheduling + expect(toCRON(s)).toBe('0 6 15 * *') + }) + + test('formats a monthly schedule with lastDayOfMonth', () => { + const s = { type: 'monthly', month: '*', dayOfMonth: 1, dayOfWeek: '*', hour: 6, minute: 0, lastDayOfMonth: true } as unknown as Scheduling + expect(toCRON(s)).toBe('0 6 L * *') + }) + + test('applies hour step', () => { + const s = { type: 'hours', month: '*', dayOfMonth: '*', dayOfWeek: '*', hour: 
'*', hourStep: 3, minute: 15 } as unknown as Scheduling + expect(toCRON(s)).toBe('15 */3 * * *') + }) + + test('applies minute step', () => { + const s = { type: 'hours', month: '*', dayOfMonth: '*', dayOfWeek: '*', hour: '*', minute: 0, minuteStep: 10 } as unknown as Scheduling + expect(toCRON(s)).toBe('0/10 * * * *') + }) +}) + +test.describe('shared/runs nextScheduledDate', () => { + test('returns null for an empty schedule list', () => { + expect(nextScheduledDate([])).toBeNull() + }) + + test('returns the next fire date in the future', () => { + const s = { type: 'daily', month: '*', dayOfMonth: '*', dayOfWeek: '*', hour: 0, minute: 0, timeZone: 'UTC' } as unknown as Scheduling + const next = nextScheduledDate([s]) + expect(next).toBeInstanceOf(Date) + expect(next!.getTime()).toBeGreaterThan(Date.now()) + }) + + test('picks the earliest among multiple schedulings', () => { + const daily = { type: 'daily', month: '*', dayOfMonth: '*', dayOfWeek: '*', hour: 0, minute: 0, timeZone: 'UTC' } as unknown as Scheduling + const hourly = { type: 'hours', month: '*', dayOfMonth: '*', dayOfWeek: '*', hour: '*', hourStep: 1, minute: 0, timeZone: 'UTC' } as unknown as Scheduling + const next = nextScheduledDate([daily, hourly])! + // hourly should be sooner than next-midnight in almost every case (only equal at exact midnight) + const hourlyOnly = nextScheduledDate([hourly])! 
+ expect(next.getTime()).toBe(hourlyOnly.getTime()) + }) + + test('throws when a scheduling rule cannot produce a next date', () => { + // an obviously impossible cron: dayOfMonth=31 with month=2 (February) — croner returns null + const s = { type: 'monthly', month: '2', dayOfMonth: 31, dayOfWeek: '*', hour: 0, minute: 0, lastDayOfMonth: false, timeZone: 'UTC' } as unknown as Scheduling + expect(() => nextScheduledDate([s])).toThrow() + }) +}) diff --git a/tests/features/worker-utils/axios.unit.spec.ts b/tests/features/worker-utils/axios.unit.spec.ts new file mode 100644 index 00000000..e61c541b --- /dev/null +++ b/tests/features/worker-utils/axios.unit.spec.ts @@ -0,0 +1,92 @@ +import { test, expect } from '@playwright/test' +import { prepareAxiosError, getHttpErrorMessage } from '../../../worker/src/task/axios-errors.ts' + +test.describe('prepareAxiosError', () => { + test('returns the error unchanged when there is no response', () => { + const err = new Error('boom') + expect(prepareAxiosError(err)).toBe(err) + }) + + test('shapes a response: keeps location header, drops everything else', () => { + const err: any = { + message: 'failed', + stack: 'stack-trace', + config: { method: 'POST', url: '/x', params: { a: 1 }, data: { b: 2 }, otherSecret: 's' }, + response: { + status: 502, + statusText: 'Bad Gateway', + headers: { 'content-type': 'application/json', location: '/redir' }, + data: { hello: 'world' }, + request: 'should be removed' + } + } + const out = prepareAxiosError(err) + expect(out.status).toBe(502) + expect(out.headers).toEqual({ location: '/redir' }) + expect(out.request).toBeUndefined() + expect(out.config).toEqual({ method: 'POST', url: '/x', params: { a: 1 }, data: { b: 2 } }) + expect(out.message).toBe('failed') + expect(out.stack).toBe('stack-trace') + }) + + test('drops streaming request data and response body', () => { + const err: any = { + message: 'm', + response: { + status: 500, + headers: {}, + data: { _readableState: {}, foo: 1 }, + 
config: { method: 'PUT', url: '/u', data: { _writableState: {}, x: 1 } } + } + } + const out = prepareAxiosError(err) + expect(out.data).toBeUndefined() + expect(out.config.data).toBeUndefined() + }) + + test('falls back to error.request.res when no response set', () => { + const inner = { status: 503, headers: {}, config: { method: 'GET', url: '/x' } } + const err: any = { message: 'm', request: { res: inner } } + const out = prepareAxiosError(err) + expect(out).toBe(inner) + expect(out.status).toBe(503) + expect(out.message).toBe('m') + }) +}) + +test.describe('getHttpErrorMessage', () => { + test('returns undefined when no http status is present', () => { + expect(getHttpErrorMessage({ message: 'oops' })).toBeUndefined() + }) + + test('combines status, statusText and string data', () => { + expect(getHttpErrorMessage({ status: 404, statusText: 'Not Found', data: 'missing' })) + .toBe('404 - Not Found - missing') + }) + + test('falls back to statusCode/statusMessage', () => { + expect(getHttpErrorMessage({ statusCode: 500, statusMessage: 'KO' })).toBe('500 - KO') + }) + + test('JSON-stringifies object data', () => { + expect(getHttpErrorMessage({ status: 400, data: { error: 'x' } })).toBe('400 - {"error":"x"}') + }) + + test('appends url from config when present', () => { + const out = getHttpErrorMessage({ status: 404, config: { url: 'https://elsewhere/path' } }) + expect(out).toContain('(https://elsewhere/path)') + }) + + test('strips known base urls from displayed url', () => { + const out = getHttpErrorMessage( + { status: 404, config: { url: 'http://df.local/api/v1/datasets' } }, + ['http://df.local', null, 'http://internal.df'] + ) + expect(out).toContain('(/api/v1/datasets)') + }) + + test('falls back to err.message when no data', () => { + expect(getHttpErrorMessage({ status: 502, message: 'upstream broken' })) + .toBe('502 - upstream broken') + }) +}) diff --git a/tests/features/worker-utils/limits-operations.unit.spec.ts 
b/tests/features/worker-utils/limits-operations.unit.spec.ts new file mode 100644 index 00000000..8e6c6108 --- /dev/null +++ b/tests/features/worker-utils/limits-operations.unit.spec.ts @@ -0,0 +1,28 @@ +import { test, expect } from '@playwright/test' +import { calculateRemainingLimit } from '../../../worker/src/utils/limits-operations.ts' + +test.describe('calculateRemainingLimit', () => { + test('returns 0 when limits object is null', () => { + expect(calculateRemainingLimit(null, 'processings_seconds')).toBe(0) + }) + + test('returns 0 when key absent from limits object', () => { + expect(calculateRemainingLimit({}, 'processings_seconds')).toBe(0) + }) + + test('returns -1 (unlimited) when limit is -1', () => { + expect(calculateRemainingLimit({ processings_seconds: { limit: -1, consumption: 999 } }, 'processings_seconds')).toBe(-1) + }) + + test('returns limit - consumption when below quota', () => { + expect(calculateRemainingLimit({ processings_seconds: { limit: 100, consumption: 30 } }, 'processings_seconds')).toBe(70) + }) + + test('clamps to 0 when consumption exceeds limit', () => { + expect(calculateRemainingLimit({ processings_seconds: { limit: 100, consumption: 250 } }, 'processings_seconds')).toBe(0) + }) + + test('treats missing consumption as 0', () => { + expect(calculateRemainingLimit({ processings_seconds: { limit: 50 } }, 'processings_seconds')).toBe(50) + }) +}) diff --git a/tests/features/worker-utils/runs-operations.unit.spec.ts b/tests/features/worker-utils/runs-operations.unit.spec.ts new file mode 100644 index 00000000..ac866905 --- /dev/null +++ b/tests/features/worker-utils/runs-operations.unit.spec.ts @@ -0,0 +1,61 @@ +import { test, expect } from '@playwright/test' +import { shouldDisableForFailures, buildFinishStatusPatch } from '../../../worker/src/utils/runs-operations.ts' + +test.describe('shouldDisableForFailures', () => { + const maxFailures = 3 + const cooldownHours = 12 + + test('false when error count below threshold', () => { 
+ const now = new Date()
+ const earlier = new Date(now.getTime() - 24 * 3600 * 1000)
+ expect(shouldDisableForFailures(2, earlier, now, maxFailures, cooldownHours)).toBe(false)
+ })
+
+ test('false when threshold met but cooldown window not reached', () => {
+ const now = new Date()
+ const earlier = new Date(now.getTime() - 1 * 3600 * 1000) // 1h span
+ expect(shouldDisableForFailures(maxFailures, earlier, now, maxFailures, cooldownHours)).toBe(false)
+ })
+
+ test('true when threshold met AND cooldown window reached', () => {
+ const now = new Date()
+ const earlier = new Date(now.getTime() - 24 * 3600 * 1000) // 24h span
+ expect(shouldDisableForFailures(maxFailures, earlier, now, maxFailures, cooldownHours)).toBe(true)
+ })
+
+ test('false when no error dates available even if count met', () => {
+ expect(shouldDisableForFailures(maxFailures, null, null, maxFailures, cooldownHours)).toBe(false)
+ })
+
+ test('cooldown=0 disables as soon as threshold is reached and any errors exist', () => {
+ const now = new Date()
+ expect(shouldDisableForFailures(maxFailures, now, now, maxFailures, 0)).toBe(true)
+ })
+
+ test('overshooting threshold does not disable (errors > maxFailures)', () => {
+ // current implementation requires exact equality — document behavior
+ const now = new Date()
+ const earlier = new Date(now.getTime() - 24 * 3600 * 1000)
+ expect(shouldDisableForFailures(maxFailures + 1, earlier, now, maxFailures, cooldownHours)).toBe(false)
+ })
+})
+
+test.describe('buildFinishStatusPatch', () => {
+ const finishedAt = '2030-01-02T03:04:05.000Z'
+
+ test('killed input stays killed', () => {
+ expect(buildFinishStatusPatch('killed', undefined, finishedAt)).toEqual({ status: 'killed', finishedAt })
+ })
+
+ test('killed wins over an errorMessage', () => {
+ expect(buildFinishStatusPatch('killed', 'boom', finishedAt)).toEqual({ status: 'killed', finishedAt })
+ })
+
+ test('errorMessage marks as error', () => {
+ expect(buildFinishStatusPatch('running', 'boom',
finishedAt)).toEqual({ status: 'error', finishedAt }) + }) + + test('no errorMessage and not killed -> finished', () => { + expect(buildFinishStatusPatch('running', undefined, finishedAt)).toEqual({ status: 'finished', finishedAt }) + }) +}) diff --git a/tests/features/worker-utils/worker-operations.unit.spec.ts b/tests/features/worker-utils/worker-operations.unit.spec.ts new file mode 100644 index 00000000..10ff70cc --- /dev/null +++ b/tests/features/worker-utils/worker-operations.unit.spec.ts @@ -0,0 +1,32 @@ +import { test, expect } from '@playwright/test' +import { buildErrorMessageFromStderr } from '../../../worker/src/utils/worker-operations.ts' + +test.describe('buildErrorMessageFromStderr', () => { + test('falls back to errMessage when stderr is empty', () => { + expect(buildErrorMessageFromStderr('', 'fallback boom')).toBe('fallback boom') + }) + + test('drops debug lines starting with "worker:" and TLS env-var noise', () => { + const stderr = [ + 'worker:loop something happened', + 'NODE_TLS_REJECT_UNAUTHORIZED is set, ignore', + 'real error: connection refused', + '' + ].join('\n') + expect(buildErrorMessageFromStderr(stderr, 'fallback')).toBe('real error: connection refused') + }) + + test('preserves order of relevant lines', () => { + const stderr = 'first line\nsecond line\nthird line' + expect(buildErrorMessageFromStderr(stderr, 'unused')).toBe('first line\nsecond line\nthird line') + }) + + test('falls back to errMessage when every line is filtered out', () => { + const stderr = 'worker:foo\nworker:bar\n' + expect(buildErrorMessageFromStderr(stderr, 'fallback')).toBe('fallback') + }) + + test('skips empty lines but keeps the surrounding ones', () => { + expect(buildErrorMessageFromStderr('a\n\nb\n', 'fb')).toBe('a\nb') + }) +}) diff --git a/worker/src/task/axios-errors.ts b/worker/src/task/axios-errors.ts new file mode 100644 index 00000000..7fe9362b --- /dev/null +++ b/worker/src/task/axios-errors.ts @@ -0,0 +1,41 @@ +// customize axios errors for 
shorter stack traces when a request fails
+// WARNING: we used to do it in an interceptor, but it was incompatible with axios-retry
+export const prepareAxiosError = (error: any) => {
+ const response = error.response ?? error.request?.res ?? error.res
+ if (!response) return error
+ delete response.request
+ const headers: Record<string, string> = {}
+ if (response.headers?.location) headers.location = response.headers.location
+ response.headers = headers
+ response.config = response.config ?? error.config
+ if (response.config) {
+ response.config = { method: response.config.method, url: response.config.url, params: response.config.params, data: response.config.data }
+ if (response.config.data && response.config.data._writableState) delete response.config.data
+ }
+ if (response.data && response.data._readableState) delete response.data
+ if (error.message) response.message = error.message
+ if (error.stack) response.stack = error.stack
+ return response
+}
+
+export const getHttpErrorMessage = (err: any, baseUrls: (string | null | undefined)[] = []) => {
+ let httpMessage = err.status ?? err.statusCode
+ if (httpMessage) {
+ const statusText = err.statusText ??
err.statusMessage + if (statusText) httpMessage += ' - ' + statusText + if (err.data) { + if (typeof err.data === 'string') httpMessage += ' - ' + err.data + else httpMessage += ' - ' + JSON.stringify(err.data) + } else if (err.message) { + httpMessage += ' - ' + err.message + } + if (err.config && err.config.url) { + let url: string = err.config.url + for (const base of baseUrls) { + if (base) url = url.replace(base, '') + } + httpMessage += ` (${url})` + } + return httpMessage + } +} diff --git a/worker/src/task/axios.ts b/worker/src/task/axios.ts index 50bc0c37..9c4450a9 100644 --- a/worker/src/task/axios.ts +++ b/worker/src/task/axios.ts @@ -4,6 +4,12 @@ import axiosRetry from 'axios-retry' import config from '#config' import type { Processing } from '#api/types' import type { LogFunctions } from '@data-fair/lib-common-types/processings.js' +import { prepareAxiosError, getHttpErrorMessage as getHttpErrorMessageBase } from './axios-errors.ts' + +export { prepareAxiosError } from './axios-errors.ts' + +export const getHttpErrorMessage = (err: any) => + getHttpErrorMessageBase(err, [config.dataFairUrl, config.privateDataFairUrl]) /** * Create an Axios instance. @@ -66,46 +72,3 @@ export const getAxiosInstance = (processing: Processing, log: LogFunctions) => { return axiosInstance } - -// customize axios errors for shorter stack traces when a request fails -// WARNING: we used to do it in an interceptor, but it was incompatible with axios-retry -export const prepareAxiosError = (error: any) => { - const response = error.response ?? error.request?.res ?? error.res - if (!response) return error - delete response.request - const headers: Record = {} - if (response.headers?.location) headers.location = response.headers.location - response.headers = headers - response.config = response.config ?? 
error.config - if (response.config) { - response.config = { method: response.config.method, url: response.config.url, params: response.config.params, data: response.config.data } - if (response.config.data && response.config.data._writableState) delete response.config.data - } - if (response.data && response.data._readableState) delete response.data - if (error.message) response.message = error.message - if (error.stack) response.stack = error.stack - return response -} - -export const getHttpErrorMessage = (err: any) => { - let httpMessage = err.status ?? err.statusCode - if (httpMessage) { - const statusText = err.statusText ?? err.statusMessage - if (statusText) httpMessage += ' - ' + statusText - if (err.data) { - if (typeof err.data === 'string') httpMessage += ' - ' + err.data - else httpMessage += ' - ' + JSON.stringify(err.data) - } else if (err.message) { - httpMessage += ' - ' + err.message - } - if (err.config && err.config.url) { - let url = err.config.url - url = url.replace(config.dataFairUrl, '') - if (config.privateDataFairUrl) { - url = url.replace(config.privateDataFairUrl, '') - } - httpMessage += ` (${url})` - } - return httpMessage - } -} diff --git a/worker/src/utils/limits-operations.ts b/worker/src/utils/limits-operations.ts new file mode 100644 index 00000000..9ecc4228 --- /dev/null +++ b/worker/src/utils/limits-operations.ts @@ -0,0 +1,17 @@ +// Pure helpers extracted from limits.ts. + +/** + * Compute remaining quota for a given key on a Limit document. + * Returns -1 when the limit is unlimited (-1), otherwise max(0, limit - consumption). + * When the limit is missing, returns 0 (treated as fully consumed). + */ +export const calculateRemainingLimit = ( + limits: Record | null | undefined, + key: string +): number => { + const limit = limits?.[key]?.limit + if (limit === -1) return -1 + if (limit == null) return 0 + const consumption = limits?.[key]?.consumption ?? 
0 + return Math.max(0, limit - consumption) +} diff --git a/worker/src/utils/limits.ts b/worker/src/utils/limits.ts index 962d31e0..ed52d85a 100644 --- a/worker/src/utils/limits.ts +++ b/worker/src/utils/limits.ts @@ -3,13 +3,7 @@ import type { Account } from '@data-fair/lib-express' import { getLimits } from '@data-fair/processings-shared/limits.ts' import config from '#config' import mongo from '#mongo' - -const calculateRemainingLimit = (limits: any, key: string) => { - const limit = limits && limits[key] && limits[key].limit - if (limit === -1) return -1 - const consumption = (limits && limits[key] && limits[key].consumption) || 0 - return Math.max(0, limit - consumption) -} +import { calculateRemainingLimit } from './limits-operations.ts' export const remaining = async (consumer: Account) => { const limits = await getLimits(mongo.db, consumer, config.defaultLimits.processingsSeconds) diff --git a/worker/src/utils/runs-operations.ts b/worker/src/utils/runs-operations.ts new file mode 100644 index 00000000..ba1649a2 --- /dev/null +++ b/worker/src/utils/runs-operations.ts @@ -0,0 +1,38 @@ +// Pure helpers extracted from runs.ts. Stateless: no DB, no IO. + +/** + * Decide whether a processing should be disabled because it has failed too often. + * + * @param errors number of error runs among the last `maxFailures` runs + * @param firstError date of the first error across all runs (null if none) + * @param lastError date of the most recent error across all runs (null if none) + * @param maxFailures threshold of consecutive errors to consider disabling + * @param maxFailuresCooldownHours how far back in time errors must span to count as a cooldown breach + */ +export const shouldDisableForFailures = ( + errors: number, + firstError: Date | null, + lastError: Date | null, + maxFailures: number, + maxFailuresCooldownHours: number +): boolean => { + const allErrors = errors === maxFailures + const cooldownReached = firstError && lastError + ? 
(lastError.getTime() - firstError.getTime()) / (1000 * 60 * 60) >= maxFailuresCooldownHours + : false + return allErrors && cooldownReached +} + +/** + * Build the mongo `$set` payload for the run's terminal status. + * Pure: returns the new status patch, never mutates input. + */ +export const buildFinishStatusPatch = ( + currentStatus: string, + errorMessage: string | undefined, + finishedAt: string +): { status: 'finished' | 'killed' | 'error', finishedAt: string } => { + if (currentStatus === 'killed') return { status: 'killed', finishedAt } + if (errorMessage) return { status: 'error', finishedAt } + return { status: 'finished', finishedAt } +} diff --git a/worker/src/utils/runs.ts b/worker/src/utils/runs.ts index e42d00c3..a77e188b 100644 --- a/worker/src/utils/runs.ts +++ b/worker/src/utils/runs.ts @@ -7,6 +7,7 @@ import eventsQueue from '@data-fair/lib-node/events-queue.js' import config from '#config' import mongo from '#mongo' import { internalError } from '@data-fair/lib-node/observer.js' +import { shouldDisableForFailures } from './runs-operations.ts' const sendProcessingEvent = ( run: Run, @@ -134,15 +135,7 @@ export const finish = async (run: Run, errorMessage: string | undefined = undefi const firstError = raw?.allErrors?.[0]?.firstError ? new Date(raw.allErrors[0].firstError) : null // date of the first error (across all runs) const lastError = raw?.allErrors?.[0]?.lastError ? new Date(raw.allErrors[0].lastError) : null // date of the last error (across all runs) - const allErrors = errors === config.maxFailures - const cooldownReached = firstError && lastError - ? 
(lastError.getTime() - firstError.getTime()) / (1000 * 60 * 60) >= config.maxFailuresCooldown // (1000 * 60 * 60) convert to hours - : false - - const reachedMaxFailures = allErrors && cooldownReached - - // Disable processing if reached max failures - if (reachedMaxFailures) { + if (shouldDisableForFailures(errors, firstError, lastError, config.maxFailures, config.maxFailuresCooldown)) { await mongo.processings.updateOne( { _id: run.processing._id }, { $set: { active: false } } diff --git a/worker/src/utils/worker-operations.ts b/worker/src/utils/worker-operations.ts new file mode 100644 index 00000000..b290fd71 --- /dev/null +++ b/worker/src/utils/worker-operations.ts @@ -0,0 +1,20 @@ +// Pure helpers extracted from worker.ts. + +/** + * Reconstruct an error message from a child process stderr, + * filtering out noise that is not relevant to the user. + * Falls back to the original error message when stderr produces nothing useful. + */ +export const buildErrorMessageFromStderr = (stderr: string, errMessage: string): string => { + const lines: string[] = [] + if (stderr) { + for (const line of stderr.split('\n')) { + if (!line) continue + if (line.startsWith('worker:')) continue + if (line.includes('NODE_TLS_REJECT_UNAUTHORIZED')) continue + lines.push(line) + } + } + if (!lines.length) lines.push(errMessage) + return lines.join('\n') +} diff --git a/worker/src/worker.ts b/worker/src/worker.ts index 79d559cb..3fa67a22 100644 --- a/worker/src/worker.ts +++ b/worker/src/worker.ts @@ -18,6 +18,7 @@ import locks from '#locks' import limits from './utils/limits.ts' import { initMetrics } from './utils/metrics.ts' import { finish } from './utils/runs.ts' +import { buildErrorMessageFromStderr } from './utils/worker-operations.ts' const debug = Debug('worker') const debugLoop = Debug('worker-loop') @@ -242,14 +243,7 @@ async function iter (run: Run) { await finish(run) } catch (err: any) { // Build back the original error message from the stderr of the child process - 
const errorMessage = [] - if (stderr) { - stderr.split('\n') - .filter(line => !!line && !line.startsWith('worker:') && !line.includes('NODE_TLS_REJECT_UNAUTHORIZED')) - .forEach(line => errorMessage.push(line)) - } - - if (!errorMessage.length) errorMessage.push(err.message) + const errorMessage = buildErrorMessageFromStderr(stderr, err.message) if (run) { // case of interruption by a SIGTERM @@ -258,8 +252,8 @@ async function iter (run: Run) { await finish(run) // @test:spy("isKilled") } else { - console.warn(`failure ${processing?.title ?? run.processing.title} > ${run._id}`, errorMessage.join('\n')) - await finish(run, errorMessage.join('\n')) + console.warn(`failure ${processing?.title ?? run.processing.title} > ${run._id}`, errorMessage) + await finish(run, errorMessage) // @test:spy("isFailure") } } else { From 82668ab7cf6583efff680b0beb1deb8ba65d4ce5 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Mon, 4 May 2026 10:20:59 +0200 Subject: [PATCH 3/4] chore: init e2e tests --- .github/workflows/reuse-quality.yml | 2 +- tests/features/ui/layout.e2e.spec.ts | 34 ++++++++++++++++++ tests/fixtures/login.ts | 54 ++++++++++++++++++++++++++++ ui/package.json | 2 +- 4 files changed, 90 insertions(+), 2 deletions(-) create mode 100644 tests/features/ui/layout.e2e.spec.ts create mode 100644 tests/fixtures/login.ts diff --git a/.github/workflows/reuse-quality.yml b/.github/workflows/reuse-quality.yml index 6dfd9e4e..0219123f 100644 --- a/.github/workflows/reuse-quality.yml +++ b/.github/workflows/reuse-quality.yml @@ -56,7 +56,7 @@ jobs: exit 1 - name: Run tests - run: npm test + run: npm run test-unit && npm run test-api - name: Audit run: npm audit --omit=dev --audit-level=critical diff --git a/tests/features/ui/layout.e2e.spec.ts b/tests/features/ui/layout.e2e.spec.ts new file mode 100644 index 00000000..47026ea6 --- /dev/null +++ b/tests/features/ui/layout.e2e.spec.ts @@ -0,0 +1,34 @@ +import path from 'node:path' +import fs from 'node:fs' +import FormData from 
'form-data' +import { test, expect } from '../../fixtures/login.ts' +import { axiosAuth, clean } from '../../support/axios.ts' + +test.describe('UI layout', () => { + test.beforeEach(clean) + test.afterAll(clean) + + test('authenticated user sees the processings list with empty state', async ({ page, goToWithAuth }) => { + await goToWithAuth('/processings/processings', 'test_user1') + await expect(page.getByText(/n'avez pas encore créé de traitement/)).toBeVisible({ timeout: 10000 }) + }) + + test('processings list renders a card for an existing processing', async ({ page, goToWithAuth }) => { + const superadmin = await axiosAuth('test_superadmin@test.com') + + const tarballPath = path.join(import.meta.dirname, '..', '..', 'fixtures', 'processing-hello-world.tgz') + const formData = new FormData() + formData.append('file', fs.createReadStream(tarballPath)) + const plugin = (await superadmin.post('/api/v1/plugins', formData, { headers: formData.getHeaders() })).data + await superadmin.put(`/api/v1/plugins/${plugin.id}/access`, { public: true }) + + await superadmin.post('/api/v1/processings', { + title: 'My e2e processing', + plugin: plugin.id, + owner: { type: 'user', id: 'test_superadmin', name: 'Test Super Admin' } + }) + + await goToWithAuth('/processings/processings', 'test_superadmin') + await expect(page.getByText('My e2e processing')).toBeVisible({ timeout: 10000 }) + }) +}) diff --git a/tests/fixtures/login.ts b/tests/fixtures/login.ts new file mode 100644 index 00000000..e399e7d9 --- /dev/null +++ b/tests/fixtures/login.ts @@ -0,0 +1,54 @@ +import { test as base, expect } from '@playwright/test' + +const cookieCache = new Map>>() + +async function performLogin (page: any, context: any, baseUrl: string, url: string, user: string) { + const fullUrl = `${baseUrl}${url}` + const password = user === 'test_superadmin' ? 
'superpasswd' : 'passwd' + const loginUrl = `${baseUrl}/simple-directory/login?redirect=${encodeURIComponent(fullUrl)}` + await page.goto(loginUrl) + await page.getByLabel('Adresse mail').fill(`${user}@test.com`) + await page.getByLabel('Mot de passe').fill(password) + await page.getByRole('button', { name: 'Se connecter' }).click() + await page.waitForURL(fullUrl, { timeout: 10000 }) + const cookies = await context.cookies() + cookieCache.set(user, cookies) +} + +export const test = base.extend<{ + goToWithAuth: (url: string, user: string) => Promise +}>({ + page: async ({ page }, use) => { + const baseUrl = `http://${process.env.DEV_HOST}:${process.env.NGINX_PORT1}` + await page.context().addCookies([{ + name: 'i18n_lang', + value: 'fr', + url: baseUrl + }, { + name: 'cache_bypass', + value: '1', + url: baseUrl + }]) + await use(page) + }, + + goToWithAuth: async ({ page, context }, use) => { + const baseUrl = `http://${process.env.DEV_HOST}:${process.env.NGINX_PORT1}` + const goToWithAuth = async (url: string, user: string) => { + const cached = cookieCache.get(user) + if (cached) { + await context.addCookies(cached) + await page.goto(url) + if (page.url().includes('/simple-directory/login')) { + cookieCache.delete(user) + await performLogin(page, context, baseUrl, url, user) + } + } else { + await performLogin(page, context, baseUrl, url, user) + } + } + await use(goToWithAuth) + } + }) + +export { expect } diff --git a/ui/package.json b/ui/package.json index ba818414..a974e248 100644 --- a/ui/package.json +++ b/ui/package.json @@ -5,7 +5,7 @@ "scripts": { "build": "vite build", "check-types": "vue-tsc", - "dev": "NODE_CONFIG_DIR=../api/config/ vite --port 3039", + "dev": "NODE_CONFIG_DIR=../api/config/ vite --port $DEV_UI_PORT", "lint": "eslint .", "lint-fix": "eslint --fix .", "preview": "vite preview" From ac37ccdb18c55e780756c0214cf989de93c0b36d Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Mon, 4 May 2026 10:31:39 +0200 Subject: [PATCH 4/4] chore: env 
var for vite hmr port --- dev/init-env.sh | 1 + ui/vite.config.ts | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/dev/init-env.sh b/dev/init-env.sh index 04fc895c..2cb380d1 100755 --- a/dev/init-env.sh +++ b/dev/init-env.sh @@ -17,6 +17,7 @@ DEV_UI_PORT=$((RANDOM_NB + 11)) DEV_WORKER_PORT=$((RANDOM_NB + 12)) DEV_API_OBSERVER_PORT=$((RANDOM_NB + 13)) DEV_WORKER_OBSERVER_PORT=$((RANDOM_NB + 14)) +DEV_UI_HMR_PORT=$((RANDOM_NB + 15)) MONGO_PORT=$((RANDOM_NB + 20)) ES_PORT=$((RANDOM_NB + 21)) diff --git a/ui/vite.config.ts b/ui/vite.config.ts index 7c4e22cd..35d00e60 100644 --- a/ui/vite.config.ts +++ b/ui/vite.config.ts @@ -91,5 +91,5 @@ export default defineConfig({ return { relative: true } } }, - server: { hmr: { port: 7200 } } + server: { hmr: { port: parseInt(process.env.DEV_UI_HMR_PORT!) } } })