From 6b73e6eb2ed5dc0d48d66e65594a2a38f26f5090 Mon Sep 17 00:00:00 2001 From: rostalan Date: Thu, 9 Apr 2026 14:39:15 +0200 Subject: [PATCH 1/8] Replace Docker-based smoke tests with native Node.js harness Boot a minimal Backstage backend directly on the runner using createBackend() + dynamicPluginsFeatureLoader, probe /api/ routes, and report results as structured JSON. Includes core bundled plugins (catalog, auth, permission, scaffolder, events, search, proxy) so dynamic plugins resolve their dependencies correctly. Made-with: Cursor --- .../workflows/run-workspace-smoke-tests.yaml | 193 +++----- .github/workflows/workspace-tests.yaml | 42 +- .gitignore | 5 + smoke-tests/app-config.yaml | 7 +- smoke-tests/package.json | 48 ++ smoke-tests/smoke-test.mjs | 418 ++++++++++++++++++ 6 files changed, 563 insertions(+), 150 deletions(-) create mode 100644 smoke-tests/package.json create mode 100644 smoke-tests/smoke-test.mjs diff --git a/.github/workflows/run-workspace-smoke-tests.yaml b/.github/workflows/run-workspace-smoke-tests.yaml index e314ec1be..97d5e009b 100644 --- a/.github/workflows/run-workspace-smoke-tests.yaml +++ b/.github/workflows/run-workspace-smoke-tests.yaml @@ -15,13 +15,13 @@ on: description: Newline-separated list of plugins that failed to load value: ${{ jobs.run.outputs.failed-plugins }} error-logs: - description: Extracted error messages from container logs + description: Extracted error messages from smoke test output value: ${{ jobs.run.outputs.error-logs }} jobs: run: runs-on: ubuntu-latest - timeout-minutes: 25 + timeout-minutes: 15 permissions: contents: read packages: read @@ -36,139 +36,81 @@ jobs: name: smoke-test-artifacts path: ./artifacts - - name: Log in to GitHub Container Registry - uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0 + - name: Setup Node.js + uses: actions/setup-node@v4 with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + node-version: "20" - - 
name: Start RHDH with test plugins config + - name: Install skopeo run: | - set -euo pipefail - ls -la ./artifacts/ || true - + if ! command -v skopeo &>/dev/null; then + sudo apt-get update -qq + sudo apt-get install -y -qq skopeo + fi + skopeo --version + + - name: Authenticate to GHCR + run: echo "${{ secrets.GITHUB_TOKEN }}" | skopeo login ghcr.io -u "${{ github.actor }}" --password-stdin + + - name: Verify artifacts + run: | + ls -la ./artifacts/ + ls -la ./artifacts/harness/ || true if [ ! -f "./artifacts/dynamic-plugins.test.yaml" ]; then - echo "Error: dynamic-plugins.test.yaml not found in artifacts" + echo "Error: dynamic-plugins.test.yaml not found" exit 1 fi + echo "=== dynamic-plugins.test.yaml (first 40 lines) ===" + head -40 ./artifacts/dynamic-plugins.test.yaml || true - echo "dynamic-plugins.test.yaml contents:" - sed -n '1,40p' ./artifacts/dynamic-plugins.test.yaml || true - - # Build Docker run command with conditional volume mounts - ENV_ARGS=$( [ -f ./artifacts/test.env ] && echo "--env-file ./artifacts/test.env" || echo "" ) - DOCKER_CMD="docker run -d --name rhdh -p 7007:7007 $ENV_ARGS" - if [ -f "./artifacts/app-config.yaml" ]; then - DOCKER_CMD="$DOCKER_CMD -v "$(pwd)"/artifacts/app-config.yaml:/opt/app-root/src/app-config.yaml" - fi - if [ -f "./artifacts/app-config.test.yaml" ]; then - DOCKER_CMD="$DOCKER_CMD -v "$(pwd)"/artifacts/app-config.test.yaml:/opt/app-root/src/app-config.test.yaml" - fi - DOCKER_CMD="$DOCKER_CMD -v "$(pwd)"/artifacts/dynamic-plugins.test.yaml:/opt/app-root/src/dynamic-plugins.yaml" - - # Add Docker config and environment variables - echo "Using docker auth file: $HOME/.docker/config.json" - DOCKER_CMD="$DOCKER_CMD -v $HOME/.docker/config.json:/root/.docker/config.json:ro" - DOCKER_CMD="$DOCKER_CMD -e REGISTRY_AUTH_FILE=/root/.docker/config.json" - - # Derive image tag from target branch - TARGET_BRANCH="${{ inputs.target-branch }}" - if [[ "$TARGET_BRANCH" =~ ^release-([0-9]+\.[0-9]+)$ ]]; then - 
IMAGE_TAG="next-${BASH_REMATCH[1]}" - else - IMAGE_TAG="next" - fi - echo "Using RHDH image tag: $IMAGE_TAG (target branch: $TARGET_BRANCH)" - - # Add image and command - DOCKER_CMD="$DOCKER_CMD --entrypoint /bin/bash quay.io/rhdh-community/rhdh:${IMAGE_TAG} -c '" - DOCKER_CMD="$DOCKER_CMD set -ex; " - DOCKER_CMD="$DOCKER_CMD PLUGINS_ROOT=/opt/app-root/src/dynamic-plugins-root; " - DOCKER_CMD="$DOCKER_CMD GENERATED_CONFIG=\$PLUGINS_ROOT/app-config.dynamic-plugins.yaml; " - DOCKER_CMD="$DOCKER_CMD INSTALL_SCRIPT=/opt/app-root/src/install-dynamic-plugins.sh; " - DOCKER_CMD="$DOCKER_CMD mkdir -p \$PLUGINS_ROOT; " - DOCKER_CMD="$DOCKER_CMD \$INSTALL_SCRIPT \$PLUGINS_ROOT; " - DOCKER_CMD="$DOCKER_CMD exec node packages/backend" - - # Add config files to command (optional) - [ -f "./artifacts/app-config.yaml" ] && DOCKER_CMD="$DOCKER_CMD --config /opt/app-root/src/app-config.yaml" - [ -f "./artifacts/app-config.test.yaml" ] && DOCKER_CMD="$DOCKER_CMD --config /opt/app-root/src/app-config.test.yaml" - DOCKER_CMD="$DOCKER_CMD --config /opt/app-root/src/dynamic-plugins.yaml" - DOCKER_CMD="$DOCKER_CMD --config \$GENERATED_CONFIG'" - - echo "Running: $DOCKER_CMD" - eval "$DOCKER_CMD" - - - name: Wait for RHDH to be ready + - name: Install smoke test dependencies + working-directory: ./artifacts/harness + run: npm install --ignore-scripts 2>&1 | tail -5 + + - name: Run smoke test + id: smoke-test + working-directory: ./artifacts/harness run: | - set -e - for i in $(seq 1 10); do - if curl -fsS http://localhost:7007/health >/dev/null; then - echo "RHDH is ready"; exit 0; fi - echo "Waiting for RHDH... (Attempt ${i}/10)" - # Check if container is still running - if ! docker ps | grep -q rhdh; then - echo "Container stopped unexpectedly." - exit 1 - fi - sleep 10 - done - echo "RHDH did not become ready in time." 
- exit 1 + set -o pipefail - - name: List installed plugins - run: docker exec rhdh ls -l /opt/app-root/src/dynamic-plugins-root + CONFIG_ARGS="--config app-config.yaml" + [ -f ../app-config.yaml ] && CONFIG_ARGS="$CONFIG_ARGS --config ../app-config.yaml" + [ -f ../app-config.test.yaml ] && CONFIG_ARGS="$CONFIG_ARGS --config ../app-config.test.yaml" - - name: Print generated dynamic plugins config - run: docker exec rhdh cat /opt/app-root/src/dynamic-plugins-root/app-config.dynamic-plugins.yaml + ENV_ARGS="" + [ -f ../test.env ] && ENV_ARGS="--env-file ../test.env" - - name: Verify plugin loading + echo "Running: node smoke-test.mjs --plugins-yaml ../dynamic-plugins.test.yaml $CONFIG_ARGS $ENV_ARGS" + + node smoke-test.mjs \ + --plugins-yaml ../dynamic-plugins.test.yaml \ + $CONFIG_ARGS \ + $ENV_ARGS \ + 2>&1 | tee ../smoke-test.log + + - name: Collect results id: collect-results + if: always() run: | - set -e - PLUGINS=$(grep -Eo '!([^[:space:]]+)' ./artifacts/dynamic-plugins.test.yaml | sed -e 's/^!//' -e 's/"$//' | sort -u) - if [ -z "$PLUGINS" ]; then - echo "No plugins found in dynamic-plugins.test.yaml" - echo "success=false" >> "$GITHUB_OUTPUT" - echo "failed-plugins<> "$GITHUB_OUTPUT" - echo "(no-plugins)" >> "$GITHUB_OUTPUT" - echo "EOF" >> "$GITHUB_OUTPUT" - exit 0 - fi - LOGS=$(docker logs rhdh || true) - failures=() - echo "===== Checking logs for plugin loaded messages" - for plugin in $PLUGINS; do - echo "Asserting plugin loaded: $plugin" - # Match either the unpack path or the canonical "loaded dynamic ... 
plugin '' from" message - if echo "$LOGS" | grep -qiE "loaded dynamic .* plugin .*${plugin}(-dynamic)?'" ; then - echo "Plugin loaded: $plugin" - else - echo "Plugin NOT loaded: $plugin" - failures+=("$plugin") - fi - done - if echo "$LOGS" | grep -E "(InstallException|Error while adding OCI plugin|Failed to load dynamic plugin|dynamic plugin.*error)" >/dev/null; then - echo "Detected dynamic plugin loading errors in logs" - failures+=("(log-errors)") - fi - if [ ${#failures[@]} -eq 0 ]; then - echo "success=true" >> "$GITHUB_OUTPUT" + RESULTS_FILE="./artifacts/harness/results.json" + if [ -f "$RESULTS_FILE" ]; then + SUCCESS=$(jq -r '.success' "$RESULTS_FILE") + FAILED=$(jq -r '.failedPlugins | join("\n")' "$RESULTS_FILE") + echo "success=$SUCCESS" >> "$GITHUB_OUTPUT" echo "failed-plugins<> "$GITHUB_OUTPUT" - echo "" >> "$GITHUB_OUTPUT" + echo "$FAILED" >> "$GITHUB_OUTPUT" echo "EOF" >> "$GITHUB_OUTPUT" else echo "success=false" >> "$GITHUB_OUTPUT" echo "failed-plugins<> "$GITHUB_OUTPUT" - printf "%s\n" "${failures[@]}" >> "$GITHUB_OUTPUT" + echo "(results-file-missing)" >> "$GITHUB_OUTPUT" echo "EOF" >> "$GITHUB_OUTPUT" fi - - name: Fail if any plugin failed to load + - name: Fail if smoke test failed if: ${{ steps.collect-results.outputs.success != 'true' }} run: | - echo "The following plugins failed to load to RHDH:" + echo "Smoke test failed. Failed plugins:" echo "${{ steps.collect-results.outputs.failed-plugins }}" exit 1 @@ -177,30 +119,21 @@ jobs: id: capture-errors continue-on-error: true run: | - if ! docker ps -a | grep -q rhdh; then - echo "error-logs<> "$GITHUB_OUTPUT" - echo "'rhdh' container was not available. Check earlier steps for startup errors." 
>> "$GITHUB_OUTPUT" - echo "EOF" >> "$GITHUB_OUTPUT" - exit 0 - fi - - LOGS=$(docker logs rhdh 2>&1 || echo "Failed to retrieve logs") - - # Extract errors from logs with short context - ERROR_LINES=$(echo "$LOGS" | grep -iE -B 2 -A 2 "(error|exception|failed|failure|installException)" | grep -vE "(no errors|successfully|resolved|fixed)" | grep -v "^--$" | tail -n 100 || echo "") - + LOG_FILE="./artifacts/smoke-test.log" echo "error-logs<> "$GITHUB_OUTPUT" - if [ -n "$ERROR_LINES" ] && [ "$ERROR_LINES" != "" ]; then - echo "$ERROR_LINES" >> "$GITHUB_OUTPUT" + if [ -f "$LOG_FILE" ]; then + grep -iE -B 1 -A 1 "(error|exception|failed|failure)" "$LOG_FILE" \ + | grep -vE "(no errors|successfully|resolved)" \ + | tail -100 || echo "No error patterns found in logs." else - echo "No specific error patterns found in container logs. Check full workflow logs for details." >> "$GITHUB_OUTPUT" + echo "No smoke test log available." fi echo "EOF" >> "$GITHUB_OUTPUT" - - name: Print container logs + - name: Print full smoke test log if: always() - run: docker logs rhdh || true + run: cat ./artifacts/smoke-test.log 2>/dev/null || echo "No log file" - name: Cleanup if: always() - run: docker rm -f rhdh || true + run: rm -rf ./artifacts/harness/dynamic-plugins-root ./artifacts/harness/.tmp-oci || true diff --git a/.github/workflows/workspace-tests.yaml b/.github/workflows/workspace-tests.yaml index 13bf63c0c..3c10cbc5e 100644 --- a/.github/workflows/workspace-tests.yaml +++ b/.github/workflows/workspace-tests.yaml @@ -88,7 +88,7 @@ jobs: console.log('Missing PR or commit; skipping pending status'); return; } - + await github.rest.repos.createCommitStatus({ owner: context.repo.owner, repo: context.repo.repo, @@ -174,7 +174,7 @@ jobs: # Always include the root-level default config ROOT_CONFIG="$GITHUB_WORKSPACE/smoke-tests/app-config.yaml" [ -f "$ROOT_CONFIG" ] && cp "$ROOT_CONFIG" "$OUT_DIR/app-config.yaml" - + # Read workspace-wide test.env if it exists 
WORKSPACE_ENV_FILE="$WORKSPACE_PATH/smoke-tests/test.env" WORKSPACE_ENV_CONTENT="" @@ -278,6 +278,16 @@ jobs: echo "plugins-metadata-complete=$PLUGINS_METADATA_COMPLETE" >> "$GITHUB_OUTPUT" echo "skip-tests-missing-env=$SKIP_TESTS_MISSING_ENV" >> "$GITHUB_OUTPUT" + - name: Stage smoke test harness + env: + WORKSPACE_PATH: ${{ needs.resolve.outputs.workspace }} + run: | + HARNESS_DIR="$WORKSPACE_PATH/smoke-tests/harness" + mkdir -p "$HARNESS_DIR" + cp smoke-tests/package.json "$HARNESS_DIR/" + cp smoke-tests/smoke-test.mjs "$HARNESS_DIR/" + cp smoke-tests/app-config.yaml "$HARNESS_DIR/" + - name: Upload smoke test artifacts uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: @@ -321,11 +331,11 @@ jobs: const workspace = core.getInput('workspace'); const pluginsMetadataComplete = core.getInput('plugins_metadata_complete') === 'true'; const skipTestsMissingEnv = core.getInput('skip_tests_missing_env') === 'true'; - + let statusDescription = 'Skipped'; let commentDetail = ' skipped for an unknown reason. Check workflow run for details.\n'; let summaryDetail = 'Unknown reason. 
Check workflow run for details.'; - + if (!workspace || workspace === '') { statusDescription = 'Skipped: PR doesn\'t touch one workspace'; commentDetail = ' skipped: PR doesn\'t touch exactly one workspace.\n'; @@ -339,7 +349,7 @@ jobs: commentDetail = ' skipped: missing plugin metadata files (`/metadata/*.yaml`).\n'; summaryDetail = 'Missing plugin metadata files (`/metadata/*.yaml`).'; } - + if (overlayCommit) { await github.rest.repos.createCommitStatus({ owner: context.repo.owner, @@ -352,7 +362,7 @@ jobs: }); console.log(`Set success status on ${overlayCommit} (skipped for valid reason)`); } - + if (pr) { await github.rest.issues.createComment({ owner: context.repo.owner, @@ -361,7 +371,7 @@ jobs: body: `:warning: \n[Smoke test workflow](${runUrl})${commentDetail}`, }); } - + await core.summary .addRaw('\n### Tests Skipped\n\n' + summaryDetail) .write(); @@ -400,10 +410,10 @@ jobs: const errorLogs = (process.env.ERROR_LOGS || '').trim(); const pr = Number(process.env.PR_NUMBER); const overlayCommit = process.env.OVERLAY_COMMIT; - + const success = smokeTestsResult === 'success' && successOutput === 'true'; let failureReason = ''; - + if (!success) { switch (smokeTestsResult) { case 'failure': @@ -419,7 +429,7 @@ jobs: failureReason = `Smoke tests ended in an unexpected state: ${smokeTestsResult}. Check the workflow logs for details.`; } } - + // Write step summary (always, even if PR is unavailable) let summary; if (success) { @@ -434,23 +444,23 @@ jobs: } } await core.summary.addRaw(summary).write(); - + if (!pr) { console.log('No PR associated; skipping status and comment'); return; } - + // Get current PR head SHA const { data: prData } = await github.rest.pulls.get({ owner: context.repo.owner, repo: context.repo.repo, pull_number: pr, }); - + // Use PR head if different from overlayCommit (/smoketest command), else use overlayCommit (immediate publish) const sha = prData.head.sha !== overlayCommit ? 
prData.head.sha : overlayCommit; console.log(`Status SHA: ${sha} (PR head: ${prData.head.sha}, overlay: ${overlayCommit})`); - + await github.rest.repos.createCommitStatus({ owner: context.repo.owner, repo: context.repo.repo, @@ -460,7 +470,7 @@ jobs: target_url: runUrl, context: 'smoketest', }); - + let body; if (success) { body = `:white_check_mark: [Smoke tests workflow](${runUrl}) passed. All plugins loaded successfully.\n`; @@ -473,7 +483,7 @@ jobs: body += `\n\n
<details><summary>Error logs from container</summary>\n\n\`\`\`\n${errorLogs}\n\`\`\`\n\n</details>
`; } } - + await github.rest.issues.createComment({ issue_number: pr, owner: context.repo.owner, diff --git a/.gitignore b/.gitignore index d4222298f..65972e55c 100644 --- a/.gitignore +++ b/.gitignore @@ -59,6 +59,11 @@ Thumbs.db build/ *.tsbuildinfo +# Smoke test artifacts +smoke-tests/dynamic-plugins-root/ +smoke-tests/.tmp-oci/ +smoke-tests/results.json + # Coverage coverage/ .nyc_output/ \ No newline at end of file diff --git a/smoke-tests/app-config.yaml b/smoke-tests/app-config.yaml index 57711a7b4..c7014c278 100644 --- a/smoke-tests/app-config.yaml +++ b/smoke-tests/app-config.yaml @@ -8,9 +8,9 @@ backend: port: 7007 database: client: better-sqlite3 - connection: ':memory:' + connection: ":memory:" csp: - connect-src: ["'self'", 'http:', 'https:'] + connect-src: ["'self'", "http:", "https:"] cors: origin: http://localhost:7007 methods: [GET, POST, PUT, DELETE] @@ -30,7 +30,7 @@ catalog: # This is required by the frontend app plugin for schema validation. dynamicPlugins: # This directory is used by the backend to unpack OCI plugins. - rootDirectory: /opt/app-root/src/dynamic-plugins-root + rootDirectory: ./dynamic-plugins-root # Disable the default directory scanning for local plugins sources: local: {} @@ -43,4 +43,3 @@ analytics: {} # Disable the permission plugin for simplified testing. permission: enabled: false - diff --git a/smoke-tests/package.json b/smoke-tests/package.json new file mode 100644 index 000000000..c2828fd4b --- /dev/null +++ b/smoke-tests/package.json @@ -0,0 +1,48 @@ +{ + "name": "rhdh-smoke-test", + "version": "0.1.0", + "private": true, + "type": "module", + "description": "Lightweight smoke test harness for RHDH dynamic plugins — no Docker required", + "scripts": { + "test": "node smoke-test.mjs", + "prettier:check": "prettier --check .", + "prettier:fix": "prettier --write ." 
+ }, + "dependencies": { + "@backstage/backend-app-api": "1.5.0", + "@backstage/backend-defaults": "0.15.2", + "@backstage/backend-dynamic-feature-service": "0.7.9", + "@backstage/backend-plugin-api": "1.7.0", + "@backstage/catalog-client": "1.13.0", + "@backstage/catalog-model": "1.7.6", + "@backstage/cli-node": "0.2.18", + "@backstage/config": "1.3.6", + "@backstage/config-loader": "1.10.8", + "@backstage/errors": "1.2.7", + "@backstage/plugin-auth-backend": "0.27.0", + "@backstage/plugin-auth-backend-module-guest-provider": "0.2.16", + "@backstage/plugin-auth-node": "0.6.13", + "@backstage/plugin-catalog-backend": "3.4.0", + "@backstage/plugin-catalog-backend-module-logs": "0.1.19", + "@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "0.2.17", + "@backstage/plugin-catalog-node": "2.0.0", + "@backstage/plugin-events-backend": "0.5.11", + "@backstage/plugin-events-node": "0.4.19", + "@backstage/plugin-permission-backend": "0.7.9", + "@backstage/plugin-permission-backend-module-allow-all-policy": "0.2.17", + "@backstage/plugin-permission-common": "0.9.6", + "@backstage/plugin-permission-node": "0.7.9", + "@backstage/plugin-proxy-backend": "0.6.10", + "@backstage/plugin-scaffolder-backend": "3.1.3", + "@backstage/plugin-scaffolder-node": "0.6.3", + "@backstage/plugin-search-backend": "2.0.12", + "@backstage/plugin-search-backend-module-catalog": "0.3.12", + "@backstage/types": "1.2.2", + "better-sqlite3": "12.8.0", + "js-yaml": "4.1.0" + }, + "devDependencies": { + "prettier": "3.8.1" + } +} diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.mjs new file mode 100644 index 000000000..3d3c0077d --- /dev/null +++ b/smoke-tests/smoke-test.mjs @@ -0,0 +1,418 @@ +#!/usr/bin/env node + +import { spawnSync, execSync } from "node:child_process"; +import { + readFileSync, + writeFileSync, + mkdirSync, + existsSync, + rmSync, +} from "node:fs"; +import { join, resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; 
+import { createHash } from "node:crypto"; +import yaml from "js-yaml"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +// --------------------------------------------------------------------------- +// CLI +// --------------------------------------------------------------------------- + +function parseArgs(argv) { + const args = { + pluginsYaml: null, + configs: [], + envFile: null, + port: 7007, + timeout: 120, + pluginsRoot: resolve(__dirname, "dynamic-plugins-root"), + resultsFile: resolve(__dirname, "results.json"), + }; + for (let i = 0; i < argv.length; i++) { + switch (argv[i]) { + case "--plugins-yaml": + args.pluginsYaml = resolve(argv[++i]); + break; + case "--config": + args.configs.push(resolve(argv[++i])); + break; + case "--env-file": + args.envFile = resolve(argv[++i]); + break; + case "--port": + args.port = Number.parseInt(argv[++i], 10); + break; + case "--timeout": + args.timeout = Number.parseInt(argv[++i], 10); + break; + } + } + if (!args.pluginsYaml) { + console.error( + "Usage: node smoke-test.mjs --plugins-yaml [--config ...] [--env-file ]", + ); + process.exit(1); + } + return args; +} + +function loadEnvFile(filePath) { + if (!filePath || !existsSync(filePath)) return; + for (const line of readFileSync(filePath, "utf8").split("\n")) { + const t = line.trim(); + if (!t || t.startsWith("#")) continue; + const eq = t.indexOf("="); + if (eq === -1) continue; + process.env[t.slice(0, eq).trim()] = t.slice(eq + 1).trim(); + } +} + +// --------------------------------------------------------------------------- +// Plugins YAML +// --------------------------------------------------------------------------- + +function parsePluginsYaml(filePath) { + const doc = yaml.load(readFileSync(filePath, "utf8")); + return (doc?.plugins ?? 
[]).filter((p) => !p.disabled); +} + +function parseOciRef(packageStr) { + const cleaned = packageStr.replace(/^"/, "").replace(/"$/, ""); + const withoutOci = cleaned.replace(/^oci:\/\//, ""); + const bangIdx = withoutOci.indexOf("!"); + if (bangIdx === -1) return { imageRef: withoutOci, pluginPath: null }; + return { + imageRef: withoutOci.slice(0, bangIdx), + pluginPath: withoutOci.slice(bangIdx + 1), + }; +} + +// --------------------------------------------------------------------------- +// OCI Download (mirrors install-dynamic-plugins.py §663-715) +// --------------------------------------------------------------------------- + +function pullOciImage(imageRef, tmpDir) { + const hash = createHash("sha256").update(imageRef).digest("hex").slice(0, 16); + const localDir = join(tmpDir, hash); + mkdirSync(localDir, { recursive: true }); + + const result = spawnSync( + "skopeo", + [ + "copy", + "--override-os=linux", + "--override-arch=amd64", + `docker://${imageRef}`, + `dir:${localDir}`, + ], + { stdio: "inherit" }, + ); + if (result.status !== 0) + throw new Error(`skopeo copy failed for ${imageRef}`); + + const manifest = JSON.parse( + readFileSync(join(localDir, "manifest.json"), "utf8"), + ); + const [, filename] = manifest.layers[0].digest.split(":"); + return join(localDir, filename); +} + +function extractPlugin(tarFile, pluginPath, dest) { + mkdirSync(join(dest, pluginPath), { recursive: true }); + execSync(`tar xf "${tarFile}" -C "${dest}" "${pluginPath}/"`, { + stdio: "pipe", + }); +} + +async function downloadPlugins(plugins, dest) { + mkdirSync(dest, { recursive: true }); + const tmpDir = resolve(__dirname, ".tmp-oci"); + mkdirSync(tmpDir, { recursive: true }); + + const imageCache = new Map(); + for (const plugin of plugins) { + const { imageRef, pluginPath } = parseOciRef(plugin.package); + if (!imageRef || !pluginPath) { + console.warn(` Skip (invalid ref): ${plugin.package}`); + continue; + } + console.log(` ${pluginPath}`); + let tarFile = 
imageCache.get(imageRef); + if (!tarFile) { + tarFile = pullOciImage(imageRef, tmpDir); + imageCache.set(imageRef, tarFile); + } + extractPlugin(tarFile, pluginPath, dest); + } + rmSync(tmpDir, { recursive: true, force: true }); +} + +// --------------------------------------------------------------------------- +// Config generation +// --------------------------------------------------------------------------- + +function deepMerge(src, dst) { + for (const [k, v] of Object.entries(src)) { + if (v && typeof v === "object" && !Array.isArray(v)) { + dst[k] = dst[k] ?? {}; + deepMerge(v, dst[k]); + } else { + dst[k] = v; + } + } + return dst; +} + +function generateMergedConfig(plugins, pluginsRoot) { + const cfg = { dynamicPlugins: { rootDirectory: pluginsRoot } }; + for (const p of plugins) if (p.pluginConfig) deepMerge(p.pluginConfig, cfg); + const outPath = join(pluginsRoot, "app-config.dynamic-plugins.yaml"); + writeFileSync(outPath, yaml.dump(cfg)); + return outPath; +} + +// --------------------------------------------------------------------------- +// Backend boot +// --------------------------------------------------------------------------- + +async function bootBackend(configPaths) { + process.argv = ["node", "smoke-test.mjs"]; + for (const p of configPaths) { + if (existsSync(p)) process.argv.push("--config", p); + } + + const { createBackend } = await import("@backstage/backend-defaults"); + const { + dynamicPluginsFeatureLoader, + CommonJSModuleLoader, + dynamicPluginsFrontendServiceRef, + } = await import("@backstage/backend-dynamic-feature-service"); + const { PackageRoles } = await import("@backstage/cli-node"); + const { createServiceFactory } = + await import("@backstage/backend-plugin-api"); + const path = await import("node:path"); + + const backend = createBackend(); + + backend.add( + dynamicPluginsFeatureLoader({ + schemaLocator(pluginPackage) { + const platform = PackageRoles.getRoleInfo( + pluginPackage.manifest.backstage.role, + 
).platform; + return path.join( + platform === "node" ? "dist" : "dist-scalprum", + "configSchema.json", + ); + }, + moduleLoader: (logger) => new CommonJSModuleLoader({ logger }), + }), + ); + + backend.add( + createServiceFactory({ + service: dynamicPluginsFrontendServiceRef, + deps: {}, + factory: () => ({ setResolverProvider() {} }), + }), + ); + + backend.add(import("@backstage/plugin-catalog-backend")); + backend.add( + import("@backstage/plugin-catalog-backend-module-scaffolder-entity-model"), + ); + backend.add(import("@backstage/plugin-catalog-backend-module-logs")); + backend.add(import("@backstage/plugin-auth-backend")); + backend.add(import("@backstage/plugin-auth-backend-module-guest-provider")); + backend.add(import("@backstage/plugin-permission-backend")); + backend.add( + import("@backstage/plugin-permission-backend-module-allow-all-policy"), + ); + backend.add(import("@backstage/plugin-scaffolder-backend")); + backend.add(import("@backstage/plugin-events-backend")); + backend.add(import("@backstage/plugin-search-backend")); + backend.add(import("@backstage/plugin-search-backend-module-catalog")); + backend.add(import("@backstage/plugin-proxy-backend")); + + await backend.start(); + return backend; +} + +// --------------------------------------------------------------------------- +// Health & route probing +// --------------------------------------------------------------------------- + +async function waitForReady(port, timeoutSec) { + const url = `http://localhost:${port}/.backstage/health/v1/readiness`; + const deadline = Date.now() + timeoutSec * 1000; + while (Date.now() < deadline) { + try { + const r = await fetch(url); + if (r.ok) return; + } catch { + /* not ready */ + } + await new Promise((r) => setTimeout(r, 2000)); + } + throw new Error(`Backend not ready within ${timeoutSec}s`); +} + +function readPluginMeta(pluginsRoot, pluginPath) { + try { + const pkg = JSON.parse( + readFileSync(join(pluginsRoot, pluginPath, "package.json"), 
"utf8"), + ); + return { + pkgName: pkg.name ?? pluginPath, + role: pkg.backstage?.role ?? "unknown", + pluginId: pkg.backstage?.pluginId ?? null, + }; + } catch { + return { pkgName: pluginPath, role: "unknown", pluginId: null }; + } +} + +async function probePluginRoutes(plugins, port, pluginsRoot) { + const results = []; + for (const plugin of plugins) { + const { pluginPath } = parseOciRef(plugin.package); + if (!pluginPath) continue; + + const { pkgName, role, pluginId } = readPluginMeta(pluginsRoot, pluginPath); + + if (role !== "backend-plugin") { + results.push({ pkgName, role, pluginPath, status: "skip" }); + continue; + } + + if (!pluginId) { + results.push({ + pkgName, + role, + pluginPath, + status: "warn", + http: 0, + pluginId: "(unknown)", + }); + continue; + } + + try { + const res = await fetch(`http://localhost:${port}/api/${pluginId}`); + results.push({ + pkgName, + role, + pluginPath, + pluginId, + status: res.status === 404 ? "warn" : "pass", + http: res.status, + }); + } catch (err) { + results.push({ + pkgName, + role, + pluginPath, + pluginId, + status: "fail", + error: err.message, + }); + } + } + return results; +} + +// --------------------------------------------------------------------------- +// Reporting +// --------------------------------------------------------------------------- + +function reportAndWrite(results, resultsFile) { + console.log("\n========== Smoke Test Results ==========\n"); + const failedPlugins = []; + + for (const r of results) { + switch (r.status) { + case "skip": + console.log(` SKIP ${r.pkgName} (${r.role})`); + break; + case "pass": + console.log(` PASS ${r.pkgName} → /api/${r.pluginId} (${r.http})`); + break; + case "warn": + console.log( + ` WARN ${r.pkgName} → /api/${r.pluginId} (404 — pluginId guess may be wrong)`, + ); + break; + default: + console.log(` FAIL ${r.pkgName} ${r.error}`); + failedPlugins.push(r.pkgName); + } + } + + const counts = { pass: 0, warn: 0, skip: 0, fail: 0 }; + for (const r of 
results) counts[r.status]++; + console.log( + `\n Total: ${results.length} Pass: ${counts.pass} Warn: ${counts.warn} Skip: ${counts.skip} Fail: ${counts.fail}\n`, + ); + + const success = counts.fail === 0; + writeFileSync( + resultsFile, + JSON.stringify({ success, failedPlugins, results }, null, 2), + ); + return success; +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + +async function main() { + const args = parseArgs(process.argv.slice(2)); + + console.log("\n=== RHDH Smoke Test (Docker-free) ===\n"); + + console.log("1. Loading plugin configuration"); + const plugins = parsePluginsYaml(args.pluginsYaml); + if (!plugins.length) { + console.log(" No enabled plugins found."); + process.exit(0); + } + console.log(` ${plugins.length} plugin(s) enabled`); + + loadEnvFile(args.envFile); + + console.log("\n2. Downloading OCI plugin images"); + await downloadPlugins(plugins, args.pluginsRoot); + + console.log("\n3. Generating merged plugin config"); + const generatedCfg = generateMergedConfig(plugins, args.pluginsRoot); + + console.log("\n4. Booting Backstage backend"); + const allConfigs = [...args.configs, generatedCfg]; + const backend = await bootBackend(allConfigs); + + let success = false; + try { + console.log("\n5. Waiting for readiness"); + await waitForReady(args.port, args.timeout); + + console.log("\n6. Probing plugin routes"); + const results = await probePluginRoutes( + plugins, + args.port, + args.pluginsRoot, + ); + success = reportAndWrite(results, args.resultsFile); + } finally { + console.log("Shutting down backend..."); + await backend.stop(); + } + + process.exit(success ? 
0 : 1); +} + +await main().catch((err) => { + console.error("\nSmoke test failed:", err.message || err); + process.exit(1); +}); From ee107521f0eec6025b96b87dd7d8b016205c8e1d Mon Sep 17 00:00:00 2001 From: rostalan Date: Thu, 9 Apr 2026 16:21:20 +0200 Subject: [PATCH 2/8] feat(smoke-tests): validate frontend plugins via bundle checks and loaded-plugin probe Add two-layer frontend plugin validation to the smoke test harness: - Layer 1: static validation of dist-scalprum/ after OCI download - Layer 2: inline backend probe plugin querying dynamicPluginsServiceRef Made-with: Cursor --- smoke-tests/smoke-test.mjs | 281 +++++++++++++++++++++++++++++++++++-- 1 file changed, 272 insertions(+), 9 deletions(-) diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.mjs index 3d3c0077d..4e167b32f 100644 --- a/smoke-tests/smoke-test.mjs +++ b/smoke-tests/smoke-test.mjs @@ -7,6 +7,7 @@ import { mkdirSync, existsSync, rmSync, + readdirSync, } from "node:fs"; import { join, resolve, dirname } from "node:path"; import { fileURLToPath } from "node:url"; @@ -88,6 +89,12 @@ function parseOciRef(packageStr) { }; } +const FRONTEND_ROLES = new Set(["frontend-plugin", "frontend-plugin-module"]); + +function isFrontendRole(role) { + return FRONTEND_ROLES.has(role); +} + // --------------------------------------------------------------------------- // OCI Download (mirrors install-dynamic-plugins.py §663-715) // --------------------------------------------------------------------------- @@ -148,6 +155,61 @@ async function downloadPlugins(plugins, dest) { rmSync(tmpDir, { recursive: true, force: true }); } +// --------------------------------------------------------------------------- +// Frontend bundle validation (Layer 1) +// --------------------------------------------------------------------------- + +function findJsFiles(dir) { + const entries = readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + const full = join(dir, entry.name); + if 
(entry.isDirectory()) { + if (findJsFiles(full)) return true; + } else if (/\.(js|mjs|cjs)$/.test(entry.name)) { + return true; + } + } + return false; +} + +function validateFrontendBundles(plugins, pluginsRoot) { + const results = []; + for (const plugin of plugins) { + const { pluginPath } = parseOciRef(plugin.package); + if (!pluginPath) continue; + + const { pkgName, role } = readPluginMeta(pluginsRoot, pluginPath); + if (!isFrontendRole(role)) continue; + + const scalprumDir = join(pluginsRoot, pluginPath, "dist-scalprum"); + + if (!existsSync(scalprumDir)) { + results.push({ + pkgName, + role, + pluginPath, + status: "fail-bundle", + detail: "dist-scalprum/ directory missing", + }); + continue; + } + + if (!findJsFiles(scalprumDir)) { + results.push({ + pkgName, + role, + pluginPath, + status: "fail-bundle", + detail: "dist-scalprum/ contains no .js/.mjs/.cjs files", + }); + continue; + } + + results.push({ pkgName, role, pluginPath, status: "pass" }); + } + return results; +} + // --------------------------------------------------------------------------- // Config generation // --------------------------------------------------------------------------- @@ -187,9 +249,10 @@ async function bootBackend(configPaths) { dynamicPluginsFeatureLoader, CommonJSModuleLoader, dynamicPluginsFrontendServiceRef, + dynamicPluginsServiceRef, } = await import("@backstage/backend-dynamic-feature-service"); const { PackageRoles } = await import("@backstage/cli-node"); - const { createServiceFactory } = + const { createServiceFactory, createBackendPlugin, coreServices } = await import("@backstage/backend-plugin-api"); const path = await import("node:path"); @@ -218,6 +281,36 @@ async function bootBackend(configPaths) { }), ); + backend.add( + createBackendPlugin({ + pluginId: "smoke-test-probe", + register(env) { + env.registerInit({ + deps: { + http: coreServices.httpRouter, + dynamicPlugins: dynamicPluginsServiceRef, + }, + async init({ http, dynamicPlugins }) { + const { 
Router } = await import("express"); + const router = Router(); + router.get("/loaded-plugins", (_, res) => { + res.json(dynamicPlugins.plugins({ includeFailed: true })); + }); + http.use(router); + try { + http.addAuthPolicy({ + path: "/loaded-plugins", + allow: "unauthenticated", + }); + } catch { + /* API may not exist on this version */ + } + }, + }); + }, + }), + ); + backend.add(import("@backstage/plugin-catalog-backend")); backend.add( import("@backstage/plugin-catalog-backend-module-scaffolder-entity-model"), @@ -281,6 +374,8 @@ async function probePluginRoutes(plugins, port, pluginsRoot) { const { pkgName, role, pluginId } = readPluginMeta(pluginsRoot, pluginPath); + if (isFrontendRole(role)) continue; + if (role !== "backend-plugin") { results.push({ pkgName, role, pluginPath, status: "skip" }); continue; @@ -322,6 +417,98 @@ async function probePluginRoutes(plugins, port, pluginsRoot) { return results; } +// --------------------------------------------------------------------------- +// Frontend plugin probing (Layer 2) +// --------------------------------------------------------------------------- + +async function probeFrontendPlugins(plugins, port, pluginsRoot) { + const frontendPlugins = []; + for (const plugin of plugins) { + const { pluginPath } = parseOciRef(plugin.package); + if (!pluginPath) continue; + const meta = readPluginMeta(pluginsRoot, pluginPath); + if (isFrontendRole(meta.role)) { + frontendPlugins.push({ ...meta, pluginPath }); + } + } + + if (frontendPlugins.length === 0) return []; + + const failAll = (detail) => + frontendPlugins.map((fp) => ({ + pkgName: fp.pkgName, + role: fp.role, + pluginPath: fp.pluginPath, + status: "fail-load", + detail, + })); + + let res; + try { + res = await fetch( + `http://localhost:${port}/api/smoke-test-probe/loaded-plugins`, + ); + } catch (err) { + return failAll(`probe endpoint unreachable: ${err.message}`); + } + + if (!res.ok) { + return failAll(`probe returned HTTP ${res.status}`); + } + + let 
body; + try { + body = await res.json(); + } catch { + return failAll("invalid probe response"); + } + + if (!Array.isArray(body)) { + return failAll("invalid probe response"); + } + + const results = []; + for (const fp of frontendPlugins) { + const loaded = body.find( + (lp) => lp && typeof lp === "object" && lp.name === fp.pkgName, + ); + + if (!loaded) { + results.push({ + pkgName: fp.pkgName, + role: fp.role, + pluginPath: fp.pluginPath, + status: "fail-load", + detail: "not found in loaded plugins list", + }); + } else if (loaded.platform !== "web") { + results.push({ + pkgName: fp.pkgName, + role: fp.role, + pluginPath: fp.pluginPath, + status: "fail-load", + detail: `unexpected platform: ${loaded.platform}`, + }); + } else if (loaded.failure) { + results.push({ + pkgName: fp.pkgName, + role: fp.role, + pluginPath: fp.pluginPath, + status: "fail-load", + detail: `plugin loaded with failure: ${loaded.failure}`, + }); + } else { + results.push({ + pkgName: fp.pkgName, + role: fp.role, + pluginPath: fp.pluginPath, + status: "pass", + }); + } + } + return results; +} + // --------------------------------------------------------------------------- // Reporting // --------------------------------------------------------------------------- @@ -336,26 +523,53 @@ function reportAndWrite(results, resultsFile) { console.log(` SKIP ${r.pkgName} (${r.role})`); break; case "pass": - console.log(` PASS ${r.pkgName} → /api/${r.pluginId} (${r.http})`); + if (r.pluginId) { + console.log( + ` PASS ${r.pkgName} → /api/${r.pluginId} (${r.http})`, + ); + } else if (isFrontendRole(r.role)) { + console.log(` PASS ${r.pkgName} (${r.role})`); + } break; case "warn": console.log( ` WARN ${r.pkgName} → /api/${r.pluginId} (404 — pluginId guess may be wrong)`, ); break; + case "fail-bundle": + console.log(` FAIL ${r.pkgName} [bundle] ${r.detail}`); + failedPlugins.push(r.pkgName); + break; + case "fail-load": + console.log(` FAIL ${r.pkgName} [load] ${r.detail}`); + 
failedPlugins.push(r.pkgName); + break; default: console.log(` FAIL ${r.pkgName} ${r.error}`); failedPlugins.push(r.pkgName); } } - const counts = { pass: 0, warn: 0, skip: 0, fail: 0 }; - for (const r of results) counts[r.status]++; + const be = { pass: 0, warn: 0, skip: 0, fail: 0 }; + const fe = { pass: 0, fail: 0 }; + for (const r of results) { + const isFe = isFrontendRole(r.role); + if (r.status === "fail-bundle" || r.status === "fail-load") { + if (isFe) fe.fail++; + else be.fail++; + } else if (isFe) { + fe[r.status] = (fe[r.status] ?? 0) + 1; + } else { + be[r.status] = (be[r.status] ?? 0) + 1; + } + } + const total = results.length; + const totalFail = be.fail + fe.fail; console.log( - `\n Total: ${results.length} Pass: ${counts.pass} Warn: ${counts.warn} Skip: ${counts.skip} Fail: ${counts.fail}\n`, + `\n Total: ${total} Backend: ${be.pass} pass / ${be.warn} warn / ${be.fail} fail / ${be.skip} skip Frontend: ${fe.pass} pass / ${fe.fail} fail\n`, ); - const success = counts.fail === 0; + const success = totalFail === 0; writeFileSync( resultsFile, JSON.stringify({ success, failedPlugins, results }, null, 2), @@ -363,6 +577,32 @@ function reportAndWrite(results, resultsFile) { return success; } +// --------------------------------------------------------------------------- +// Result merging +// --------------------------------------------------------------------------- + +function mergeFrontendResults(bundleResults, loadResults) { + const loadMap = new Map(); + for (const r of loadResults) { + if (!loadMap.has(r.pkgName)) loadMap.set(r.pkgName, r); + } + + return bundleResults.map((br) => { + if (br.status === "fail-bundle") return br; + + const lr = loadMap.get(br.pkgName); + if (!lr) { + return { + ...br, + status: "fail-load", + detail: "missing load probe result", + }; + } + if (lr.status === "fail-load") return lr; + return { ...br, status: "pass" }; + }); +} + // --------------------------------------------------------------------------- // Main // 
--------------------------------------------------------------------------- @@ -385,6 +625,15 @@ async function main() { console.log("\n2. Downloading OCI plugin images"); await downloadPlugins(plugins, args.pluginsRoot); + console.log("\n2b. Validating frontend bundles"); + const bundleResults = validateFrontendBundles(plugins, args.pluginsRoot); + const bundleFailCount = bundleResults.filter( + (r) => r.status === "fail-bundle", + ).length; + console.log( + ` ${bundleResults.length} frontend plugin(s) checked, ${bundleFailCount} failed`, + ); + console.log("\n3. Generating merged plugin config"); const generatedCfg = generateMergedConfig(plugins, args.pluginsRoot); @@ -397,13 +646,27 @@ async function main() { console.log("\n5. Waiting for readiness"); await waitForReady(args.port, args.timeout); - console.log("\n6. Probing plugin routes"); - const results = await probePluginRoutes( + console.log("\n6a. Probing backend plugin routes"); + const backendResults = await probePluginRoutes( + plugins, + args.port, + args.pluginsRoot, + ); + + console.log("\n6b. Probing frontend loaded plugins"); + const frontendLoadResults = await probeFrontendPlugins( plugins, args.port, args.pluginsRoot, ); - success = reportAndWrite(results, args.resultsFile); + + const frontendResults = mergeFrontendResults( + bundleResults, + frontendLoadResults, + ); + + const allResults = [...backendResults, ...frontendResults]; + success = reportAndWrite(allResults, args.resultsFile); } finally { console.log("Shutting down backend..."); await backend.stop(); From 86c082455b6003749f9c334d262b39164b4414fa Mon Sep 17 00:00:00 2001 From: rostalan Date: Mon, 13 Apr 2026 17:23:25 +0200 Subject: [PATCH 3/8] feat(smoke-tests): switch to startTestBackend, delegate OCI download to install-dynamic-plugins.py Replace createBackend() with startTestBackend() for a minimal, focused smoke test that only validates dynamic plugin loading. 
Remove 23 static plugin dependencies and the in-process OCI download
logic in favor of the upstream install-dynamic-plugins.py script
fetched at CI time.

Made-with: Cursor
Signed-off-by: rlan@redhat.com
---
 .../workflows/run-workspace-smoke-tests.yaml  |  26 +-
 .gitignore                                    |   2 +-
 smoke-tests/app-config.yaml                   |  40 ---
 smoke-tests/package.json                      |  24 +-
 smoke-tests/smoke-test.mjs                    | 324 +++++++-----------
 5 files changed, 144 insertions(+), 272 deletions(-)

diff --git a/.github/workflows/run-workspace-smoke-tests.yaml b/.github/workflows/run-workspace-smoke-tests.yaml
index 97d5e009b..2d64983f2 100644
--- a/.github/workflows/run-workspace-smoke-tests.yaml
+++ b/.github/workflows/run-workspace-smoke-tests.yaml
@@ -49,6 +49,9 @@ jobs:
           fi
           skopeo --version
 
+      - name: Install Python dependencies
+        run: pip install pyyaml
+
       - name: Authenticate to GHCR
         run: echo "${{ secrets.GITHUB_TOKEN }}" | skopeo login ghcr.io -u "${{ github.actor }}" --password-stdin
 
@@ -63,6 +66,21 @@ jobs:
           echo "=== dynamic-plugins.test.yaml (first 40 lines) ==="
           head -40 ./artifacts/dynamic-plugins.test.yaml || true
 
+      # TODO: Replace with native TypeScript implementation once available upstream.
+ - name: Download install-dynamic-plugins.py + run: | + curl -fsSL \ + "https://raw.githubusercontent.com/redhat-developer/rhdh/3efb9cc140ff/scripts/install-dynamic-plugins/install-dynamic-plugins.py" \ + -o ./artifacts/harness/install-dynamic-plugins.py + + - name: Download and extract plugins + working-directory: ./artifacts/harness + run: | + cp ../dynamic-plugins.test.yaml dynamic-plugins.yaml + python3 install-dynamic-plugins.py ./dynamic-plugins-root + env: + SKIP_INTEGRITY_CHECK: "true" + - name: Install smoke test dependencies working-directory: ./artifacts/harness run: npm install --ignore-scripts 2>&1 | tail -5 @@ -77,13 +95,17 @@ jobs: [ -f ../app-config.yaml ] && CONFIG_ARGS="$CONFIG_ARGS --config ../app-config.yaml" [ -f ../app-config.test.yaml ] && CONFIG_ARGS="$CONFIG_ARGS --config ../app-config.test.yaml" + GENERATED_CFG="./dynamic-plugins-root/app-config.dynamic-plugins.yaml" + [ -f "$GENERATED_CFG" ] && CONFIG_ARGS="$CONFIG_ARGS --config $GENERATED_CFG" + ENV_ARGS="" [ -f ../test.env ] && ENV_ARGS="--env-file ../test.env" - echo "Running: node smoke-test.mjs --plugins-yaml ../dynamic-plugins.test.yaml $CONFIG_ARGS $ENV_ARGS" + echo "Running: node smoke-test.mjs --plugins-yaml ../dynamic-plugins.test.yaml --skip-download $CONFIG_ARGS $ENV_ARGS" node smoke-test.mjs \ --plugins-yaml ../dynamic-plugins.test.yaml \ + --skip-download \ $CONFIG_ARGS \ $ENV_ARGS \ 2>&1 | tee ../smoke-test.log @@ -136,4 +158,4 @@ jobs: - name: Cleanup if: always() - run: rm -rf ./artifacts/harness/dynamic-plugins-root ./artifacts/harness/.tmp-oci || true + run: rm -rf ./artifacts/harness/dynamic-plugins-root || true diff --git a/.gitignore b/.gitignore index 65972e55c..dff9f5ab2 100644 --- a/.gitignore +++ b/.gitignore @@ -66,4 +66,4 @@ smoke-tests/results.json # Coverage coverage/ -.nyc_output/ \ No newline at end of file +.nyc_output/ diff --git a/smoke-tests/app-config.yaml b/smoke-tests/app-config.yaml index c7014c278..ffe09ec52 100644 --- 
a/smoke-tests/app-config.yaml +++ b/smoke-tests/app-config.yaml @@ -1,45 +1,5 @@ -app: - title: Backstage Test App - baseUrl: http://localhost:7007 - -backend: - baseUrl: http://localhost:7007 - listen: - port: 7007 - database: - client: better-sqlite3 - connection: ":memory:" - csp: - connect-src: ["'self'", "http:", "https:"] - cors: - origin: http://localhost:7007 - methods: [GET, POST, PUT, DELETE] - credentials: true - -auth: - environment: development - providers: - guest: {} - -# Minimal catalog configuration. -catalog: - rules: - - allow: [Component, API, Group, User, Template, Location] - locations: [] - -# This is required by the frontend app plugin for schema validation. dynamicPlugins: - # This directory is used by the backend to unpack OCI plugins. rootDirectory: ./dynamic-plugins-root - # Disable the default directory scanning for local plugins sources: local: {} - # A dummy frontend config is sufficient for our backend-only test. frontend: {} - -# Add minimal analytics key to satisfy app bundle schema validation. -analytics: {} - -# Disable the permission plugin for simplified testing. -permission: - enabled: false diff --git a/smoke-tests/package.json b/smoke-tests/package.json index c2828fd4b..17db381a5 100644 --- a/smoke-tests/package.json +++ b/smoke-tests/package.json @@ -10,36 +10,14 @@ "prettier:fix": "prettier --write ." 
}, "dependencies": { - "@backstage/backend-app-api": "1.5.0", - "@backstage/backend-defaults": "0.15.2", "@backstage/backend-dynamic-feature-service": "0.7.9", "@backstage/backend-plugin-api": "1.7.0", - "@backstage/catalog-client": "1.13.0", - "@backstage/catalog-model": "1.7.6", + "@backstage/backend-test-utils": "1.4.0", "@backstage/cli-node": "0.2.18", "@backstage/config": "1.3.6", "@backstage/config-loader": "1.10.8", "@backstage/errors": "1.2.7", - "@backstage/plugin-auth-backend": "0.27.0", - "@backstage/plugin-auth-backend-module-guest-provider": "0.2.16", - "@backstage/plugin-auth-node": "0.6.13", - "@backstage/plugin-catalog-backend": "3.4.0", - "@backstage/plugin-catalog-backend-module-logs": "0.1.19", - "@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "0.2.17", - "@backstage/plugin-catalog-node": "2.0.0", - "@backstage/plugin-events-backend": "0.5.11", - "@backstage/plugin-events-node": "0.4.19", - "@backstage/plugin-permission-backend": "0.7.9", - "@backstage/plugin-permission-backend-module-allow-all-policy": "0.2.17", - "@backstage/plugin-permission-common": "0.9.6", - "@backstage/plugin-permission-node": "0.7.9", - "@backstage/plugin-proxy-backend": "0.6.10", - "@backstage/plugin-scaffolder-backend": "3.1.3", - "@backstage/plugin-scaffolder-node": "0.6.3", - "@backstage/plugin-search-backend": "2.0.12", - "@backstage/plugin-search-backend-module-catalog": "0.3.12", "@backstage/types": "1.2.2", - "better-sqlite3": "12.8.0", "js-yaml": "4.1.0" }, "devDependencies": { diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.mjs index 4e167b32f..79640d2dc 100644 --- a/smoke-tests/smoke-test.mjs +++ b/smoke-tests/smoke-test.mjs @@ -1,17 +1,8 @@ #!/usr/bin/env node -import { spawnSync, execSync } from "node:child_process"; -import { - readFileSync, - writeFileSync, - mkdirSync, - existsSync, - rmSync, - readdirSync, -} from "node:fs"; +import { readFileSync, writeFileSync, existsSync, readdirSync } from "node:fs"; import { join, 
resolve, dirname } from "node:path"; import { fileURLToPath } from "node:url"; -import { createHash } from "node:crypto"; import yaml from "js-yaml"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -25,10 +16,9 @@ function parseArgs(argv) { pluginsYaml: null, configs: [], envFile: null, - port: 7007, - timeout: 120, pluginsRoot: resolve(__dirname, "dynamic-plugins-root"), resultsFile: resolve(__dirname, "results.json"), + skipDownload: false, }; for (let i = 0; i < argv.length; i++) { switch (argv[i]) { @@ -41,17 +31,14 @@ function parseArgs(argv) { case "--env-file": args.envFile = resolve(argv[++i]); break; - case "--port": - args.port = Number.parseInt(argv[++i], 10); - break; - case "--timeout": - args.timeout = Number.parseInt(argv[++i], 10); + case "--skip-download": + args.skipDownload = true; break; } } if (!args.pluginsYaml) { console.error( - "Usage: node smoke-test.mjs --plugins-yaml [--config ...] [--env-file ]", + "Usage: node smoke-test.mjs --plugins-yaml [--config ...] 
[--env-file ] [--skip-download]", ); process.exit(1); } @@ -96,63 +83,29 @@ function isFrontendRole(role) { } // --------------------------------------------------------------------------- -// OCI Download (mirrors install-dynamic-plugins.py §663-715) +// Config helpers // --------------------------------------------------------------------------- -function pullOciImage(imageRef, tmpDir) { - const hash = createHash("sha256").update(imageRef).digest("hex").slice(0, 16); - const localDir = join(tmpDir, hash); - mkdirSync(localDir, { recursive: true }); - - const result = spawnSync( - "skopeo", - [ - "copy", - "--override-os=linux", - "--override-arch=amd64", - `docker://${imageRef}`, - `dir:${localDir}`, - ], - { stdio: "inherit" }, - ); - if (result.status !== 0) - throw new Error(`skopeo copy failed for ${imageRef}`); - - const manifest = JSON.parse( - readFileSync(join(localDir, "manifest.json"), "utf8"), - ); - const [, filename] = manifest.layers[0].digest.split(":"); - return join(localDir, filename); -} - -function extractPlugin(tarFile, pluginPath, dest) { - mkdirSync(join(dest, pluginPath), { recursive: true }); - execSync(`tar xf "${tarFile}" -C "${dest}" "${pluginPath}/"`, { - stdio: "pipe", - }); +function deepMerge(src, dst) { + for (const [k, v] of Object.entries(src)) { + if (v && typeof v === "object" && !Array.isArray(v)) { + dst[k] = dst[k] ?? 
{}; + deepMerge(v, dst[k]); + } else { + dst[k] = v; + } + } + return dst; } -async function downloadPlugins(plugins, dest) { - mkdirSync(dest, { recursive: true }); - const tmpDir = resolve(__dirname, ".tmp-oci"); - mkdirSync(tmpDir, { recursive: true }); - - const imageCache = new Map(); - for (const plugin of plugins) { - const { imageRef, pluginPath } = parseOciRef(plugin.package); - if (!imageRef || !pluginPath) { - console.warn(` Skip (invalid ref): ${plugin.package}`); - continue; - } - console.log(` ${pluginPath}`); - let tarFile = imageCache.get(imageRef); - if (!tarFile) { - tarFile = pullOciImage(imageRef, tmpDir); - imageCache.set(imageRef, tarFile); - } - extractPlugin(tarFile, pluginPath, dest); +function loadConfigs(configPaths) { + const merged = {}; + for (const p of configPaths) { + if (!existsSync(p)) continue; + const doc = yaml.load(readFileSync(p, "utf8")); + if (doc && typeof doc === "object") deepMerge(doc, merged); } - rmSync(tmpDir, { recursive: true, force: true }); + return merged; } // --------------------------------------------------------------------------- @@ -211,40 +164,12 @@ function validateFrontendBundles(plugins, pluginsRoot) { } // --------------------------------------------------------------------------- -// Config generation -// --------------------------------------------------------------------------- - -function deepMerge(src, dst) { - for (const [k, v] of Object.entries(src)) { - if (v && typeof v === "object" && !Array.isArray(v)) { - dst[k] = dst[k] ?? 
{}; - deepMerge(v, dst[k]); - } else { - dst[k] = v; - } - } - return dst; -} - -function generateMergedConfig(plugins, pluginsRoot) { - const cfg = { dynamicPlugins: { rootDirectory: pluginsRoot } }; - for (const p of plugins) if (p.pluginConfig) deepMerge(p.pluginConfig, cfg); - const outPath = join(pluginsRoot, "app-config.dynamic-plugins.yaml"); - writeFileSync(outPath, yaml.dump(cfg)); - return outPath; -} - -// --------------------------------------------------------------------------- -// Backend boot +// Backend boot (startTestBackend + probe plugin) // --------------------------------------------------------------------------- -async function bootBackend(configPaths) { - process.argv = ["node", "smoke-test.mjs"]; - for (const p of configPaths) { - if (existsSync(p)) process.argv.push("--config", p); - } - - const { createBackend } = await import("@backstage/backend-defaults"); +async function bootBackend(configData) { + const { startTestBackend, mockServices } = + await import("@backstage/backend-test-utils"); const { dynamicPluginsFeatureLoader, CommonJSModuleLoader, @@ -256,101 +181,67 @@ async function bootBackend(configPaths) { await import("@backstage/backend-plugin-api"); const path = await import("node:path"); - const backend = createBackend(); - - backend.add( - dynamicPluginsFeatureLoader({ - schemaLocator(pluginPackage) { - const platform = PackageRoles.getRoleInfo( - pluginPackage.manifest.backstage.role, - ).platform; - return path.join( - platform === "node" ? 
"dist" : "dist-scalprum", - "configSchema.json", - ); - }, - moduleLoader: (logger) => new CommonJSModuleLoader({ logger }), - }), - ); - - backend.add( - createServiceFactory({ - service: dynamicPluginsFrontendServiceRef, - deps: {}, - factory: () => ({ setResolverProvider() {} }), - }), - ); - - backend.add( - createBackendPlugin({ - pluginId: "smoke-test-probe", - register(env) { - env.registerInit({ - deps: { - http: coreServices.httpRouter, - dynamicPlugins: dynamicPluginsServiceRef, - }, - async init({ http, dynamicPlugins }) { - const { Router } = await import("express"); - const router = Router(); - router.get("/loaded-plugins", (_, res) => { - res.json(dynamicPlugins.plugins({ includeFailed: true })); + const smokeTestProbePlugin = createBackendPlugin({ + pluginId: "smoke-test-probe", + register(env) { + env.registerInit({ + deps: { + http: coreServices.httpRouter, + dynamicPlugins: dynamicPluginsServiceRef, + }, + async init({ http, dynamicPlugins }) { + const { Router } = await import("express"); + const router = Router(); + router.get("/loaded-plugins", (_, res) => { + res.json(dynamicPlugins.plugins({ includeFailed: true })); + }); + http.use(router); + try { + http.addAuthPolicy({ + path: "/loaded-plugins", + allow: "unauthenticated", }); - http.use(router); - try { - http.addAuthPolicy({ - path: "/loaded-plugins", - allow: "unauthenticated", - }); - } catch { - /* API may not exist on this version */ - } - }, - }); - }, - }), - ); + } catch { + /* API may not exist on this version */ + } + }, + }); + }, + }); - backend.add(import("@backstage/plugin-catalog-backend")); - backend.add( - import("@backstage/plugin-catalog-backend-module-scaffolder-entity-model"), - ); - backend.add(import("@backstage/plugin-catalog-backend-module-logs")); - backend.add(import("@backstage/plugin-auth-backend")); - backend.add(import("@backstage/plugin-auth-backend-module-guest-provider")); - backend.add(import("@backstage/plugin-permission-backend")); - backend.add( - 
import("@backstage/plugin-permission-backend-module-allow-all-policy"), - ); - backend.add(import("@backstage/plugin-scaffolder-backend")); - backend.add(import("@backstage/plugin-events-backend")); - backend.add(import("@backstage/plugin-search-backend")); - backend.add(import("@backstage/plugin-search-backend-module-catalog")); - backend.add(import("@backstage/plugin-proxy-backend")); - - await backend.start(); - return backend; + const { server } = await startTestBackend({ + features: [ + mockServices.rootConfig.factory({ data: configData }), + dynamicPluginsFeatureLoader({ + schemaLocator(pluginPackage) { + const platform = PackageRoles.getRoleInfo( + pluginPackage.manifest.backstage.role, + ).platform; + return path.join( + platform === "node" ? "dist" : "dist-scalprum", + "configSchema.json", + ); + }, + moduleLoader: (logger) => new CommonJSModuleLoader({ logger }), + }), + createServiceFactory({ + service: dynamicPluginsFrontendServiceRef, + deps: {}, + factory: () => ({ setResolverProvider() {} }), + }), + smokeTestProbePlugin, + ], + }); + + const addr = server.address(); + const port = typeof addr === "object" ? 
addr.port : 7007; + return { server, port }; } // --------------------------------------------------------------------------- -// Health & route probing +// Plugin metadata & route probing // --------------------------------------------------------------------------- -async function waitForReady(port, timeoutSec) { - const url = `http://localhost:${port}/.backstage/health/v1/readiness`; - const deadline = Date.now() + timeoutSec * 1000; - while (Date.now() < deadline) { - try { - const r = await fetch(url); - if (r.ok) return; - } catch { - /* not ready */ - } - await new Promise((r) => setTimeout(r, 2000)); - } - throw new Error(`Backend not ready within ${timeoutSec}s`); -} - function readPluginMeta(pluginsRoot, pluginPath) { try { const pkg = JSON.parse( @@ -622,8 +513,33 @@ async function main() { loadEnvFile(args.envFile); - console.log("\n2. Downloading OCI plugin images"); - await downloadPlugins(plugins, args.pluginsRoot); + if (!args.skipDownload) { + console.log( + "\n2. Plugin download expected via install-dynamic-plugins.py pre-step", + ); + } else { + console.log("\n2. Skipping download (--skip-download)"); + } + + if (!existsSync(args.pluginsRoot)) { + console.error( + ` ERROR: plugins root directory not found: ${args.pluginsRoot}`, + ); + console.error( + " Run install-dynamic-plugins.py first, or use --skip-download with a pre-populated directory.", + ); + process.exit(1); + } + + const generatedCfg = join( + args.pluginsRoot, + "app-config.dynamic-plugins.yaml", + ); + if (!existsSync(generatedCfg)) { + console.warn( + ` WARN: ${generatedCfg} not found — install-dynamic-plugins.py may not have been run`, + ); + } console.log("\n2b. Validating frontend bundles"); const bundleResults = validateFrontendBundles(plugins, args.pluginsRoot); @@ -634,29 +550,25 @@ async function main() { ` ${bundleResults.length} frontend plugin(s) checked, ${bundleFailCount} failed`, ); - console.log("\n3. 
Generating merged plugin config"); - const generatedCfg = generateMergedConfig(plugins, args.pluginsRoot); - - console.log("\n4. Booting Backstage backend"); - const allConfigs = [...args.configs, generatedCfg]; - const backend = await bootBackend(allConfigs); + console.log("\n3. Booting Backstage backend (startTestBackend)"); + const allConfigPaths = [...args.configs]; + if (existsSync(generatedCfg)) allConfigPaths.push(generatedCfg); + const configData = loadConfigs(allConfigPaths); + const { server, port } = await bootBackend(configData); let success = false; try { - console.log("\n5. Waiting for readiness"); - await waitForReady(args.port, args.timeout); - - console.log("\n6a. Probing backend plugin routes"); + console.log("\n4a. Probing backend plugin routes"); const backendResults = await probePluginRoutes( plugins, - args.port, + port, args.pluginsRoot, ); - console.log("\n6b. Probing frontend loaded plugins"); + console.log("\n4b. Probing frontend loaded plugins"); const frontendLoadResults = await probeFrontendPlugins( plugins, - args.port, + port, args.pluginsRoot, ); @@ -669,7 +581,7 @@ async function main() { success = reportAndWrite(allResults, args.resultsFile); } finally { console.log("Shutting down backend..."); - await backend.stop(); + server.close(); } process.exit(success ? 
0 : 1); From 8357cf46b06c3cf313dc1b70230559c328fc9c55 Mon Sep 17 00:00:00 2001 From: rostalan Date: Wed, 15 Apr 2026 09:57:49 +0200 Subject: [PATCH 4/8] address sonarqube vulnerability --- .github/workflows/run-workspace-smoke-tests.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/run-workspace-smoke-tests.yaml b/.github/workflows/run-workspace-smoke-tests.yaml index 2d64983f2..e3e26d56e 100644 --- a/.github/workflows/run-workspace-smoke-tests.yaml +++ b/.github/workflows/run-workspace-smoke-tests.yaml @@ -53,7 +53,9 @@ jobs: run: pip install pyyaml - name: Authenticate to GHCR - run: echo "${{ secrets.GITHUB_TOKEN }}" | skopeo login ghcr.io -u "${{ github.actor }}" --password-stdin + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: echo $GITHUB_TOKEN | skopeo login ghcr.io -u "${{ github.actor }}" --password-stdin - name: Verify artifacts run: | From c0424d9fd7bf1f688cab3111f4f8f432c73e5f88 Mon Sep 17 00:00:00 2001 From: rostalan Date: Wed, 15 Apr 2026 10:14:05 +0200 Subject: [PATCH 5/8] address sonarqube analysis --- smoke-tests/smoke-test.mjs | 46 +++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.mjs index 79640d2dc..b8ad935a5 100644 --- a/smoke-tests/smoke-test.mjs +++ b/smoke-tests/smoke-test.mjs @@ -358,44 +358,48 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { return failAll("invalid probe response"); } - const results = []; - for (const fp of frontendPlugins) { - const loaded = body.find( - (lp) => lp && typeof lp === "object" && lp.name === fp.pkgName, - ); - + const toFrontendProbeResult = (fp, loaded) => { if (!loaded) { - results.push({ + return { pkgName: fp.pkgName, role: fp.role, pluginPath: fp.pluginPath, status: "fail-load", detail: "not found in loaded plugins list", - }); - } else if (loaded.platform !== "web") { - results.push({ + }; + } + if (loaded.platform !== "web") { + 
return { pkgName: fp.pkgName, role: fp.role, pluginPath: fp.pluginPath, status: "fail-load", detail: `unexpected platform: ${loaded.platform}`, - }); - } else if (loaded.failure) { - results.push({ + }; + } + if (loaded.failure) { + return { pkgName: fp.pkgName, role: fp.role, pluginPath: fp.pluginPath, status: "fail-load", detail: `plugin loaded with failure: ${loaded.failure}`, - }); - } else { - results.push({ - pkgName: fp.pkgName, - role: fp.role, - pluginPath: fp.pluginPath, - status: "pass", - }); + }; } + return { + pkgName: fp.pkgName, + role: fp.role, + pluginPath: fp.pluginPath, + status: "pass", + }; + }; + + const results = []; + for (const fp of frontendPlugins) { + const loaded = body.find( + (lp) => lp && typeof lp === "object" && lp.name === fp.pkgName, + ); + results.push(toFrontendProbeResult(fp, loaded)); } return results; } From 3b83f1d4034bd03a5b7edcea49b5b664896bc931 Mon Sep 17 00:00:00 2001 From: rostalan Date: Wed, 15 Apr 2026 14:13:12 +0200 Subject: [PATCH 6/8] refactor complex reportAndWrite --- smoke-tests/smoke-test.mjs | 97 +++++++++++++++++++++++--------------- 1 file changed, 58 insertions(+), 39 deletions(-) diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.mjs index b8ad935a5..e16de3153 100644 --- a/smoke-tests/smoke-test.mjs +++ b/smoke-tests/smoke-test.mjs @@ -408,55 +408,74 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { // Reporting // --------------------------------------------------------------------------- +function logPassResult(result) { + if (result.pluginId) { + return ` PASS ${result.pkgName} → /api/${result.pluginId} (${result.http})`; + } + if (isFrontendRole(result.role)) { + return ` PASS ${result.pkgName} (${result.role})`; + } + return null; +} + +function logResultAndCollectFailures(result, failedPlugins) { + const statusHandlers = { + skip: () => ({ line: ` SKIP ${result.pkgName} (${result.role})` }), + pass: () => ({ line: logPassResult(result) }), + warn: () => ({ + line: ` 
WARN ${result.pkgName} → /api/${result.pluginId} (404 — pluginId guess may be wrong)`, + }), + "fail-bundle": () => ({ + line: ` FAIL ${result.pkgName} [bundle] ${result.detail}`, + failed: true, + }), + "fail-load": () => ({ + line: ` FAIL ${result.pkgName} [load] ${result.detail}`, + failed: true, + }), + }; + + const handled = statusHandlers[result.status]?.() ?? { + line: ` FAIL ${result.pkgName} ${result.error}`, + failed: true, + }; + + if (handled.line) { + console.log(handled.line); + } + if (handled.failed) { + failedPlugins.push(result.pkgName); + } +} + +function updateResultCounts(result, backendCounts, frontendCounts) { + const isFrontend = isFrontendRole(result.role); + if (result.status === "fail-bundle" || result.status === "fail-load") { + if (isFrontend) frontendCounts.fail++; + else backendCounts.fail++; + return; + } + + if (isFrontend) { + frontendCounts[result.status] = (frontendCounts[result.status] ?? 0) + 1; + return; + } + + backendCounts[result.status] = (backendCounts[result.status] ?? 
0) + 1; +} + function reportAndWrite(results, resultsFile) { console.log("\n========== Smoke Test Results ==========\n"); const failedPlugins = []; for (const r of results) { - switch (r.status) { - case "skip": - console.log(` SKIP ${r.pkgName} (${r.role})`); - break; - case "pass": - if (r.pluginId) { - console.log( - ` PASS ${r.pkgName} → /api/${r.pluginId} (${r.http})`, - ); - } else if (isFrontendRole(r.role)) { - console.log(` PASS ${r.pkgName} (${r.role})`); - } - break; - case "warn": - console.log( - ` WARN ${r.pkgName} → /api/${r.pluginId} (404 — pluginId guess may be wrong)`, - ); - break; - case "fail-bundle": - console.log(` FAIL ${r.pkgName} [bundle] ${r.detail}`); - failedPlugins.push(r.pkgName); - break; - case "fail-load": - console.log(` FAIL ${r.pkgName} [load] ${r.detail}`); - failedPlugins.push(r.pkgName); - break; - default: - console.log(` FAIL ${r.pkgName} ${r.error}`); - failedPlugins.push(r.pkgName); - } + logResultAndCollectFailures(r, failedPlugins); } const be = { pass: 0, warn: 0, skip: 0, fail: 0 }; const fe = { pass: 0, fail: 0 }; for (const r of results) { - const isFe = isFrontendRole(r.role); - if (r.status === "fail-bundle" || r.status === "fail-load") { - if (isFe) fe.fail++; - else be.fail++; - } else if (isFe) { - fe[r.status] = (fe[r.status] ?? 0) + 1; - } else { - be[r.status] = (be[r.status] ?? 
0) + 1; - } + updateResultCounts(r, be, fe); } const total = results.length; const totalFail = be.fail + fe.fail; From b085b244789d55230c63e48bb58916159e927f23 Mon Sep 17 00:00:00 2001 From: rostalan Date: Thu, 16 Apr 2026 10:13:59 +0200 Subject: [PATCH 7/8] bumped node, switched to npm test, pinned action to commit, refactored smoke-test file --- .../workflows/run-workspace-smoke-tests.yaml | 8 +- smoke-tests/smoke-test.mjs | 93 ++++++++----------- 2 files changed, 43 insertions(+), 58 deletions(-) diff --git a/.github/workflows/run-workspace-smoke-tests.yaml b/.github/workflows/run-workspace-smoke-tests.yaml index e3e26d56e..6a99aa28c 100644 --- a/.github/workflows/run-workspace-smoke-tests.yaml +++ b/.github/workflows/run-workspace-smoke-tests.yaml @@ -37,9 +37,9 @@ jobs: path: ./artifacts - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 with: - node-version: "20" + node-version: "24" - name: Install skopeo run: | @@ -103,9 +103,9 @@ jobs: ENV_ARGS="" [ -f ../test.env ] && ENV_ARGS="--env-file ../test.env" - echo "Running: node smoke-test.mjs --plugins-yaml ../dynamic-plugins.test.yaml --skip-download $CONFIG_ARGS $ENV_ARGS" + echo "Running: npm test -- --plugins-yaml ../dynamic-plugins.test.yaml --skip-download $CONFIG_ARGS $ENV_ARGS" - node smoke-test.mjs \ + npm test -- \ --plugins-yaml ../dynamic-plugins.test.yaml \ --skip-download \ $CONFIG_ARGS \ diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.mjs index e16de3153..be1e7f4a5 100644 --- a/smoke-tests/smoke-test.mjs +++ b/smoke-tests/smoke-test.mjs @@ -82,32 +82,6 @@ function isFrontendRole(role) { return FRONTEND_ROLES.has(role); } -// --------------------------------------------------------------------------- -// Config helpers -// --------------------------------------------------------------------------- - -function deepMerge(src, dst) { - for (const [k, v] of Object.entries(src)) { - if (v && typeof v === 
"object" && !Array.isArray(v)) { - dst[k] = dst[k] ?? {}; - deepMerge(v, dst[k]); - } else { - dst[k] = v; - } - } - return dst; -} - -function loadConfigs(configPaths) { - const merged = {}; - for (const p of configPaths) { - if (!existsSync(p)) continue; - const doc = yaml.load(readFileSync(p, "utf8")); - if (doc && typeof doc === "object") deepMerge(doc, merged); - } - return merged; -} - // --------------------------------------------------------------------------- // Frontend bundle validation (Layer 1) // --------------------------------------------------------------------------- @@ -167,9 +141,8 @@ function validateFrontendBundles(plugins, pluginsRoot) { // Backend boot (startTestBackend + probe plugin) // --------------------------------------------------------------------------- -async function bootBackend(configData) { - const { startTestBackend, mockServices } = - await import("@backstage/backend-test-utils"); +async function bootBackend(configPaths) { + const { startTestBackend } = await import("@backstage/backend-test-utils"); const { dynamicPluginsFeatureLoader, CommonJSModuleLoader, @@ -209,29 +182,37 @@ async function bootBackend(configData) { }, }); - const { server } = await startTestBackend({ - features: [ - mockServices.rootConfig.factory({ data: configData }), - dynamicPluginsFeatureLoader({ - schemaLocator(pluginPackage) { - const platform = PackageRoles.getRoleInfo( - pluginPackage.manifest.backstage.role, - ).platform; - return path.join( - platform === "node" ? 
"dist" : "dist-scalprum", - "configSchema.json", - ); - }, - moduleLoader: (logger) => new CommonJSModuleLoader({ logger }), - }), - createServiceFactory({ - service: dynamicPluginsFrontendServiceRef, - deps: {}, - factory: () => ({ setResolverProvider() {} }), - }), - smokeTestProbePlugin, - ], - }); + const baseArgv = [...process.argv]; + const configArgv = configPaths.flatMap((configPath) => ["--config", configPath]); + process.argv = [...baseArgv, ...configArgv]; + + let server; + try { + ({ server } = await startTestBackend({ + features: [ + dynamicPluginsFeatureLoader({ + schemaLocator(pluginPackage) { + const platform = PackageRoles.getRoleInfo( + pluginPackage.manifest.backstage.role, + ).platform; + return path.join( + platform === "node" ? "dist" : "dist-scalprum", + "configSchema.json", + ); + }, + moduleLoader: (logger) => new CommonJSModuleLoader({ logger }), + }), + createServiceFactory({ + service: dynamicPluginsFrontendServiceRef, + deps: {}, + factory: () => ({ setResolverProvider() {} }), + }), + smokeTestProbePlugin, + ], + })); + } finally { + process.argv = baseArgv; + } const addr = server.address(); const port = typeof addr === "object" ? addr.port : 7007; @@ -576,8 +557,12 @@ async function main() { console.log("\n3. 
Booting Backstage backend (startTestBackend)"); const allConfigPaths = [...args.configs]; if (existsSync(generatedCfg)) allConfigPaths.push(generatedCfg); - const configData = loadConfigs(allConfigPaths); - const { server, port } = await bootBackend(configData); + const configPaths = allConfigPaths.filter((configPath) => { + if (existsSync(configPath)) return true; + console.warn(` WARN: config file not found, skipping: ${configPath}`); + return false; + }); + const { server, port } = await bootBackend(configPaths); let success = false; try { From c5034460a0c622f8d8641da390c8897cc6ef68fc Mon Sep 17 00:00:00 2001 From: rostalan Date: Thu, 16 Apr 2026 10:31:34 +0200 Subject: [PATCH 8/8] changed test file to .ts, addressed errors, fixed linter errors --- smoke-tests/global.d.ts | 4 + smoke-tests/package.json | 2 +- smoke-tests/{smoke-test.mjs => smoke-test.ts} | 221 +++++++++++++----- 3 files changed, 162 insertions(+), 65 deletions(-) create mode 100644 smoke-tests/global.d.ts rename smoke-tests/{smoke-test.mjs => smoke-test.ts} (76%) diff --git a/smoke-tests/global.d.ts b/smoke-tests/global.d.ts new file mode 100644 index 000000000..2244ba98a --- /dev/null +++ b/smoke-tests/global.d.ts @@ -0,0 +1,4 @@ +declare module "@backstage/backend-test-utils"; +declare module "@backstage/backend-dynamic-feature-service"; +declare module "@backstage/cli-node"; +declare module "@backstage/backend-plugin-api"; diff --git a/smoke-tests/package.json b/smoke-tests/package.json index 17db381a5..9bb53ac44 100644 --- a/smoke-tests/package.json +++ b/smoke-tests/package.json @@ -5,7 +5,7 @@ "type": "module", "description": "Lightweight smoke test harness for RHDH dynamic plugins — no Docker required", "scripts": { - "test": "node smoke-test.mjs", + "test": "node smoke-test.ts", "prettier:check": "prettier --check .", "prettier:fix": "prettier --write ." 
}, diff --git a/smoke-tests/smoke-test.mjs b/smoke-tests/smoke-test.ts similarity index 76% rename from smoke-tests/smoke-test.mjs rename to smoke-tests/smoke-test.ts index be1e7f4a5..cb98fe50a 100644 --- a/smoke-tests/smoke-test.mjs +++ b/smoke-tests/smoke-test.ts @@ -3,49 +3,114 @@ import { readFileSync, writeFileSync, existsSync, readdirSync } from "node:fs"; import { join, resolve, dirname } from "node:path"; import { fileURLToPath } from "node:url"; +import { parseArgs as parseCliArgs } from "node:util"; import yaml from "js-yaml"; const __dirname = dirname(fileURLToPath(import.meta.url)); +type ResultStatus = + | "skip" + | "pass" + | "warn" + | "fail" + | "fail-bundle" + | "fail-load"; + +type CountBuckets = { + pass: number; + fail: number; + warn?: number; + skip?: number; + [key: string]: number | undefined; +}; + +type CliArgs = { + pluginsYaml: string; + configs: string[]; + envFile: string | null; + pluginsRoot: string; + resultsFile: string; + skipDownload: boolean; +}; + +type PluginEntry = { + package: string; + disabled?: boolean; +}; + +type PluginsDoc = { + plugins?: PluginEntry[]; +}; + +type OciRef = { + imageRef: string; + pluginPath: string | null; +}; + +type PluginMeta = { + pkgName: string; + role: string; + pluginId: string | null; +}; + +type ProbeResult = { + pkgName: string; + role: string; + pluginPath: string; + status: ResultStatus; + pluginId?: string; + http?: number; + detail?: string; + error?: string; +}; + +type LoadedPlugin = { + name?: string; + platform?: string; + failure?: string; +}; + +function toErrorMessage(err: unknown): string { + return err instanceof Error ? 
err.message : String(err); +} + // --------------------------------------------------------------------------- // CLI // --------------------------------------------------------------------------- -function parseArgs(argv) { - const args = { - pluginsYaml: null, - configs: [], - envFile: null, - pluginsRoot: resolve(__dirname, "dynamic-plugins-root"), - resultsFile: resolve(__dirname, "results.json"), - skipDownload: false, - }; - for (let i = 0; i < argv.length; i++) { - switch (argv[i]) { - case "--plugins-yaml": - args.pluginsYaml = resolve(argv[++i]); - break; - case "--config": - args.configs.push(resolve(argv[++i])); - break; - case "--env-file": - args.envFile = resolve(argv[++i]); - break; - case "--skip-download": - args.skipDownload = true; - break; - } - } - if (!args.pluginsYaml) { +function parseArgs(argv: string[]): CliArgs { + const { values } = parseCliArgs({ + args: argv, + allowPositionals: false, + options: { + "plugins-yaml": { type: "string" }, + config: { type: "string", multiple: true }, + "env-file": { type: "string" }, + "skip-download": { type: "boolean", default: false }, + }, + }); + + const pluginsYaml = values["plugins-yaml"] + ? resolve(values["plugins-yaml"]) + : null; + if (!pluginsYaml) { console.error( - "Usage: node smoke-test.mjs --plugins-yaml [--config ...] [--env-file ] [--skip-download]", + "Usage: node smoke-test.ts --plugins-yaml [--config ...] [--env-file ] [--skip-download]", ); process.exit(1); } - return args; + + return { + pluginsYaml, + configs: (values.config ?? []).map((configPath) => resolve(configPath)), + envFile: values["env-file"] ? resolve(values["env-file"]) : null, + pluginsRoot: resolve(__dirname, "dynamic-plugins-root"), + resultsFile: resolve(__dirname, "results.json"), + skipDownload: values["skip-download"] ?? 
false, + }; } -function loadEnvFile(filePath) { +function loadEnvFile(filePath: string | null): void { if (!filePath || !existsSync(filePath)) return; for (const line of readFileSync(filePath, "utf8").split("\n")) { const t = line.trim(); @@ -60,12 +125,12 @@ function loadEnvFile(filePath) { // Plugins YAML // --------------------------------------------------------------------------- -function parsePluginsYaml(filePath) { - const doc = yaml.load(readFileSync(filePath, "utf8")); +function parsePluginsYaml(filePath: string): PluginEntry[] { + const doc = yaml.load(readFileSync(filePath, "utf8")) as PluginsDoc | undefined; return (doc?.plugins ?? []).filter((p) => !p.disabled); } -function parseOciRef(packageStr) { +function parseOciRef(packageStr: string): OciRef { const cleaned = packageStr.replace(/^"/, "").replace(/"$/, ""); const withoutOci = cleaned.replace(/^oci:\/\//, ""); const bangIdx = withoutOci.indexOf("!"); @@ -78,7 +143,7 @@ function parseOciRef(packageStr) { const FRONTEND_ROLES = new Set(["frontend-plugin", "frontend-plugin-module"]); -function isFrontendRole(role) { +function isFrontendRole(role: string): boolean { return FRONTEND_ROLES.has(role); } @@ -86,7 +151,7 @@ function isFrontendRole(role) { // Frontend bundle validation (Layer 1) // --------------------------------------------------------------------------- -function findJsFiles(dir) { +function findJsFiles(dir: string): boolean { const entries = readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { const full = join(dir, entry.name); @@ -99,8 +164,11 @@ function findJsFiles(dir) { return false; } -function validateFrontendBundles(plugins, pluginsRoot) { - const results = []; +function validateFrontendBundles( + plugins: PluginEntry[], + pluginsRoot: string, +): ProbeResult[] { + const results: ProbeResult[] = []; for (const plugin of plugins) { const { pluginPath } = parseOciRef(plugin.package); if (!pluginPath) continue; @@ -141,7 +209,7 @@ function 
validateFrontendBundles(plugins, pluginsRoot) { // Backend boot (startTestBackend + probe plugin) // --------------------------------------------------------------------------- -async function bootBackend(configPaths) { +async function bootBackend(configPaths: string[]): Promise<{ server: any; port: number }> { const { startTestBackend } = await import("@backstage/backend-test-utils"); const { dynamicPluginsFeatureLoader, @@ -156,13 +224,13 @@ async function bootBackend(configPaths) { const smokeTestProbePlugin = createBackendPlugin({ pluginId: "smoke-test-probe", - register(env) { + register(env: any) { env.registerInit({ deps: { http: coreServices.httpRouter, dynamicPlugins: dynamicPluginsServiceRef, }, - async init({ http, dynamicPlugins }) { + async init({ http, dynamicPlugins }: { http: any; dynamicPlugins: any }) { const { Router } = await import("express"); const router = Router(); router.get("/loaded-plugins", (_, res) => { @@ -191,7 +259,7 @@ async function bootBackend(configPaths) { ({ server } = await startTestBackend({ features: [ dynamicPluginsFeatureLoader({ - schemaLocator(pluginPackage) { + schemaLocator(pluginPackage: any) { const platform = PackageRoles.getRoleInfo( pluginPackage.manifest.backstage.role, ).platform; @@ -200,7 +268,7 @@ async function bootBackend(configPaths) { "configSchema.json", ); }, - moduleLoader: (logger) => new CommonJSModuleLoader({ logger }), + moduleLoader: (logger: any) => new CommonJSModuleLoader({ logger }), }), createServiceFactory({ service: dynamicPluginsFrontendServiceRef, @@ -223,7 +291,7 @@ async function bootBackend(configPaths) { // Plugin metadata & route probing // --------------------------------------------------------------------------- -function readPluginMeta(pluginsRoot, pluginPath) { +function readPluginMeta(pluginsRoot: string, pluginPath: string): PluginMeta { try { const pkg = JSON.parse( readFileSync(join(pluginsRoot, pluginPath, "package.json"), "utf8"), @@ -238,8 +306,12 @@ function 
readPluginMeta(pluginsRoot, pluginPath) { } } -async function probePluginRoutes(plugins, port, pluginsRoot) { - const results = []; +async function probePluginRoutes( + plugins: PluginEntry[], + port: number, + pluginsRoot: string, +): Promise { + const results: ProbeResult[] = []; for (const plugin of plugins) { const { pluginPath } = parseOciRef(plugin.package); if (!pluginPath) continue; @@ -282,7 +354,7 @@ async function probePluginRoutes(plugins, port, pluginsRoot) { pluginPath, pluginId, status: "fail", - error: err.message, + error: toErrorMessage(err), }); } } @@ -293,8 +365,12 @@ async function probePluginRoutes(plugins, port, pluginsRoot) { // Frontend plugin probing (Layer 2) // --------------------------------------------------------------------------- -async function probeFrontendPlugins(plugins, port, pluginsRoot) { - const frontendPlugins = []; +async function probeFrontendPlugins( + plugins: PluginEntry[], + port: number, + pluginsRoot: string, +): Promise { + const frontendPlugins: Array = []; for (const plugin of plugins) { const { pluginPath } = parseOciRef(plugin.package); if (!pluginPath) continue; @@ -306,7 +382,7 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { if (frontendPlugins.length === 0) return []; - const failAll = (detail) => + const failAll = (detail: string): ProbeResult[] => frontendPlugins.map((fp) => ({ pkgName: fp.pkgName, role: fp.role, @@ -321,7 +397,7 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { `http://localhost:${port}/api/smoke-test-probe/loaded-plugins`, ); } catch (err) { - return failAll(`probe endpoint unreachable: ${err.message}`); + return failAll(`probe endpoint unreachable: ${toErrorMessage(err)}`); } if (!res.ok) { @@ -339,7 +415,10 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { return failAll("invalid probe response"); } - const toFrontendProbeResult = (fp, loaded) => { + const toFrontendProbeResult = ( + fp: PluginMeta & { pluginPath: string }, + 
loaded: LoadedPlugin | undefined, + ): ProbeResult => { if (!loaded) { return { pkgName: fp.pkgName, @@ -375,11 +454,16 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { }; }; - const results = []; + const results: ProbeResult[] = []; for (const fp of frontendPlugins) { - const loaded = body.find( - (lp) => lp && typeof lp === "object" && lp.name === fp.pkgName, + const loadedCandidate = body.find( + (lp: unknown) => + lp && typeof lp === "object" && "name" in lp && (lp as LoadedPlugin).name === fp.pkgName, ); + const loaded = + loadedCandidate && typeof loadedCandidate === "object" + ? (loadedCandidate as LoadedPlugin) + : undefined; results.push(toFrontendProbeResult(fp, loaded)); } return results; @@ -389,7 +473,7 @@ async function probeFrontendPlugins(plugins, port, pluginsRoot) { // Reporting // --------------------------------------------------------------------------- -function logPassResult(result) { +function logPassResult(result: ProbeResult): string | null { if (result.pluginId) { return ` PASS ${result.pkgName} → /api/${result.pluginId} (${result.http})`; } @@ -399,8 +483,10 @@ function logPassResult(result) { return null; } -function logResultAndCollectFailures(result, failedPlugins) { - const statusHandlers = { +function logResultAndCollectFailures(result: ProbeResult, failedPlugins: string[]): void { + const statusHandlers: Partial< + Record { line: string | null; failed?: boolean }> + > = { skip: () => ({ line: ` SKIP ${result.pkgName} (${result.role})` }), pass: () => ({ line: logPassResult(result) }), warn: () => ({ @@ -429,7 +515,11 @@ function logResultAndCollectFailures(result, failedPlugins) { } } -function updateResultCounts(result, backendCounts, frontendCounts) { +function updateResultCounts( + result: ProbeResult, + backendCounts: CountBuckets, + frontendCounts: CountBuckets, +): void { const isFrontend = isFrontendRole(result.role); if (result.status === "fail-bundle" || result.status === "fail-load") { if (isFrontend) 
frontendCounts.fail++; @@ -445,9 +535,9 @@ function updateResultCounts(result, backendCounts, frontendCounts) { backendCounts[result.status] = (backendCounts[result.status] ?? 0) + 1; } -function reportAndWrite(results, resultsFile) { +function reportAndWrite(results: ProbeResult[], resultsFile: string): boolean { console.log("\n========== Smoke Test Results ==========\n"); - const failedPlugins = []; + const failedPlugins: string[] = []; for (const r of results) { logResultAndCollectFailures(r, failedPlugins); @@ -476,8 +566,11 @@ function reportAndWrite(results, resultsFile) { // Result merging // --------------------------------------------------------------------------- -function mergeFrontendResults(bundleResults, loadResults) { - const loadMap = new Map(); +function mergeFrontendResults( + bundleResults: ProbeResult[], + loadResults: ProbeResult[], +): ProbeResult[] { + const loadMap = new Map(); for (const r of loadResults) { if (!loadMap.has(r.pkgName)) loadMap.set(r.pkgName, r); } @@ -502,7 +595,7 @@ function mergeFrontendResults(bundleResults, loadResults) { // Main // --------------------------------------------------------------------------- -async function main() { +async function main(): Promise { const args = parseArgs(process.argv.slice(2)); console.log("\n=== RHDH Smoke Test (Docker-free) ===\n"); @@ -595,7 +688,7 @@ async function main() { process.exit(success ? 0 : 1); } -await main().catch((err) => { - console.error("\nSmoke test failed:", err.message || err); +await main().catch((err: unknown) => { + console.error("\nSmoke test failed:", toErrorMessage(err)); process.exit(1); });