diff --git a/docs/public/SUMMARY.md b/docs/public/SUMMARY.md index b2885024..ac6126bb 100644 --- a/docs/public/SUMMARY.md +++ b/docs/public/SUMMARY.md @@ -31,6 +31,7 @@ * [Service Accounts](operations/service-accounts.md) * [Backup & Restore](operations/backup-restore.md) * [GitOps](operations/gitops.md) +* [Outbound Webhooks](operations/outbound-webhooks.md) * [Security](operations/security.md) * [Upgrading](operations/upgrading.md) diff --git a/docs/public/operations/outbound-webhooks.md b/docs/public/operations/outbound-webhooks.md new file mode 100644 index 00000000..d4d96737 --- /dev/null +++ b/docs/public/operations/outbound-webhooks.md @@ -0,0 +1,154 @@ +# Outbound Webhooks + +VectorFlow can send HMAC-signed HTTP notifications to external systems when key events occur. Use outbound webhooks to integrate with incident management tools, CI/CD pipelines, custom dashboards, or any service that accepts HTTP callbacks. + +## Overview + +Each **webhook endpoint** is a URL that receives POST requests when one or more subscribed events fire. Requests carry Standard-Webhooks-compliant signature headers so receivers can verify authenticity. + +Key properties of each endpoint: + +- **Name** — A descriptive label shown in the management UI. +- **URL** — The HTTPS endpoint that receives event payloads. +- **Event types** — One or more event types that trigger delivery. +- **Signing secret** — Optional HMAC key. When set, every request includes a `webhook-signature` header. +- **Enabled / Disabled** — Endpoints can be temporarily disabled without deleting them. + +## Supported events + +| Event | When it fires | +|-------|---------------| +| `deploy_completed` | A pipeline deployment completed successfully | +| `deploy_rejected` | A deployment request was rejected | +| `deploy_cancelled` | A pending deployment was cancelled | +| `pipeline_crashed` | A running pipeline process exited unexpectedly | +| `node_unreachable` | A fleet node stopped sending heartbeats | +| `node_joined` | A new fleet node enrolled | +| `node_left` | A fleet node was removed | +| `promotion_completed` | A pipeline was promoted to another environment | + +## Creating a webhook endpoint + +{% stepper %} +{% step %} +### Open Webhook Settings +Navigate to **Settings → Outbound Webhooks**. +{% endstep %} +{% step %} +### Click New Endpoint +Click the **New Endpoint** button in the top-right corner. +{% endstep %} +{% step %} +### Fill in the form +- **Name** — A descriptive label (e.g., "PagerDuty Pipeline Alerts"). +- **Endpoint URL** — The HTTPS URL that will receive events. +- **Signing secret** — Optional. If provided, every request is signed and the secret is shown once — copy it before closing the dialog. +- **Event types** — Select one or more events this endpoint should receive. +{% endstep %} +{% step %} +### Create +Click **Create**. If you provided a signing secret, the dialog shows it once — copy it to a secure location. +{% endstep %} +{% endstepper %} + +{% hint style="warning" %} +The signing secret is displayed once at creation time and cannot be retrieved afterwards. Store it securely in your receiving application's configuration. +{% endhint %} + +## Payload format + +All webhook deliveries use the same envelope format: + +```json +{ + "type": "deploy_completed", + "timestamp": "2026-03-27T12:00:00.000Z", + "data": { + // Event-specific fields + } +} +``` + +| Field | Description | +|-------|-------------| +| `type` | The `AlertMetric` value that triggered this delivery | +| `timestamp` | ISO-8601 UTC timestamp of the event | +| `data` | Event-specific payload fields | + +## Verifying signatures + +When a signing secret is configured, every request includes three headers: + +| Header | Description | +|--------|-------------| +| `webhook-id` | Unique UUID for this delivery | +| `webhook-timestamp` | Unix timestamp (integer seconds) | +| `webhook-signature` | `v1,{base64(HMAC-SHA256)}` | + +To verify a request, compute: + +``` +signing_string = "{webhook-id}.{webhook-timestamp}.{raw_request_body}" +expected_sig = base64( HMAC-SHA256(signing_string, secret) ) +``` + +The received signature header is `v1,{expected_sig}`. Compare the value after the `v1,` prefix. + +{% hint style="info" %} +VectorFlow follows the [Standard Webhooks](https://www.standardwebhooks.com/) specification. Libraries are available for most languages. +{% endhint %} + +## Delivery and retry + +VectorFlow attempts delivery immediately when an event fires. If the request fails, it retries with exponential backoff: + +| Attempt | Delay | +|---------|-------| +| 1 | Immediate | +| 2 | 30 seconds | +| 3 | 5 minutes | +| 4 | 30 minutes | +| 5+ | 2 hours | + +**Permanent failures** (HTTP 4xx excluding 429, DNS errors, connection refused) are moved to **dead-letter** immediately and are not retried. + +**Transient failures** (HTTP 5xx, HTTP 429, timeouts) are retried up to the schedule above. + +## Delivery history + +Each endpoint row in the settings UI can be expanded to show recent deliveries: + +- **Event type** — Which event triggered the delivery. +- **Status** — `success`, `failed`, `dead_letter`, or `pending`. +- **HTTP status** — The HTTP status code returned by the receiver. +- **Attempt** — Which retry attempt this represents. +- **Requested / Completed** — Relative timestamps. + +## Test delivery + +To send a test delivery to an endpoint without waiting for a real event, click the **Play** button (▶) in the endpoint row. The test payload is: + +```json +{ + "type": "test", + "timestamp": "2026-03-27T12:00:00.000Z", + "data": { + "message": "Test delivery from VectorFlow", + "endpointId": "..." + } +} +``` + +The UI shows a success or failure notification. Check delivery history for the HTTP status code and any error details. + +## Managing endpoints + +| Action | How | +|--------|-----| +| **Enable / Disable** | Click the toggle icon in the endpoint row | +| **Edit** | Click the pencil icon to update name, URL, events, or rotate the secret | +| **Delete** | Click the trash icon — all delivery history is also deleted | + +{% hint style="info" %} +Disabling an endpoint stops deliveries immediately without deleting the endpoint or its history. Re-enable it when ready to receive events again. +{% endhint %} diff --git a/docs/public/reference/api.md b/docs/public/reference/api.md index 267416f6..473f6c69 100644 --- a/docs/public/reference/api.md +++ b/docs/public/reference/api.md @@ -479,6 +479,50 @@ Common error codes: --- +## OpenAPI Specification + +VectorFlow provides a machine-readable [OpenAPI 3.1](https://spec.openapis.org/oas/v3.1.0) specification covering all REST v1 endpoints and key tRPC procedures. + +### Fetching the spec + +```bash +curl -s https://vectorflow.example.com/api/v1/openapi.json | jq .info +``` + +The spec is served at `/api/v1/openapi.json` with CORS enabled — you can fetch it from any origin without credentials. + +### Importing into tools + +**Postman:** File > Import > paste URL `https://vectorflow.example.com/api/v1/openapi.json` + +**Swagger UI / Stoplight:** Point to the spec URL or paste the JSON content. + +### Client generation + +Generate a typed API client in any language using [openapi-generator](https://openapi-generator.tech/): + +```bash +npx @openapitools/openapi-generator-cli generate \ + -i https://vectorflow.example.com/api/v1/openapi.json \ + -g python \ + -o ./vectorflow-client +``` + +### What's included + +The spec documents two API surfaces: + +| Surface | Auth | Endpoints | +|---------|------|-----------| +| REST v1 (`/api/v1/*`) | Service account Bearer token | Pipeline CRUD, deploy, rollback, nodes, secrets, alerts, audit | +| tRPC (`/api/trpc/*`) | Session cookie | Pipeline management, fleet, environments, secrets, deploy, alerts, service accounts | + +{% hint style="info" %} +**tRPC encoding note:** tRPC endpoints use [SuperJSON](https://github.com/blitz-js/superjson) encoding. For queries, input is URL-encoded JSON in `?input=` (wrap as `{"json": }`). For mutations, the body is `{"json": }`. Using a tRPC client is recommended for full type safety; the OpenAPI spec is provided for discoverability and non-TypeScript integrations. +{% endhint %} + +--- + ## REST API (v1) The REST API provides a standard HTTP interface for automation and CI/CD. All endpoints require a [Service Account](../operations/service-accounts.md) API key. diff --git a/docs/public/user-guide/fleet.md b/docs/public/user-guide/fleet.md index 1431cd4b..f9cbc551 100644 --- a/docs/public/user-guide/fleet.md +++ b/docs/public/user-guide/fleet.md @@ -166,6 +166,75 @@ The deploy dialog shows a live count of matching nodes (e.g., "3 of 5 nodes matc Changing a pipeline's node selector on a subsequent deploy updates the targeting. Nodes that no longer match will stop the pipeline on their next poll. {% endhint %} +## Node groups + +Node groups let administrators segment their fleet into logical clusters based on node labels -- for example by datacenter, role, or region. Groups are managed from **Settings > Fleet**. + +Each node group has: + +| Field | Description | +|-------|-------------| +| **Name** | A unique display name for the group within the environment. | +| **Criteria** | A label selector (key-value pairs) that determines which enrolling nodes match the group. An empty criteria matches all nodes. | +| **Label template** | Key-value labels that are automatically merged into a node's labels when it enrolls and matches the group's criteria. | +| **Required labels** | Label keys that every node should have. Nodes missing any required label are flagged as non-compliant in the fleet list. | + +{% hint style="info" %} +Label templates are applied once at enrollment time. Changing a group's template does not retroactively update existing nodes. +{% endhint %} + +## Label compliance + +When node groups define **required labels**, the fleet list displays a **Non-compliant** badge next to any node that is missing one or more of those labels. This is a warn-only indicator -- non-compliant nodes continue to receive heartbeats and deployments normally. + +To resolve a non-compliant node, add the missing labels via the node detail page or ensure the node enrolls with matching labels so that group templates apply automatically. + +## Fleet health dashboard + +The Health tab on the Fleet page provides an aggregated view of fleet status organized by node group. This is especially useful for large fleets where you want to see health at a glance before drilling into individual nodes. + +### Group summary cards + +Each node group is represented as a collapsible card showing three metrics: + +| Metric | Description | +|--------|-------------| +| **Online** | Count of HEALTHY nodes out of the group total (e.g. `4/5`). Shown in amber when any nodes are offline. | +| **Alerts** | Count of nodes with at least one firing alert rule. Shown in red when greater than zero. | +| **Compliance** | Percentage of nodes that have all required labels defined by the group. Shown in amber when below 100%. | + +### Drill-down + +Click any group card to expand it and see a per-node detail table with: + +- **Name** — the node name, linked to its detail page +- **Status** — current health status badge (Healthy, Degraded, Unreachable, Unknown) +- **CPU Load** — the 1-minute load average from the latest heartbeat, or `--` if no metrics are available +- **Last Seen** — how long ago the node last sent a heartbeat +- **Compliance** — whether the node has all required labels for the group + +Nodes are sorted by health status with the least healthy nodes shown first, then alphabetically by name. + +### Filtering + +The toolbar above the group cards supports three filter types: + +- **Group** — show only a specific group card +- **Labels** — filter by label key/value pairs (applied to the per-node detail table inside expanded cards) +- **Compliance** — toggle between All, Compliant (100% compliance rate), or Non-compliant (below 100%) + +{% hint style="info" %} +Filter state is stored in the URL as query parameters, so you can copy and share the URL with filters applied. +{% endhint %} + +### Ungrouped nodes + +Nodes that do not match the criteria of any defined group appear under an **Ungrouped** card. This card behaves the same as any other group card — you can expand it to see the per-node table. + +{% hint style="info" %} +The Ungrouped card only appears when at least one node exists outside all group criteria. If all nodes belong to a group, no Ungrouped card is shown. +{% endhint %} + ## Maintenance mode Maintenance mode lets you temporarily stop all pipelines on a node without removing it from the fleet. This is useful for host upgrades, kernel patches, disk maintenance, or any situation where you need the node idle but still connected. diff --git a/docs/public/user-guide/pipeline-editor.md b/docs/public/user-guide/pipeline-editor.md index 552a333f..04d00989 100644 --- a/docs/public/user-guide/pipeline-editor.md +++ b/docs/public/user-guide/pipeline-editor.md @@ -258,6 +258,32 @@ Click the pipeline name in the top-left corner of the editor to rename it inline On Windows and Linux, use `Ctrl` instead of `Cmd` for all keyboard shortcuts. {% endhint %} +## Cross-Environment Promotion + +Promote a pipeline from one environment to another (e.g., dev to staging, staging to production) with built-in validation and approval workflow. + +### Promoting a Pipeline + +1. From the pipeline list, click the **...** menu on any pipeline and select **Promote to...** +2. Select the **target environment** and optionally rename the pipeline +3. VectorFlow validates that all secret references in the pipeline exist in the target environment +4. Review the **substitution diff** showing what will change between environments +5. Click **Confirm Promotion** to submit + +### Approval Workflow + +If the target environment has **Require Deploy Approval** enabled, the promotion creates a request that must be approved by an administrator before the pipeline appears in the target environment. + +If approval is not required, the pipeline is promoted immediately. + +### Secret Pre-flight Validation + +Before promotion proceeds, VectorFlow checks that every `SECRET[name]` reference in the source pipeline has a corresponding secret defined in the target environment. If any secrets are missing, promotion is blocked with a clear list of which secrets need to be created. + +### Promotion History + +Each pipeline's detail page shows a promotion history log with source environment, target environment, who promoted, and the current status. + ## AI-Powered Suggestions When AI is configured for your team (Settings → AI), two AI features become available: diff --git a/package.json b/package.json index f574620c..db704d81 100644 --- a/package.json +++ b/package.json @@ -9,13 +9,15 @@ "start": "next start", "lint": "eslint", "test": "vitest run", - "postinstall": "prisma generate" + "postinstall": "prisma generate", + "generate:openapi": "tsx scripts/generate-openapi.ts" }, "dependencies": { "@auth/prisma-adapter": "^2.11.1", "@dagrejs/dagre": "^2.0.4", "@hookform/resolvers": "^5.2.2", "@monaco-editor/react": "^4.7.0", + "@octokit/rest": "^22.0.1", "@prisma/adapter-pg": "^7.4.2", "@prisma/client": "^7.4.2", "@prisma/client-runtime-utils": "^7.4.2", @@ -68,6 +70,7 @@ } }, "devDependencies": { + "@asteasolutions/zod-to-openapi": "^8.5.0", "@next/bundle-analyzer": "^16.2.1", "@tailwindcss/postcss": "^4", "@types/bcryptjs": "^3.0.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0688aab3..4c5a84e7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -24,6 +24,9 @@ importers: '@monaco-editor/react': specifier: ^4.7.0 version: 4.7.0(monaco-editor@0.55.1)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) + '@octokit/rest': + specifier: ^22.0.1 + version: 22.0.1 '@prisma/adapter-pg': specifier: ^7.4.2 version: 7.4.2 @@ -145,6 +148,9 @@ importers: specifier: ^5.0.11 version: 5.0.11(@types/react@19.2.14)(immer@11.1.4)(react@19.2.3)(use-sync-external-store@1.6.0(react@19.2.3)) devDependencies: + '@asteasolutions/zod-to-openapi': + specifier: ^8.5.0 + version: 8.5.0(zod@4.3.6) '@next/bundle-analyzer': specifier: ^16.2.1 version: 16.2.1 @@ -204,10 +210,10 @@ importers: version: 5.9.3 vitest: specifier: ^4.1.0 - version: 4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)) + version: 4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) vitest-mock-extended: specifier: ^3.1.0 - version: 3.1.0(typescript@5.9.3)(vitest@4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0))) + version: 3.1.0(typescript@5.9.3)(vitest@4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) packages: @@ -219,6 +225,11 @@ packages: resolution: {integrity: sha512-9q/yCljni37pkMr4sPrI3G4jqdIk074+iukc5aFJl7kmDCCsiJrbZ6zKxnES1Gwg+i9RcDZwvktl23puGslmvA==} hasBin: true + '@asteasolutions/zod-to-openapi@8.5.0': + resolution: {integrity: sha512-SABbKiObg5dLRiTFnqiW1WWwGcg1BJfmHtT2asIBnBHg6Smy/Ms2KHc650+JI4Hw7lSkdiNebEGXpwoxfben8Q==} + peerDependencies: + zod: ^4.0.0 + '@auth/core@0.41.0': resolution: {integrity: sha512-Wd7mHPQ/8zy6Qj7f4T46vg3aoor8fskJm6g2Zyj064oQ3+p0xNZXAV60ww0hY+MbTesfu29kK14Zk5d5JTazXQ==} peerDependencies: @@ -1002,6 +1013,58 @@ packages: resolution: {integrity: sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==} engines: {node: '>=12.4.0'} + '@octokit/auth-token@6.0.0': + resolution: {integrity: sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==} + engines: {node: '>= 20'} + + '@octokit/core@7.0.6': + resolution: {integrity: sha512-DhGl4xMVFGVIyMwswXeyzdL4uXD5OGILGX5N8Y+f6W7LhC1Ze2poSNrkF/fedpVDHEEZ+PHFW0vL14I+mm8K3Q==} + engines: {node: '>= 20'} + + '@octokit/endpoint@11.0.3': + resolution: {integrity: sha512-FWFlNxghg4HrXkD3ifYbS/IdL/mDHjh9QcsNyhQjN8dplUoZbejsdpmuqdA76nxj2xoWPs7p8uX2SNr9rYu0Ag==} + engines: {node: '>= 20'} + + '@octokit/graphql@9.0.3': + resolution: {integrity: sha512-grAEuupr/C1rALFnXTv6ZQhFuL1D8G5y8CN04RgrO4FIPMrtm+mcZzFG7dcBm+nq+1ppNixu+Jd78aeJOYxlGA==} + engines: {node: '>= 20'} + + '@octokit/openapi-types@27.0.0': + resolution: {integrity: sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA==} + + '@octokit/plugin-paginate-rest@14.0.0': + resolution: {integrity: sha512-fNVRE7ufJiAA3XUrha2omTA39M6IXIc6GIZLvlbsm8QOQCYvpq/LkMNGyFlB1d8hTDzsAXa3OKtybdMAYsV/fw==} + engines: {node: '>= 20'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-request-log@6.0.0': + resolution: {integrity: sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q==} + engines: {node: '>= 20'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-rest-endpoint-methods@17.0.0': + resolution: {integrity: sha512-B5yCyIlOJFPqUUeiD0cnBJwWJO8lkJs5d8+ze9QDP6SvfiXSz1BF+91+0MeI1d2yxgOhU/O+CvtiZ9jSkHhFAw==} + engines: {node: '>= 20'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/request-error@7.1.0': + resolution: {integrity: sha512-KMQIfq5sOPpkQYajXHwnhjCC0slzCNScLHs9JafXc4RAJI+9f+jNDlBNaIMTvazOPLgb4BnlhGJOTbnN0wIjPw==} + engines: {node: '>= 20'} + + '@octokit/request@10.0.8': + resolution: {integrity: sha512-SJZNwY9pur9Agf7l87ywFi14W+Hd9Jg6Ifivsd33+/bGUQIjNujdFiXII2/qSlN2ybqUHfp5xpekMEjIBTjlSw==} + engines: {node: '>= 20'} + + '@octokit/rest@22.0.1': + resolution: {integrity: sha512-Jzbhzl3CEexhnivb1iQ0KJ7s5vvjMWcmRtq5aUsKmKDrRW6z3r84ngmiFKFvpZjpiU/9/S6ITPFRpn5s/3uQJw==} + engines: {node: '>= 20'} + + '@octokit/types@16.0.0': + resolution: {integrity: sha512-sKq+9r1Mm4efXW1FCk7hFSeJo4QKreL/tTbR0rz/qx/r1Oa2VV83LTA/H/MuCOX7uCIJmQVRKBcbmWoySjAnSg==} + '@open-draft/deferred-promise@2.2.0': resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} @@ -2432,6 +2495,9 @@ packages: resolution: {integrity: sha512-GlF5wPWnSa/X5LKM1o0wz0suXIINz1iHRLvTS+sLyi7XPbe5ycmYI3DlZqVGZZtDgl4DmasFg7gOB3JYbphV5g==} hasBin: true + before-after-hook@4.0.0: + resolution: {integrity: sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==} + bintrees@1.0.2: resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} @@ -3113,6 +3179,9 @@ packages: resolution: {integrity: sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==} engines: {node: '>=8.0.0'} + fast-content-type-parse@3.0.0: + resolution: {integrity: sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==} + fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -3680,6 +3749,9 @@ packages: json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + json-with-bigint@3.5.8: + resolution: {integrity: sha512-eq/4KP6K34kwa7TcFdtvnftvHCD9KvHOGGICWwMFc4dOOKF5t4iYqnfLK8otCRCRv06FXOzGGyqE8h8ElMvvdw==} + json5@1.0.2: resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} hasBin: true @@ -4186,6 +4258,9 @@ packages: resolution: {integrity: sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw==} engines: {node: '>=20'} + openapi3-ts@4.5.0: + resolution: {integrity: sha512-jaL+HgTq2Gj5jRcfdutgRGLosCy/hT8sQf6VOy+P+g36cZOjI1iukdPnijC+4CmeRzg/jEllJUboEic2FhxhtQ==} + opener@1.5.2: resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} hasBin: true @@ -5045,6 +5120,9 @@ packages: resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==} engines: {node: '>=18'} + universal-user-agent@7.0.3: + resolution: {integrity: sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==} + universalify@2.0.1: resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} engines: {node: '>= 10.0.0'} @@ -5287,6 +5365,11 @@ packages: yallist@3.1.1: resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + yaml@2.8.3: + resolution: {integrity: sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg==} + engines: {node: '>= 14.6'} + hasBin: true + yargs-parser@18.1.3: resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} engines: {node: '>=6'} @@ -5379,6 +5462,11 @@ snapshots: package-manager-detector: 1.6.0 tinyexec: 1.0.2 + '@asteasolutions/zod-to-openapi@8.5.0(zod@4.3.6)': + dependencies: + openapi3-ts: 4.5.0 + zod: 4.3.6 + '@auth/core@0.41.0(nodemailer@8.0.4)': dependencies: '@panva/hkdf': 1.2.1 @@ -6109,6 +6197,69 @@ snapshots: '@nolyfill/is-core-module@1.0.39': {} + '@octokit/auth-token@6.0.0': {} + + '@octokit/core@7.0.6': + dependencies: + '@octokit/auth-token': 6.0.0 + '@octokit/graphql': 9.0.3 + '@octokit/request': 10.0.8 + '@octokit/request-error': 7.1.0 + '@octokit/types': 16.0.0 + before-after-hook: 4.0.0 + universal-user-agent: 7.0.3 + + '@octokit/endpoint@11.0.3': + dependencies: + '@octokit/types': 16.0.0 + universal-user-agent: 7.0.3 + + '@octokit/graphql@9.0.3': + dependencies: + '@octokit/request': 10.0.8 + '@octokit/types': 16.0.0 + universal-user-agent: 7.0.3 + + '@octokit/openapi-types@27.0.0': {} + + '@octokit/plugin-paginate-rest@14.0.0(@octokit/core@7.0.6)': + dependencies: + '@octokit/core': 7.0.6 + '@octokit/types': 16.0.0 + + '@octokit/plugin-request-log@6.0.0(@octokit/core@7.0.6)': + dependencies: + '@octokit/core': 7.0.6 + + '@octokit/plugin-rest-endpoint-methods@17.0.0(@octokit/core@7.0.6)': + dependencies: + '@octokit/core': 7.0.6 + '@octokit/types': 16.0.0 + + '@octokit/request-error@7.1.0': + dependencies: + '@octokit/types': 16.0.0 + + '@octokit/request@10.0.8': + dependencies: + '@octokit/endpoint': 11.0.3 + '@octokit/request-error': 7.1.0 + '@octokit/types': 16.0.0 + fast-content-type-parse: 3.0.0 + json-with-bigint: 3.5.8 + universal-user-agent: 7.0.3 + + '@octokit/rest@22.0.1': + dependencies: + '@octokit/core': 7.0.6 + '@octokit/plugin-paginate-rest': 14.0.0(@octokit/core@7.0.6) + '@octokit/plugin-request-log': 6.0.0(@octokit/core@7.0.6) + '@octokit/plugin-rest-endpoint-methods': 17.0.0(@octokit/core@7.0.6) + + '@octokit/types@16.0.0': + dependencies: + '@octokit/openapi-types': 27.0.0 + '@open-draft/deferred-promise@2.2.0': {} '@open-draft/logger@0.3.0': @@ -7375,14 +7526,14 @@ snapshots: chai: 6.2.2 tinyrainbow: 3.1.0 - '@vitest/mocker@4.1.0(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0))': + '@vitest/mocker@4.1.0(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))': dependencies: '@vitest/spy': 4.1.0 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: msw: 2.12.10(@types/node@20.19.35)(typescript@5.9.3) - vite: 8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0) + vite: 8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3) '@vitest/pretty-format@4.1.0': dependencies: @@ -7579,6 +7730,8 @@ snapshots: bcryptjs@3.0.3: {} + before-after-hook@4.0.0: {} + bintrees@1.0.2: {} body-parser@2.2.2: @@ -8431,6 +8584,8 @@ snapshots: dependencies: pure-rand: 6.1.0 + fast-content-type-parse@3.0.0: {} + fast-deep-equal@3.1.3: {} fast-equals@4.0.3: {} @@ -8951,6 +9106,8 @@ snapshots: json-stable-stringify-without-jsonify@1.0.1: {} + json-with-bigint@3.5.8: {} + json5@1.0.2: dependencies: minimist: 1.2.8 @@ -9396,6 +9553,10 @@ snapshots: powershell-utils: 0.1.0 wsl-utils: 0.3.1 + openapi3-ts@4.5.0: + dependencies: + yaml: 2.8.3 + opener@1.5.2: {} optionator@0.9.4: @@ -10440,6 +10601,8 @@ snapshots: unicorn-magic@0.3.0: {} + universal-user-agent@7.0.3: {} + universalify@2.0.1: {} unpipe@1.0.0: {} @@ -10526,7 +10689,7 @@ snapshots: d3-time: 3.1.0 d3-timer: 3.0.1 - vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0): + vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3): dependencies: lightningcss: 1.32.0 picomatch: 4.0.4 @@ -10539,17 +10702,18 @@ snapshots: fsevents: 2.3.3 jiti: 2.6.1 tsx: 4.21.0 + yaml: 2.8.3 - vitest-mock-extended@3.1.0(typescript@5.9.3)(vitest@4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0))): + vitest-mock-extended@3.1.0(typescript@5.9.3)(vitest@4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))): dependencies: ts-essentials: 10.1.1(typescript@5.9.3) typescript: 5.9.3 - vitest: 4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)) + vitest: 4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) - vitest@4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)): + vitest@4.1.0(@opentelemetry/api@1.9.1)(@types/node@20.19.35)(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)): dependencies: '@vitest/expect': 4.1.0 - '@vitest/mocker': 4.1.0(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)) + '@vitest/mocker': 4.1.0(msw@2.12.10(@types/node@20.19.35)(typescript@5.9.3))(vite@8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) '@vitest/pretty-format': 4.1.0 '@vitest/runner': 4.1.0 '@vitest/snapshot': 4.1.0 @@ -10566,7 +10730,7 @@ snapshots: tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.1.0 - vite: 8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0) + vite: 8.0.1(@types/node@20.19.35)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: '@opentelemetry/api': 1.9.1 @@ -10682,6 +10846,8 @@ snapshots: yallist@3.1.1: {} + yaml@2.8.3: {} + yargs-parser@18.1.3: dependencies: camelcase: 5.3.1 diff --git a/prisma/migrations/20260326400000_phase2_fleet_organization/migration.sql b/prisma/migrations/20260326400000_phase2_fleet_organization/migration.sql new file mode 100644 index 00000000..99e947cc --- /dev/null +++ b/prisma/migrations/20260326400000_phase2_fleet_organization/migration.sql @@ -0,0 +1,36 @@ +-- Phase 2: Fleet Organization +-- Adds NodeGroup model and PipelineGroup parentId self-reference + +-- AlterTable: Remove unique constraint on PipelineGroup(environmentId, name) +-- and add parentId self-reference +ALTER TABLE "PipelineGroup" DROP CONSTRAINT "PipelineGroup_environmentId_name_key"; + +ALTER TABLE "PipelineGroup" ADD COLUMN "parentId" TEXT; + +ALTER TABLE "PipelineGroup" ADD CONSTRAINT "PipelineGroup_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "PipelineGroup"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- CreateIndex: index on PipelineGroup.parentId +CREATE INDEX "PipelineGroup_parentId_idx" ON "PipelineGroup"("parentId"); + +-- CreateTable: NodeGroup +CREATE TABLE "NodeGroup" ( + "id" TEXT NOT NULL, + "name" TEXT NOT NULL, + "environmentId" TEXT NOT NULL, + "criteria" JSONB NOT NULL DEFAULT '{}', + "labelTemplate" JSONB NOT NULL DEFAULT '{}', + "requiredLabels" JSONB NOT NULL DEFAULT '[]', + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "NodeGroup_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "NodeGroup_environmentId_name_key" ON "NodeGroup"("environmentId", "name"); + +-- CreateIndex +CREATE INDEX "NodeGroup_environmentId_idx" ON "NodeGroup"("environmentId"); + +-- AddForeignKey +ALTER TABLE "NodeGroup" ADD CONSTRAINT "NodeGroup_environmentId_fkey" FOREIGN KEY ("environmentId") REFERENCES "Environment"("id") ON DELETE RESTRICT ON UPDATE CASCADE; diff --git a/prisma/migrations/20260327000000_add_promotion_request/migration.sql b/prisma/migrations/20260327000000_add_promotion_request/migration.sql new file mode 100644 index 00000000..a8fc04ef --- /dev/null +++ b/prisma/migrations/20260327000000_add_promotion_request/migration.sql @@ -0,0 +1,48 @@ +-- CreateTable +CREATE TABLE "PromotionRequest" ( + "id" TEXT NOT NULL, + "sourcePipelineId" TEXT NOT NULL, + "targetPipelineId" TEXT, + "sourceEnvironmentId" TEXT NOT NULL, + "targetEnvironmentId" TEXT NOT NULL, + "status" TEXT NOT NULL DEFAULT 'PENDING', + "promotedById" TEXT, + "approvedById" TEXT, + "nodesSnapshot" JSONB, + "edgesSnapshot" JSONB, + "globalConfigSnapshot" JSONB, + "targetPipelineName" TEXT, + "reviewNote" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "reviewedAt" TIMESTAMP(3), + "deployedAt" TIMESTAMP(3), + + CONSTRAINT "PromotionRequest_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE INDEX "PromotionRequest_sourcePipelineId_status_idx" ON "PromotionRequest"("sourcePipelineId", "status"); + +-- CreateIndex +CREATE INDEX "PromotionRequest_sourceEnvironmentId_idx" ON "PromotionRequest"("sourceEnvironmentId"); + +-- CreateIndex +CREATE INDEX "PromotionRequest_targetEnvironmentId_idx" ON "PromotionRequest"("targetEnvironmentId"); + +-- AddForeignKey +ALTER TABLE "PromotionRequest" ADD CONSTRAINT "PromotionRequest_sourcePipelineId_fkey" FOREIGN KEY ("sourcePipelineId") REFERENCES "Pipeline"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "PromotionRequest" ADD CONSTRAINT "PromotionRequest_targetPipelineId_fkey" FOREIGN KEY ("targetPipelineId") REFERENCES "Pipeline"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "PromotionRequest" ADD CONSTRAINT "PromotionRequest_sourceEnvironmentId_fkey" FOREIGN KEY ("sourceEnvironmentId") REFERENCES "Environment"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "PromotionRequest" ADD CONSTRAINT "PromotionRequest_targetEnvironmentId_fkey" FOREIGN KEY ("targetEnvironmentId") REFERENCES "Environment"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "PromotionRequest" ADD CONSTRAINT "PromotionRequest_promotedById_fkey" FOREIGN KEY ("promotedById") REFERENCES "User"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "PromotionRequest" ADD CONSTRAINT "PromotionRequest_approvedById_fkey" FOREIGN KEY ("approvedById") REFERENCES "User"("id") ON DELETE SET NULL ON UPDATE CASCADE; diff --git a/prisma/migrations/20260327100000_add_gitops_promotion_fields/migration.sql b/prisma/migrations/20260327100000_add_gitops_promotion_fields/migration.sql new file mode 100644 index 00000000..2ec9234b --- /dev/null +++ b/prisma/migrations/20260327100000_add_gitops_promotion_fields/migration.sql @@ -0,0 +1,3 @@ +-- AlterTable: Add GitOps promotion tracking fields to PromotionRequest +ALTER TABLE "PromotionRequest" ADD COLUMN "prUrl" TEXT; +ALTER TABLE "PromotionRequest" ADD COLUMN "prNumber" INTEGER; diff --git a/prisma/schema.prisma b/prisma/schema.prisma index ecfd80d3..6b788fe1 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -34,6 +34,8 @@ model User { deployRequestsMade DeployRequest[] @relation("deployRequester") deployRequestsReviewed DeployRequest[] @relation("deployReviewer") deployRequestsExecuted DeployRequest[] @relation("deployExecutor") + promotionRequests PromotionRequest[] @relation("PromotionRequester") + promotionApprovals PromotionRequest[] @relation("PromotionApprover") preferences UserPreference[] aiConversationsCreated AiConversation[] @relation("AiConversationCreatedBy") aiMessagesCreated AiMessage[] @relation("AiMessageCreatedBy") @@ -70,6 +72,7 @@ model Team { vrlSnippets VrlSnippet[] alertRules AlertRule[] availableTags Json? @default("[]") // string[] of admin-defined classification tags + webhookEndpoints WebhookEndpoint[] // AI-powered suggestions configuration aiProvider String? // "openai" | "anthropic" | "custom" @@ -136,7 +139,7 @@ model Environment { gitRepoUrl String? gitBranch String? @default("main") gitToken String? // Stored encrypted via crypto.ts - gitOpsMode String @default("off") // "off" | "push" | "bidirectional" + gitOpsMode String @default("off") // "off" | "push" | "bidirectional" | "promotion" gitWebhookSecret String? // HMAC secret for validating incoming git webhooks requireDeployApproval Boolean @default(false) alertRules AlertRule[] @@ -147,7 +150,10 @@ model Environment { teamDefaults Team[] @relation("teamDefault") sharedComponents SharedComponent[] pipelineGroups PipelineGroup[] + nodeGroups NodeGroup[] stagedRollouts StagedRollout[] + promotionSources PromotionRequest[] @relation("PromotionSourceEnv") + promotionTargets PromotionRequest[] @relation("PromotionTargetEnv") createdAt DateTime @default(now()) } @@ -271,12 +277,30 @@ enum ProcessStatus { } model PipelineGroup { - id String @id @default(cuid()) + id String @id @default(cuid()) name String color String? environmentId String - environment Environment @relation(fields: [environmentId], references: [id]) + environment Environment @relation(fields: [environmentId], references: [id]) + parentId String? + parent PipelineGroup? @relation("GroupChildren", fields: [parentId], references: [id], onDelete: SetNull) + children PipelineGroup[] @relation("GroupChildren") pipelines Pipeline[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@index([environmentId]) + @@index([parentId]) +} + +model NodeGroup { + id String @id @default(cuid()) + name String + environmentId String + environment Environment @relation(fields: [environmentId], references: [id]) + criteria Json @default("{}") + labelTemplate Json @default("{}") + requiredLabels Json @default("[]") createdAt DateTime @default(now()) updatedAt DateTime @updatedAt @@ -321,6 +345,8 @@ model Pipeline { stagedRollouts StagedRollout[] upstreamDeps PipelineDependency[] @relation("PipelineDownstream") downstreamDeps PipelineDependency[] @relation("PipelineUpstream") + promotionSources PromotionRequest[] @relation("PromotionSource") + promotionTargets PromotionRequest[] @relation("PromotionTarget") createdAt DateTime @default(now()) updatedAt DateTime @updatedAt } @@ -702,6 +728,37 @@ model DeployRequest { @@index([environmentId, status]) } +model PromotionRequest { + id String @id @default(cuid()) + sourcePipelineId String + sourcePipeline Pipeline @relation("PromotionSource", fields: [sourcePipelineId], references: [id], onDelete: Cascade) + targetPipelineId String? + targetPipeline Pipeline? @relation("PromotionTarget", fields: [targetPipelineId], references: [id], onDelete: SetNull) + sourceEnvironmentId String + sourceEnvironment Environment @relation("PromotionSourceEnv", fields: [sourceEnvironmentId], references: [id], onDelete: Cascade) + targetEnvironmentId String + targetEnvironment Environment @relation("PromotionTargetEnv", fields: [targetEnvironmentId], references: [id], onDelete: Cascade) + status String @default("PENDING") // PENDING | APPROVED | DEPLOYED | REJECTED | CANCELLED | AWAITING_PR_MERGE | DEPLOYING + promotedById String? + promotedBy User? @relation("PromotionRequester", fields: [promotedById], references: [id], onDelete: SetNull) + approvedById String? + approvedBy User? @relation("PromotionApprover", fields: [approvedById], references: [id], onDelete: SetNull) + nodesSnapshot Json? + edgesSnapshot Json? + globalConfigSnapshot Json? + targetPipelineName String? + reviewNote String? + prUrl String? // GitHub PR HTML URL (GitOps promotion only) + prNumber Int? // GitHub PR number (GitOps promotion only) + createdAt DateTime @default(now()) + reviewedAt DateTime? + deployedAt DateTime? + + @@index([sourcePipelineId, status]) + @@index([sourceEnvironmentId]) + @@index([targetEnvironmentId]) +} + enum AlertMetric { // Infrastructure (threshold-based, per-node) node_unreachable @@ -729,6 +786,8 @@ enum AlertMetric { certificate_expiring node_joined node_left + // Phase 5 event — enum value added early so subscriptions can be created + promotion_completed } enum AlertCondition { @@ -781,6 +840,41 @@ model AlertWebhook { @@index([environmentId]) } +model WebhookEndpoint { + id String @id @default(cuid()) + teamId String + team Team @relation(fields: [teamId], references: [id], onDelete: Cascade) + name String + url String + eventTypes AlertMetric[] + encryptedSecret String? + enabled Boolean @default(true) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + deliveries WebhookDelivery[] + + @@index([teamId]) +} + +model WebhookDelivery { + id String @id @default(cuid()) + webhookEndpointId String + webhookEndpoint WebhookEndpoint @relation(fields: [webhookEndpointId], references: [id], onDelete: Cascade) + eventType AlertMetric + msgId String + payload Json + status String // 'pending' | 'success' | 'failed' | 'dead_letter' + statusCode Int? + errorMessage String? + attemptNumber Int @default(1) + nextRetryAt DateTime? + requestedAt DateTime @default(now()) + completedAt DateTime? + + @@index([webhookEndpointId, requestedAt]) + @@index([status, nextRetryAt]) +} + model AlertEvent { id String @id @default(cuid()) alertRuleId String diff --git a/scripts/generate-openapi.ts b/scripts/generate-openapi.ts new file mode 100644 index 00000000..301e82c7 --- /dev/null +++ b/scripts/generate-openapi.ts @@ -0,0 +1,84 @@ +/** + * generate-openapi.ts + * + * Build-time script that generates the VectorFlow OpenAPI 3.1 specification + * and writes it to public/openapi.json for static access. + * + * Usage: + * pnpm generate:openapi + * + * Output: + * public/openapi.json — Machine-readable OpenAPI 3.1 specification + */ + +import { writeFileSync, mkdirSync } from "fs"; +import { join } from "path"; +import { generateOpenAPISpec } from "../src/app/api/v1/_lib/openapi-spec"; + +try { + const spec = generateOpenAPISpec(); + + const outDir = join(process.cwd(), "public"); + mkdirSync(outDir, { recursive: true }); + + const jsonOutput = JSON.stringify(spec, null, 2); + writeFileSync(join(outDir, "openapi.json"), jsonOutput, "utf8"); + + // Count paths and operations, split by surface (REST v1 vs tRPC) + const paths = spec.paths as Record>; + const httpMethods = ["get", "post", "put", "delete", "patch", "head", "options"]; + + let restOps = 0; + let trpcOps = 0; + const duplicateOperationIds = new Set(); + const seenOperationIds = new Set(); + const pathsWithNoOps: string[] = []; + + for (const [path, methods] of Object.entries(paths)) { + const ops = Object.entries(methods).filter(([m]) => httpMethods.includes(m)); + if (ops.length === 0) { + pathsWithNoOps.push(path); + continue; + } + + for (const [, operation] of ops) { + const isTrpc = operation.tags?.includes("tRPC") ?? path.startsWith("/api/trpc/"); + if (isTrpc) { + trpcOps++; + } else { + restOps++; + } + + // Check for duplicate operationIds + if (operation.operationId) { + if (seenOperationIds.has(operation.operationId)) { + duplicateOperationIds.add(operation.operationId); + } + seenOperationIds.add(operation.operationId); + } + } + } + + const totalOps = restOps + trpcOps; + const pathCount = Object.keys(paths).length; + + console.log(`OpenAPI spec written to public/openapi.json`); + console.log(` Paths: ${pathCount}`); + console.log(` Operations: ${totalOps} (${restOps} REST v1, ${trpcOps} tRPC)`); + + // Validation warnings + if (pathsWithNoOps.length > 0) { + console.warn(` WARNING: ${pathsWithNoOps.length} paths have no operations: ${pathsWithNoOps.join(", ")}`); + } + if (duplicateOperationIds.size > 0) { + console.warn(` WARNING: Duplicate operationIds found: ${[...duplicateOperationIds].join(", ")}`); + } + if (duplicateOperationIds.size === 0 && pathsWithNoOps.length === 0) { + console.log(` Validation: OK (no duplicate operationIds, all paths have operations)`); + } + + process.exit(0); +} catch (err) { + console.error("Failed to generate OpenAPI spec:", err); + process.exit(1); +} diff --git a/src/app/(dashboard)/fleet/health/page.tsx b/src/app/(dashboard)/fleet/health/page.tsx new file mode 100644 index 00000000..bd803439 --- /dev/null +++ b/src/app/(dashboard)/fleet/health/page.tsx @@ -0,0 +1,51 @@ +"use client"; + +import Link from "next/link"; +import { useEnvironmentStore } from "@/stores/environment-store"; +import { useTeamStore } from "@/stores/team-store"; +import { useQuery } from "@tanstack/react-query"; +import { useTRPC } from "@/trpc/client"; +import { FleetHealthDashboard } from "@/components/fleet/fleet-health-dashboard"; + +export default function FleetHealthPage() { + const trpc = useTRPC(); + const selectedEnvironmentId = useEnvironmentStore( + (s) => s.selectedEnvironmentId, + ); + const selectedTeamId = useTeamStore((s) => s.selectedTeamId); + + const environmentsQuery = useQuery( + trpc.environment.list.queryOptions( + { teamId: selectedTeamId! }, + { enabled: !!selectedTeamId }, + ), + ); + + const environments = environmentsQuery.data ?? []; + const activeEnvId = + selectedEnvironmentId || environments[0]?.id || ""; + + return ( +
+
+ + Nodes + + + Overview + + + Health + +
+ + {activeEnvId && } +
+ ); +} diff --git a/src/app/(dashboard)/fleet/page.tsx b/src/app/(dashboard)/fleet/page.tsx index 08ceb331..8c2ad7ef 100644 --- a/src/app/(dashboard)/fleet/page.tsx +++ b/src/app/(dashboard)/fleet/page.tsx @@ -108,7 +108,7 @@ export default function FleetPage() { environmentsQuery.isLoading || nodesQuery.isLoading; - const rawNodes = nodesQuery.data ?? []; + const rawNodes = useMemo(() => nodesQuery.data ?? [], [nodesQuery.data]); // Sort client-side const nodes = useMemo(() => { @@ -195,6 +195,12 @@ export default function FleetPage() { > Overview + + Health + {/* Toolbar — shown when not loading and nodes exist or filters active */} @@ -375,6 +381,18 @@ export default function FleetPage() { )} + {node.labelCompliant === false && ( + + + + Non-compliant + + + + This node is missing one or more required labels defined in node groups + + + )} {formatLastSeen(node.lastSeen)} diff --git a/src/app/(dashboard)/library/shared-components/new/page.tsx b/src/app/(dashboard)/library/shared-components/new/page.tsx index 23b50c83..938c8db4 100644 --- a/src/app/(dashboard)/library/shared-components/new/page.tsx +++ b/src/app/(dashboard)/library/shared-components/new/page.tsx @@ -5,7 +5,7 @@ import { useRouter } from "next/navigation"; import { useMutation } from "@tanstack/react-query"; import { useTRPC } from "@/trpc/client"; import { useEnvironmentStore } from "@/stores/environment-store"; -import { VECTOR_CATALOG } from "@/lib/vector/catalog"; +import { getVectorCatalog } from "@/lib/vector/catalog"; import { toast } from "sonner"; import Link from "next/link"; import { ArrowLeft, ChevronDown, Loader2, Plus, Search } from "lucide-react"; @@ -84,9 +84,9 @@ export default function NewSharedComponentPage() { const [config, setConfig] = useState>({}); const filteredCatalog = useMemo(() => { - if (!search) return VECTOR_CATALOG; + if (!search) return getVectorCatalog(); const q = search.toLowerCase(); - return VECTOR_CATALOG.filter( + return getVectorCatalog().filter( (c) => c.displayName.toLowerCase().includes(q) || c.type.toLowerCase().includes(q) || diff --git a/src/app/(dashboard)/pipelines/[id]/page.tsx b/src/app/(dashboard)/pipelines/[id]/page.tsx index 502af3a4..9a72ff92 100644 --- a/src/app/(dashboard)/pipelines/[id]/page.tsx +++ b/src/app/(dashboard)/pipelines/[id]/page.tsx @@ -10,6 +10,7 @@ import { type Edge, } from "@xyflow/react"; import { Trash2, Pencil, Check, X, AlertTriangle } from "lucide-react"; +import { Badge } from "@/components/ui/badge"; import { useTRPC } from "@/trpc/client"; import { useFlowStore } from "@/stores/flow-store"; import { generateVectorYaml } from "@/lib/config-generator"; @@ -116,6 +117,79 @@ function dbEdgesToFlowEdges( })); } +type BadgeVariant = "default" | "secondary" | "destructive" | "outline"; + +function statusVariant(status: string): BadgeVariant { + switch (status) { + case "DEPLOYED": + return "default"; + case "PENDING": + case "APPROVED": + return "secondary"; + case "REJECTED": + return "destructive"; + case "CANCELLED": + return "outline"; + default: + return "secondary"; + } +} + +function PromotionHistory({ pipelineId }: { pipelineId: string }) { + const trpc = useTRPC(); + const { data: history, isLoading } = useQuery( + trpc.promotion.history.queryOptions({ pipelineId }) + ); + + if (isLoading) + return ( +
+ Loading promotion history... +
+ ); + if (!history?.length) return null; + + return ( +
+
+

Promotion History

+
+ + + + + + + + + + + + {history.map((item) => ( + + + + + + + + ))} + +
DateSourceTargetPromoted ByStatus
+ {new Date(item.createdAt).toLocaleDateString()} + {item.sourceEnvironment.name}{item.targetEnvironment.name} + {item.promotedBy?.name ?? item.promotedBy?.email ?? "—"} + + + {item.status} + +
+
+
+
+ ); +} + function PipelineBuilderInner({ pipelineId }: { pipelineId: string }) { const trpc = useTRPC(); const router = useRouter(); @@ -529,6 +603,7 @@ function PipelineBuilderInner({ pipelineId }: { pipelineId: string }) { )} + + (groupsQuery.data ?? []).map((g) => ({ + id: g.id, + name: g.name, + color: g.color, + parentId: g.parentId ?? null, + })), + [groupsQuery.data], + ); + + // Build group tree for "Move to group" nested menu + const groupTree = useMemo( + () => buildGroupTree(groupsWithParent), + [groupsWithParent], + ); + + // Breadcrumb path for currently selected group + const breadcrumbs = useMemo( + () => buildBreadcrumbs(groupsWithParent, groupId), + [groupsWithParent, groupId], + ); + // --- "Move to group" mutation --- const setGroupMutation = useMutation( trpc.pipeline.update.mutationOptions({ @@ -499,6 +531,30 @@ export default function PipelinesPage() { setGroupId(null); }; + // Recursive renderer for nested "Move to group" dropdown items + function renderGroupMenuItems( + nodes: GroupNode[], + depth: number, + onMove: (groupId: string | null) => void, + ): React.ReactNode { + return nodes.map((node) => ( + + onMove(node.id)} + style={{ paddingLeft: `${(depth + 1) * 12}px` }} + > + + {node.name} + + {node.children.length > 0 && + renderGroupMenuItems(node.children, depth + 1, onMove)} + + )); + } + return (
@@ -516,56 +572,110 @@ export default function PipelinesPage() {
- {/* Toolbar — always shown when pipelines exist, even during loading */} - {!isLoading && pipelines.length > 0 && ( - setManageGroupsOpen(true)} - /> - )} +
+ {/* Sidebar: group tree — only show when there are groups */} + {!isLoading && (groups.length > 0 || groupsQuery.isLoading) && effectiveEnvId && ( +
+
+ + Groups + + +
+ +
+ )} - {selectedPipelineIds.size > 0 && ( - setSelectedPipelineIds(new Set())} - /> - )} + {/* Main content */} +
+ {/* Toolbar — always shown when pipelines exist, even during loading */} + {!isLoading && pipelines.length > 0 && ( + setManageGroupsOpen(true)} + /> + )} - {isLoading ? ( -
- {Array.from({ length: 3 }).map((_, i) => ( - - ))} -
- ) : pipelines.length === 0 ? ( - - ) : filteredPipelines.length === 0 ? ( -
-

No pipelines match your filters

- -
- ) : ( + {selectedPipelineIds.size > 0 && ( + setSelectedPipelineIds(new Set())} + /> + )} + + {/* Breadcrumb navigation */} + {groupId && breadcrumbs.length > 0 && ( + + )} + + {isLoading ? ( +
+ {Array.from({ length: 3 }).map((_, i) => ( + + ))} +
+ ) : pipelines.length === 0 ? ( + + ) : filteredPipelines.length === 0 ? ( +
+

No pipelines match your filters

+ +
+ ) : ( @@ -936,20 +1046,9 @@ export default function PipelinesPage() { No group - {groups.map((g) => ( - - setGroupMutation.mutate({ id: pipeline.id, groupId: g.id }) - } - > - - {g.name} - - ))} + {renderGroupMenuItems(groupTree, 0, (gid) => + setGroupMutation.mutate({ id: pipeline.id, groupId: gid }) + )} )} @@ -974,7 +1073,9 @@ export default function PipelinesPage() { })}
- )} + )} +
+
s.selectedEnvironmentId); const settingsQuery = useQuery(trpc.settings.get.queryOptions()); const settings = settingsQuery.data; @@ -76,6 +79,7 @@ export function FleetSettings() { } return ( +
Fleet Polling Configuration @@ -148,5 +152,10 @@ export function FleetSettings() { + + {environmentId && ( + + )} +
); } diff --git a/src/app/(dashboard)/settings/webhooks/page.tsx b/src/app/(dashboard)/settings/webhooks/page.tsx new file mode 100644 index 00000000..3ee1ff65 --- /dev/null +++ b/src/app/(dashboard)/settings/webhooks/page.tsx @@ -0,0 +1,901 @@ +"use client"; + +import Link from "next/link"; +import { useState } from "react"; +import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; +import { useTRPC } from "@/trpc/client"; +import { useTeamStore } from "@/stores/team-store"; +import { copyToClipboard } from "@/lib/utils"; +import { toast } from "sonner"; +import { + ArrowLeft, + Plus, + Loader2, + Copy, + Trash2, + Webhook, + ShieldCheck, + Clock, + ChevronDown, + ChevronRight, + Play, + CheckCircle, + XCircle, + AlertCircle, + Pencil, + ToggleLeft, + ToggleRight, +} from "lucide-react"; + +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, +} from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Checkbox } from "@/components/ui/checkbox"; +import { QueryError } from "@/components/query-error"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { ConfirmDialog } from "@/components/confirm-dialog"; +import type { AlertMetric } from "@/generated/prisma"; + +// ─── Constants ────────────────────────────────────────────────────────────── + +/** + * Supported webhook event types with human-readable labels. + * Only the outbound-webhook-relevant subset of AlertMetric. + */ +const WEBHOOK_EVENT_TYPES: { value: AlertMetric; label: string; description: string }[] = [ + { + value: "deploy_completed" as AlertMetric, + label: "Deploy Completed", + description: "A pipeline was successfully deployed", + }, + { + value: "pipeline_crashed" as AlertMetric, + label: "Pipeline Crashed", + description: "A running pipeline process exited unexpectedly", + }, + { + value: "node_unreachable" as AlertMetric, + label: "Node Unreachable", + description: "A fleet node stopped sending heartbeats", + }, + { + value: "node_joined" as AlertMetric, + label: "Node Joined", + description: "A new fleet node enrolled", + }, + { + value: "node_left" as AlertMetric, + label: "Node Left", + description: "A fleet node was removed", + }, + { + value: "deploy_rejected" as AlertMetric, + label: "Deploy Rejected", + description: "A deployment request was rejected", + }, + { + value: "deploy_cancelled" as AlertMetric, + label: "Deploy Cancelled", + description: "A pending deployment was cancelled", + }, + { + value: "promotion_completed" as AlertMetric, + label: "Promotion Completed", + description: "A pipeline was promoted to another environment", + }, +]; + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +function formatRelativeTime(date: Date | string | null | undefined): string { + if (!date) return "Never"; + const d = typeof date === "string" ? new Date(date) : date; + const diffMs = Date.now() - d.getTime(); + const diffSec = Math.floor(diffMs / 1000); + if (diffSec < 60) return "Just now"; + const diffMin = Math.floor(diffSec / 60); + if (diffMin < 60) return `${diffMin}m ago`; + const diffHr = Math.floor(diffMin / 60); + if (diffHr < 24) return `${diffHr}h ago`; + return `${Math.floor(diffHr / 24)}d ago`; +} + +function deliveryStatusBadge(status: string) { + switch (status) { + case "success": + return ( + + + Success + + ); + case "failed": + return ( + + + Failed + + ); + case "dead_letter": + return ( + + + Dead Letter + + ); + default: + return ( + + + Pending + + ); + } +} + +// ─── Delivery History Row ───────────────────────────────────────────────────── + +type DeliveryRecord = { + id: string; + eventType: AlertMetric; + status: string; + statusCode: number | null; + attemptNumber: number; + errorMessage: string | null; + requestedAt: Date; + completedAt: Date | null; + nextRetryAt: Date | null; +}; + +function DeliveryHistoryPanel({ + endpointId, + teamId, +}: { + endpointId: string; + teamId: string; +}) { + const trpc = useTRPC(); + const [skip, setSkip] = useState(0); + const take = 10; + + const query = useQuery( + trpc.webhookEndpoint.listDeliveries.queryOptions( + { webhookEndpointId: endpointId, teamId, take, skip }, + { enabled: !!endpointId }, + ), + ); + + const deliveries = (query.data?.deliveries ?? []) as DeliveryRecord[]; + const total = query.data?.total ?? 0; + + if (query.isError) { + return ( +
+ Failed to load delivery history. +
+ ); + } + + if (query.isLoading) { + return ( +
+ {[...Array(3)].map((_, i) => ( + + ))} +
+ ); + } + + if (deliveries.length === 0) { + return ( +
+ No deliveries yet. Trigger a test delivery or wait for an event. +
+ ); + } + + return ( +
+ + + + Event + Status + HTTP + Attempt + Requested + Completed + + + + {deliveries.map((d) => { + const eventLabel = + WEBHOOK_EVENT_TYPES.find((e) => e.value === d.eventType)?.label ?? d.eventType; + return ( + + {eventLabel} + {deliveryStatusBadge(d.status)} + + {d.statusCode ?? "—"} + + + #{d.attemptNumber} + + + {formatRelativeTime(d.requestedAt)} + + + {d.completedAt ? formatRelativeTime(d.completedAt) : "—"} + + + ); + })} + +
+ {total > take && ( +
+ + {skip + 1}–{Math.min(skip + take, total)} of {total} + +
+ + +
+
+ )} +
+ ); +} + +// ─── Endpoint Row ───────────────────────────────────────────────────────────── + +type Endpoint = { + id: string; + name: string; + url: string; + eventTypes: AlertMetric[]; + enabled: boolean; + createdAt: Date; + updatedAt: Date; +}; + +function EndpointRow({ + endpoint, + teamId, + onEdit, + onDelete, + onToggle, + onTest, + testPending, +}: { + endpoint: Endpoint; + teamId: string; + onEdit: (ep: Endpoint) => void; + onDelete: (ep: Endpoint) => void; + onToggle: (id: string) => void; + onTest: (id: string) => void; + testPending: boolean; +}) { + const [expanded, setExpanded] = useState(false); + + return ( + <> + + +
+
{endpoint.name}
+
+ {endpoint.url} +
+
+
+ +
+ {endpoint.eventTypes.map((et) => { + const label = WEBHOOK_EVENT_TYPES.find((e) => e.value === et)?.label ?? et; + return ( + + {label} + + ); + })} +
+
+ + + {endpoint.enabled ? "Enabled" : "Disabled"} + + + + {formatRelativeTime(endpoint.createdAt)} + + +
+ + + + + +
+
+
+ {expanded && ( + + +
+
+ Delivery History +
+ +
+
+
+ )} + + ); +} + +// ─── Create / Edit Dialog ───────────────────────────────────────────────────── + +function EndpointDialog({ + open, + onOpenChange, + teamId, + editTarget, + onSuccess, +}: { + open: boolean; + onOpenChange: (open: boolean) => void; + teamId: string; + editTarget: Endpoint | null; + onSuccess: (secret: string | null) => void; +}) { + const trpc = useTRPC(); + const queryClient = useQueryClient(); + const isEdit = !!editTarget; + + const [name, setName] = useState(editTarget?.name ?? ""); + const [url, setUrl] = useState(editTarget?.url ?? ""); + const [secret, setSecret] = useState(""); + const [selectedEvents, setSelectedEvents] = useState>( + new Set(editTarget?.eventTypes ?? []), + ); + + // Reset when dialog opens/closes or editTarget changes + function reset() { + setName(editTarget?.name ?? ""); + setUrl(editTarget?.url ?? ""); + setSecret(""); + setSelectedEvents(new Set(editTarget?.eventTypes ?? [])); + } + + const createMutation = useMutation( + trpc.webhookEndpoint.create.mutationOptions({ + onSuccess: (data) => { + queryClient.invalidateQueries({ + queryKey: trpc.webhookEndpoint.list.queryKey(), + }); + onOpenChange(false); + onSuccess((data as { secret?: string | null }).secret ?? null); + toast.success("Webhook endpoint created"); + }, + onError: (err) => { + toast.error(err.message || "Failed to create webhook endpoint"); + }, + }), + ); + + const updateMutation = useMutation( + trpc.webhookEndpoint.update.mutationOptions({ + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.webhookEndpoint.list.queryKey(), + }); + onOpenChange(false); + toast.success("Webhook endpoint updated"); + }, + onError: (err) => { + toast.error(err.message || "Failed to update webhook endpoint"); + }, + }), + ); + + function toggleEvent(value: string) { + setSelectedEvents((prev) => { + const next = new Set(prev); + if (next.has(value)) next.delete(value); + else next.add(value); + return next; + }); + } + + function handleSubmit() { + if (!name.trim() || !url.trim() || selectedEvents.size === 0) { + toast.error("Name, URL, and at least one event type are required"); + return; + } + const eventTypes = Array.from(selectedEvents) as AlertMetric[]; + if (isEdit && editTarget) { + updateMutation.mutate({ + id: editTarget.id, + teamId, + name: name.trim(), + url: url.trim(), + eventTypes, + secret: secret.trim() || undefined, + }); + } else { + createMutation.mutate({ + teamId, + name: name.trim(), + url: url.trim(), + eventTypes, + secret: secret.trim() || undefined, + }); + } + } + + const isPending = createMutation.isPending || updateMutation.isPending; + + return ( + { + if (!v) reset(); + onOpenChange(v); + }} + > + + + {isEdit ? "Edit Webhook Endpoint" : "Create Webhook Endpoint"} + + {isEdit + ? "Update the endpoint configuration. Leave the signing secret blank to keep the existing one." + : "Webhook deliveries are HMAC-SHA256 signed. The signing secret is shown once — store it securely."} + + + +
+ {/* Name */} +
+ + setName(e.target.value)} + /> +
+ + {/* URL */} +
+ + setUrl(e.target.value)} + /> +
+ + {/* Secret */} +
+ + setSecret(e.target.value)} + /> +
+ + {/* Event Types */} +
+ +
+ {WEBHOOK_EVENT_TYPES.map((evt) => ( + + ))} +
+
+
+ + + + + +
+
+ ); +} + +// ─── Secret Display Modal ───────────────────────────────────────────────────── + +function SecretModal({ + open, + secret, + onClose, +}: { + open: boolean; + secret: string | null; + onClose: () => void; +}) { + return ( + !v && onClose()}> + + + + + Signing Secret + + + Copy this secret now — it will not be shown again. Use it to verify + the webhook-signature header on incoming requests. + + +
+
+ {secret} +
+ +
+ + + +
+
+ ); +} + +// ─── Main Component ─────────────────────────────────────────────────────────── + +function WebhookEndpointsSettings() { + const trpc = useTRPC(); + const queryClient = useQueryClient(); + const { selectedTeamId } = useTeamStore(); + + const [createOpen, setCreateOpen] = useState(false); + const [editTarget, setEditTarget] = useState(null); + const [deleteTarget, setDeleteTarget] = useState(null); + const [secretModalSecret, setSecretModalSecret] = useState(null); + const [testingId, setTestingId] = useState(null); + + const listQuery = useQuery( + trpc.webhookEndpoint.list.queryOptions( + { teamId: selectedTeamId ?? "" }, + { enabled: !!selectedTeamId }, + ), + ); + + const toggleMutation = useMutation( + trpc.webhookEndpoint.toggleEnabled.mutationOptions({ + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.webhookEndpoint.list.queryKey(), + }); + }, + onError: (err) => { + toast.error(err.message || "Failed to toggle endpoint"); + }, + }), + ); + + const deleteMutation = useMutation( + trpc.webhookEndpoint.delete.mutationOptions({ + onSuccess: () => { + setDeleteTarget(null); + queryClient.invalidateQueries({ + queryKey: trpc.webhookEndpoint.list.queryKey(), + }); + toast.success("Webhook endpoint deleted"); + }, + onError: (err) => { + toast.error(err.message || "Failed to delete endpoint"); + }, + }), + ); + + const testMutation = useMutation( + trpc.webhookEndpoint.testDelivery.mutationOptions({ + onSuccess: (result) => { + setTestingId(null); + if ((result as { success?: boolean }).success) { + toast.success("Test delivery sent successfully"); + } else { + toast.error(`Test delivery failed: ${(result as { error?: string }).error ?? "unknown error"}`); + } + }, + onError: (err) => { + setTestingId(null); + toast.error(err.message || "Test delivery failed"); + }, + }), + ); + + function handleTest(id: string) { + if (!selectedTeamId) return; + setTestingId(id); + testMutation.mutate({ id, teamId: selectedTeamId }); + } + + function handleToggle(id: string) { + if (!selectedTeamId) return; + toggleMutation.mutate({ id, teamId: selectedTeamId }); + } + + const endpoints = (listQuery.data ?? []) as Endpoint[]; + + if (listQuery.isError) { + return ( + listQuery.refetch()} + /> + ); + } + + return ( +
+ {/* Header */} +
+

+ Send HMAC-signed event notifications to external systems +

+ +
+ + {/* Endpoints Table */} + + + + + Webhook Endpoints + + + Endpoints receive signed HTTP POST requests when subscribed events occur. + Expand a row to view delivery history. + + + + {listQuery.isLoading ? ( +
+ {[...Array(3)].map((_, i) => ( + + ))} +
+ ) : endpoints.length === 0 ? ( +
+ +

No webhook endpoints

+

Create an endpoint to start receiving event notifications

+
+ ) : ( + + + + Endpoint + Events + Status + Created + Actions + + + + {endpoints.map((ep) => ( + + ))} + +
+ )} +
+
+ + {/* Create dialog */} + { + if (secret) setSecretModalSecret(secret); + }} + /> + + {/* Edit dialog */} + {editTarget && ( + !v && setEditTarget(null)} + teamId={selectedTeamId ?? ""} + editTarget={editTarget} + onSuccess={() => {}} + /> + )} + + {/* Secret display modal */} + setSecretModalSecret(null)} + /> + + {/* Delete confirmation */} + !v && setDeleteTarget(null)} + title="Delete Webhook Endpoint" + description={`Are you sure you want to delete "${deleteTarget?.name}"? All delivery history will also be deleted.`} + confirmLabel="Delete" + variant="destructive" + onConfirm={() => { + if (deleteTarget && selectedTeamId) { + deleteMutation.mutate({ id: deleteTarget.id, teamId: selectedTeamId }); + } + }} + isPending={deleteMutation.isPending} + /> +
+ ); +} + +// ─── Page wrapper ───────────────────────────────────────────────────────────── + +export default function WebhooksPage() { + return ( +
+
+ + + +

Outbound Webhooks

+
+ +
+ ); +} diff --git a/src/app/api/agent/enroll/__tests__/route.test.ts b/src/app/api/agent/enroll/__tests__/route.test.ts new file mode 100644 index 00000000..ca9ad4a9 --- /dev/null +++ b/src/app/api/agent/enroll/__tests__/route.test.ts @@ -0,0 +1,165 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; +import { mockDeep, mockReset, type DeepMockProxy } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; + +// ─── Mock dependencies before importing SUT ───────────────────────────────── + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +vi.mock("@/server/services/agent-token", () => ({ + verifyEnrollmentToken: vi.fn(), + generateNodeToken: vi.fn(), +})); + +vi.mock("@/server/services/event-alerts", () => ({ + fireEventAlert: vi.fn(), +})); + +vi.mock("@/lib/logger", () => ({ + debugLog: vi.fn(), +})); + +// ─── Import SUT + mocks after vi.mock ─────────────────────────────────────── + +import { POST } from "../route"; +import { prisma } from "@/lib/prisma"; +import { verifyEnrollmentToken, generateNodeToken } from "@/server/services/agent-token"; + +const prismaMock = prisma as unknown as DeepMockProxy; + +// ─── Helpers ──────────────────────────────────────────────────────────────── + +function makeRequest(body: Record): Request { + return new Request("http://localhost/api/agent/enroll", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(body), + }); +} + +const mockEnv = { + id: "env-1", + name: "Production", + enrollmentTokenHash: "hashed-token", + team: { id: "team-1" }, +}; + +const mockNode = { + id: "node-1", + name: "web-server-01", + host: "web-server-01", + environmentId: "env-1", + status: "HEALTHY", + nodeTokenHash: "hashed-node-token", + enrolledAt: new Date(), + lastHeartbeat: new Date(), + agentVersion: "1.0.0", + vectorVersion: "0.40.0", + os: "linux", + labels: { region: "us-east" }, + metadata: { enrolledVia: "agent" }, + createdAt: new Date(), +}; + +// ─── Tests ────────────────────────────────────────────────────────────────── + +describe("POST /api/agent/enroll -- NODE-03 label template auto-assignment", () => { + beforeEach(() => { + mockReset(prismaMock); + vi.mocked(verifyEnrollmentToken).mockResolvedValue(true); + vi.mocked(generateNodeToken).mockResolvedValue({ token: "vf_node_abc123", hash: "h-abc" }); + prismaMock.environment.findMany.mockResolvedValue([mockEnv] as never); + prismaMock.vectorNode.create.mockResolvedValue(mockNode as never); + prismaMock.nodeStatusEvent.create.mockResolvedValue({} as never); + }); + + it("merges matching NodeGroup label templates into node labels", async () => { + // Group with criteria matching the node's labels + prismaMock.nodeGroup.findMany.mockResolvedValue([ + { + id: "ng-1", + name: "US East", + environmentId: "env-1", + criteria: { region: "us-east" }, + labelTemplate: { env: "prod", tier: "1" }, + requiredLabels: [], + createdAt: new Date(), + updatedAt: new Date(), + }, + ] as never); + prismaMock.vectorNode.update.mockResolvedValue({ + ...mockNode, + labels: { region: "us-east", env: "prod", tier: "1" }, + } as never); + + const req = makeRequest({ + token: "vf_enroll_test", + hostname: "web-server-01", + agentVersion: "1.0.0", + vectorVersion: "0.40.0", + os: "linux", + }); + + const res = await POST(req); + expect(res.status).toBe(200); + + // Should call update with merged labels + expect(prismaMock.vectorNode.update).toHaveBeenCalledWith({ + where: { id: "node-1" }, + data: { + labels: { + region: "us-east", + env: "prod", + tier: "1", + }, + }, + }); + }); + + it("skips non-matching NodeGroup label templates", async () => { + // Node has region: eu-west, but group criteria expects region: us-east + const nodeWithEuLabels = { ...mockNode, labels: { region: "eu-west" } }; + prismaMock.vectorNode.create.mockResolvedValue(nodeWithEuLabels as never); + + prismaMock.nodeGroup.findMany.mockResolvedValue([ + { + id: "ng-1", + name: "US East", + environmentId: "env-1", + criteria: { region: "us-east" }, + labelTemplate: { env: "prod" }, + requiredLabels: [], + createdAt: new Date(), + updatedAt: new Date(), + }, + ] as never); + + const req = makeRequest({ + token: "vf_enroll_test", + hostname: "eu-server-01", + }); + + const res = await POST(req); + expect(res.status).toBe(200); + + // No matching criteria -> update should NOT be called + expect(prismaMock.vectorNode.update).not.toHaveBeenCalled(); + }); + + it("does not update labels when no NodeGroups exist", async () => { + prismaMock.nodeGroup.findMany.mockResolvedValue([]); + + const req = makeRequest({ + token: "vf_enroll_test", + hostname: "bare-server-01", + }); + + const res = await POST(req); + expect(res.status).toBe(200); + + // Empty nodeGroups -> update should NOT be called + expect(prismaMock.vectorNode.update).not.toHaveBeenCalled(); + }); +}); diff --git a/src/app/api/agent/enroll/route.ts b/src/app/api/agent/enroll/route.ts index 60ab30be..c4f7c15c 100644 --- a/src/app/api/agent/enroll/route.ts +++ b/src/app/api/agent/enroll/route.ts @@ -4,6 +4,7 @@ import { prisma } from "@/lib/prisma"; import { verifyEnrollmentToken, generateNodeToken } from "@/server/services/agent-token"; import { fireEventAlert } from "@/server/services/event-alerts"; import { debugLog } from "@/lib/logger"; +import { nodeMatchesGroup } from "@/lib/node-group-utils"; const enrollSchema = z.object({ token: z.string().min(1), @@ -81,6 +82,37 @@ export async function POST(request: Request) { metadata: { enrolledVia: "agent" }, }, }); + // NODE-03: Auto-apply matching NodeGroup label templates + try { + const nodeGroups = await prisma.nodeGroup.findMany({ + where: { environmentId: matchedEnv.id }, + }); + + const mergedLabels: Record = {}; + for (const group of nodeGroups) { + const criteria = group.criteria as Record; + const nodeLabels = (node.labels as Record) ?? {}; + if (nodeMatchesGroup(nodeLabels, criteria)) { + Object.assign(mergedLabels, group.labelTemplate as Record); + } + } + + if (Object.keys(mergedLabels).length > 0) { + await prisma.vectorNode.update({ + where: { id: node.id }, + data: { + labels: { + ...((node.labels as Record) ?? {}), + ...mergedLabels, + }, + }, + }); + } + } catch (err) { + // Non-fatal: enrollment still succeeds even if label template application fails + console.error("[enroll] label template application failed:", err); + } + debugLog("enroll", `SUCCESS -- node ${node.id} enrolled in "${matchedEnv.name}"`); await prisma.nodeStatusEvent.create({ diff --git a/src/app/api/agent/heartbeat/__tests__/heartbeat-async.test.ts b/src/app/api/agent/heartbeat/__tests__/heartbeat-async.test.ts index e99c42fe..acb19a6d 100644 --- a/src/app/api/agent/heartbeat/__tests__/heartbeat-async.test.ts +++ b/src/app/api/agent/heartbeat/__tests__/heartbeat-async.test.ts @@ -166,10 +166,8 @@ describe("heartbeat async decomposition", () => { setupBaseMocks(); }); - it("returns 200 while evaluateAlerts is still pending (fire-and-forget)", async () => { - // evaluateAlerts returns a forever-pending promise - evaluateAlertsMock.mockReturnValue(new Promise(() => {})); - + // PERF-01: Heartbeat no longer triggers per-request alert evaluation + it("returns 200 and does NOT call evaluateAlerts (PERF-01)", async () => { // Sample processing — make findUnique for sample request never resolve too prismaMock.eventSampleRequest.findUnique.mockReturnValue( new Promise(() => {}) as never, @@ -183,8 +181,8 @@ describe("heartbeat async decomposition", () => { expect(response.status).toBe(200); expect(await response.json()).toEqual({ ok: true }); - // Proves evaluateAlerts was invoked (but not awaited) - expect(evaluateAlertsMock).toHaveBeenCalledWith("node-1", "env-1"); + // Proves evaluateAlerts is NOT called from heartbeat (PERF-01) + expect(evaluateAlertsMock).not.toHaveBeenCalled(); }); it("returns 200 while sample processing is still pending (fire-and-forget)", async () => { @@ -236,8 +234,7 @@ describe("heartbeat async decomposition", () => { .spyOn(console, "error") .mockImplementation(() => {}); - // All three fire-and-forget operations reject - evaluateAlertsMock.mockRejectedValue(new Error("alert eval boom")); + // Fire-and-forget operations reject prismaMock.eventSampleRequest.findUnique.mockRejectedValue( new Error("sample boom") as never, ); @@ -253,7 +250,6 @@ describe("heartbeat async decomposition", () => { // Verify errors are logged, not swallowed const errorMessages = consoleErrorSpy.mock.calls.map((c) => c[0]); - expect(errorMessages).toContain("Alert evaluation failed:"); expect(errorMessages).toContain("Sample processing error:"); expect(errorMessages).toContain("Per-component latency upsert error:"); diff --git a/src/app/api/agent/heartbeat/route.ts b/src/app/api/agent/heartbeat/route.ts index 8fea4153..8e68b6a6 100644 --- a/src/app/api/agent/heartbeat/route.ts +++ b/src/app/api/agent/heartbeat/route.ts @@ -10,12 +10,8 @@ import { cleanupOldMetrics } from "@/server/services/metrics-cleanup"; import { metricStore } from "@/server/services/metric-store"; import { broadcastSSE, broadcastMetrics } from "@/server/services/sse-broadcast"; import type { FleetStatusEvent, LogEntryEvent, StatusChangeEvent } from "@/lib/sse/types"; -import { evaluateAlerts } from "@/server/services/alert-evaluator"; import { isLeader } from "@/server/services/leader-election"; import { batchUpsertPipelineStatuses } from "@/server/services/heartbeat-batch"; -import { deliverSingleWebhook } from "@/server/services/webhook-delivery"; -import { deliverToChannels } from "@/server/services/channels"; -import { trackWebhookDelivery } from "@/server/services/delivery-tracking"; import { DeploymentMode } from "@/generated/prisma"; import { isVersionOlder } from "@/lib/version"; @@ -183,74 +179,6 @@ async function processSampleResults(results: SampleResult[], nodeId: string): Pr } } -async function evaluateAndDeliverAlerts(nodeId: string, environmentId: string): Promise { - const firedAlerts = await evaluateAlerts(nodeId, environmentId); - - if (firedAlerts.length > 0) { - const [nodeInfo, envInfo] = await Promise.all([ - prisma.vectorNode.findUnique({ - where: { id: nodeId }, - select: { host: true }, - }), - prisma.environment.findUnique({ - where: { id: environmentId }, - select: { name: true, team: { select: { name: true } } }, - }), - ]); - - for (const alert of firedAlerts) { - const pipeline = alert.rule.pipelineId - ? await prisma.pipeline.findUnique({ - where: { id: alert.rule.pipelineId }, - select: { name: true }, - }) - : null; - - const channelPayload = { - alertId: alert.event.id, - status: alert.event.status as "firing" | "resolved", - ruleName: alert.rule.name, - severity: "warning", - environment: envInfo?.name ?? "Unknown", - team: envInfo?.team?.name, - node: nodeInfo?.host ?? nodeId, - pipeline: pipeline?.name, - metric: alert.rule.metric, - value: alert.event.value, - threshold: alert.rule.threshold ?? 0, - message: alert.event.message ?? "", - timestamp: alert.event.firedAt.toISOString(), - dashboardUrl: `${process.env.NEXTAUTH_URL ?? ""}/alerts`, - }; - - // Deliver to legacy webhooks with delivery tracking - const webhooks = await prisma.alertWebhook.findMany({ - where: { environmentId: alert.rule.environmentId, enabled: true }, - }); - for (const webhook of webhooks) { - trackWebhookDelivery( - alert.event.id, - webhook.id, - webhook.url, - () => deliverSingleWebhook(webhook, channelPayload), - ).catch((err) => - console.error(`Tracked webhook delivery error for ${webhook.url}:`, err), - ); - } - - // Deliver to notification channels with delivery tracking - deliverToChannels( - alert.rule.environmentId, - alert.rule.id, - channelPayload, - alert.event.id, - ).catch((err) => - console.error("Channel delivery error:", err), - ); - } - } -} - export async function POST(request: Request) { const agent = await authenticateAgent(request); @@ -592,14 +520,6 @@ export async function POST(request: Request) { console.error("Node health check error:", err), ); - // Evaluate alert rules and deliver webhooks for any fired/resolved alerts (fire-and-forget). - // Only the leader instance evaluates alerts — followers skip since the leader handles it from DB state. - if (isLeader()) { - evaluateAndDeliverAlerts(agent.nodeId, agent.environmentId).catch((err) => - console.error("Alert evaluation failed:", err), - ); - } - // Throttle cleanup to once per hour. Only leader runs cleanup. const ONE_HOUR = 60 * 60 * 1000; if (isLeader() && Date.now() - lastCleanup > ONE_HOUR) { diff --git a/src/app/api/sse/route.ts b/src/app/api/sse/route.ts index 5529090c..5a19ed96 100644 --- a/src/app/api/sse/route.ts +++ b/src/app/api/sse/route.ts @@ -4,6 +4,11 @@ import { sseRegistry } from "@/server/services/sse-registry"; export const dynamic = "force-dynamic"; +const MAX_SSE_CONNECTIONS = parseInt( + process.env.SSE_MAX_CONNECTIONS ?? "1000", + 10, +); + export async function GET(request: Request): Promise { const session = await auth(); if (!session?.user?.id) { @@ -40,6 +45,20 @@ export async function GET(request: Request): Promise { environmentIds = environments.map((e) => e.id); } + // PERF-03: Enforce per-instance SSE connection limit + if (sseRegistry.size >= MAX_SSE_CONNECTIONS) { + return new Response( + JSON.stringify({ error: "SSE connection limit reached" }), + { + status: 503, + headers: { + "Content-Type": "application/json", + "Retry-After": "30", + }, + }, + ); + } + const connectionId = crypto.randomUUID(); let controllerRef: ReadableStreamDefaultController | null = null; diff --git a/src/app/api/v1/_lib/openapi-spec.test.ts b/src/app/api/v1/_lib/openapi-spec.test.ts new file mode 100644 index 00000000..b582d843 --- /dev/null +++ b/src/app/api/v1/_lib/openapi-spec.test.ts @@ -0,0 +1,249 @@ +import { describe, it, expect } from "vitest"; +import { generateOpenAPISpec } from "./openapi-spec"; + +describe("generateOpenAPISpec", () => { + it("returns an object with openapi === '3.1.0'", () => { + const spec = generateOpenAPISpec(); + expect(spec.openapi).toBe("3.1.0"); + }); + + it("has correct info.title and info.version", () => { + const spec = generateOpenAPISpec(); + expect(spec.info.title).toBe("VectorFlow REST API"); + expect(spec.info.version).toBe("1.0.0"); + }); + + it("spec.paths contains all 16 REST v1 operations", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + // Pipeline operations + expect(paths["/api/v1/pipelines"]?.get).toBeDefined(); + expect(paths["/api/v1/pipelines/{id}"]?.get).toBeDefined(); + expect(paths["/api/v1/pipelines/{id}/deploy"]?.post).toBeDefined(); + expect(paths["/api/v1/pipelines/{id}/rollback"]?.post).toBeDefined(); + expect(paths["/api/v1/pipelines/{id}/undeploy"]?.post).toBeDefined(); + expect(paths["/api/v1/pipelines/{id}/versions"]?.get).toBeDefined(); + + // Node operations + expect(paths["/api/v1/nodes"]?.get).toBeDefined(); + expect(paths["/api/v1/nodes/{id}"]?.get).toBeDefined(); + expect(paths["/api/v1/nodes/{id}/maintenance"]?.post).toBeDefined(); + + // Secret operations + expect(paths["/api/v1/secrets"]?.get).toBeDefined(); + expect(paths["/api/v1/secrets"]?.post).toBeDefined(); + expect(paths["/api/v1/secrets"]?.put).toBeDefined(); + expect(paths["/api/v1/secrets"]?.delete).toBeDefined(); + + // Alert operations + expect(paths["/api/v1/alerts/rules"]?.get).toBeDefined(); + expect(paths["/api/v1/alerts/rules"]?.post).toBeDefined(); + + // Audit operations + expect(paths["/api/v1/audit"]?.get).toBeDefined(); + }); + + it("every REST v1 operation has a security requirement referencing BearerAuth", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + const restPaths = Object.entries(paths).filter(([path]) => path.startsWith("/api/v1/")); + + for (const [path, methods] of restPaths) { + for (const [method, operation] of Object.entries(methods)) { + const op = operation as { security?: Array>; tags?: string[] }; + // Only check REST v1 ops (not tRPC) + if (op.tags?.includes("tRPC")) continue; + expect(op.security, `${method.toUpperCase()} ${path} should have security`).toBeDefined(); + expect(op.security!.length, `${method.toUpperCase()} ${path} security should not be empty`).toBeGreaterThan(0); + const secKeys = Object.keys(op.security![0]); + expect(secKeys, `${method.toUpperCase()} ${path} should use BearerAuth`).toContain("BearerAuth"); + } + } + }); + + it("every operation has at least one response with a content schema", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + for (const [path, methods] of Object.entries(paths)) { + for (const [method, operation] of Object.entries(methods)) { + const op = operation as { responses?: Record; description?: string }> }; + expect(op.responses, `${method.toUpperCase()} ${path} should have responses`).toBeDefined(); + const hasContentSchema = Object.values(op.responses!).some( + (r) => r.content && Object.keys(r.content).length > 0 + ); + expect(hasContentSchema, `${method.toUpperCase()} ${path} should have at least one response with a content schema`).toBe(true); + } + } + }); + + it("POST /api/v1/pipelines/{id}/deploy has requestBody with changelog field", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + const deployOp = paths["/api/v1/pipelines/{id}/deploy"]?.post as { + requestBody?: { + content: { + "application/json": { + schema: { + properties?: Record; + }; + }; + }; + }; + }; + + expect(deployOp?.requestBody).toBeDefined(); + const schema = deployOp?.requestBody?.content?.["application/json"]?.schema; + expect(schema?.properties?.changelog).toBeDefined(); + }); + + it("GET /api/v1/audit has query parameters: after, limit, action", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + const auditOp = paths["/api/v1/audit"]?.get as { + parameters?: Array<{ name: string; in: string }>; + }; + + expect(auditOp?.parameters).toBeDefined(); + const paramNames = auditOp?.parameters?.map((p) => p.name) ?? []; + expect(paramNames).toContain("after"); + expect(paramNames).toContain("limit"); + expect(paramNames).toContain("action"); + }); + + // ─── tRPC procedure tests ─────────────────────────────────────────────────── + + it("spec.paths contains tRPC procedure paths under /api/trpc/ prefix", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + const trpcPaths = Object.keys(paths).filter((p) => p.startsWith("/api/trpc/")); + expect(trpcPaths.length).toBeGreaterThan(0); + // Spot check a few expected paths + expect(paths["/api/trpc/pipeline.list"]).toBeDefined(); + expect(paths["/api/trpc/fleet.list"]).toBeDefined(); + expect(paths["/api/trpc/secret.list"]).toBeDefined(); + }); + + it("tRPC query procedures map to GET operations, mutations map to POST operations", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + // Queries → GET + expect(paths["/api/trpc/pipeline.list"]?.get).toBeDefined(); + expect(paths["/api/trpc/pipeline.get"]?.get).toBeDefined(); + expect(paths["/api/trpc/fleet.list"]?.get).toBeDefined(); + expect(paths["/api/trpc/fleet.get"]?.get).toBeDefined(); + expect(paths["/api/trpc/environment.list"]?.get).toBeDefined(); + expect(paths["/api/trpc/secret.list"]?.get).toBeDefined(); + expect(paths["/api/trpc/alert.listRules"]?.get).toBeDefined(); + expect(paths["/api/trpc/serviceAccount.list"]?.get).toBeDefined(); + + // Mutations → POST + expect(paths["/api/trpc/pipeline.create"]?.post).toBeDefined(); + expect(paths["/api/trpc/pipeline.update"]?.post).toBeDefined(); + expect(paths["/api/trpc/pipeline.delete"]?.post).toBeDefined(); + expect(paths["/api/trpc/deploy.agent"]?.post).toBeDefined(); + expect(paths["/api/trpc/deploy.undeploy"]?.post).toBeDefined(); + expect(paths["/api/trpc/secret.create"]?.post).toBeDefined(); + expect(paths["/api/trpc/serviceAccount.create"]?.post).toBeDefined(); + }); + + it("tRPC procedure entries include a 'tRPC' tag for grouping", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + const trpcPaths = Object.entries(paths).filter(([p]) => p.startsWith("/api/trpc/")); + expect(trpcPaths.length).toBeGreaterThan(0); + + for (const [path, methods] of trpcPaths) { + for (const [, operation] of Object.entries(methods)) { + const op = operation as { tags?: string[] }; + expect(op.tags, `${path} tRPC operation should have 'tRPC' tag`).toContain("tRPC"); + } + } + }); + + it("at least 10 tRPC procedures appear in the spec", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + const trpcPaths = Object.keys(paths).filter((p) => p.startsWith("/api/trpc/")); + expect(trpcPaths.length).toBeGreaterThanOrEqual(10); + + // Specifically verify the 10 required procedures are present + const required = [ + "/api/trpc/pipeline.list", + "/api/trpc/pipeline.get", + "/api/trpc/pipeline.create", + "/api/trpc/pipeline.delete", + "/api/trpc/deploy.agent", + "/api/trpc/fleet.list", + "/api/trpc/fleet.get", + "/api/trpc/secret.list", + "/api/trpc/environment.list", + "/api/trpc/serviceAccount.list", + ]; + + for (const path of required) { + expect(paths[path], `Expected tRPC path ${path} to be in spec`).toBeDefined(); + } + }); + + it("tRPC query procedures document the SuperJSON input encoding via ?input= query param", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + const pipelineListOp = paths["/api/trpc/pipeline.list"]?.get as { + parameters?: Array<{ name: string; in: string; description?: string }>; + }; + + expect(pipelineListOp?.parameters).toBeDefined(); + const inputParam = pipelineListOp?.parameters?.find((p) => p.name === "input"); + expect(inputParam).toBeDefined(); + expect(inputParam?.in).toBe("query"); + // Description should mention SuperJSON or url-encoded + expect( + inputParam?.description?.toLowerCase().includes("superjson") || + inputParam?.description?.toLowerCase().includes("url-encoded") || + inputParam?.description?.toLowerCase().includes("json") + ).toBe(true); + }); + + it("total operation count (REST v1 + tRPC) exceeds 25", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + const httpMethods = ["get", "post", "put", "delete", "patch", "head", "options"]; + + const totalOps = Object.values(paths).reduce((acc, methods) => { + return acc + Object.keys(methods).filter((m) => httpMethods.includes(m)).length; + }, 0); + + expect(totalOps).toBeGreaterThan(25); + }); + + it("CookieAuth security scheme is defined", () => { + const spec = generateOpenAPISpec(); + const components = spec.components as { securitySchemes?: Record }; + expect(components?.securitySchemes?.CookieAuth).toBeDefined(); + }); + + it("tRPC operations use CookieAuth security scheme", () => { + const spec = generateOpenAPISpec(); + const paths = spec.paths as Record>; + + const trpcPaths = Object.entries(paths).filter(([p]) => p.startsWith("/api/trpc/")); + expect(trpcPaths.length).toBeGreaterThan(0); + + for (const [path, methods] of trpcPaths) { + for (const [method, operation] of Object.entries(methods)) { + const op = operation as { security?: Array> }; + expect(op.security, `${method.toUpperCase()} ${path} should have security`).toBeDefined(); + const secKeys = Object.keys(op.security![0]); + expect(secKeys, `${method.toUpperCase()} ${path} should use CookieAuth`).toContain("CookieAuth"); + } + } + }); +}); diff --git a/src/app/api/v1/_lib/openapi-spec.ts b/src/app/api/v1/_lib/openapi-spec.ts new file mode 100644 index 00000000..e8ae5ab1 --- /dev/null +++ b/src/app/api/v1/_lib/openapi-spec.ts @@ -0,0 +1,1525 @@ +import { + OpenAPIRegistry, + OpenApiGeneratorV31, + extendZodWithOpenApi, +} from "@asteasolutions/zod-to-openapi"; +import { z } from "zod"; + +// IMPORTANT: Must be called before any z.object(...) calls +extendZodWithOpenApi(z); + +// --------------------------------------------------------------------------- +// Registry bootstrap +// --------------------------------------------------------------------------- + +const registry = new OpenAPIRegistry(); + +const bearerAuth = registry.registerComponent("securitySchemes", "BearerAuth", { + type: "http", + scheme: "bearer", + description: + "Service account API key. Format: Authorization: Bearer vf_. Service accounts are environment-scoped.", +}); + +const cookieAuth = registry.registerComponent("securitySchemes", "CookieAuth", { + type: "apiKey", + in: "cookie", + name: "authjs.session-token", + description: + "Session cookie set on sign-in. Used by the VectorFlow web UI and tRPC procedures.", +}); + +// --------------------------------------------------------------------------- +// Shared error schemas +// --------------------------------------------------------------------------- + +const ErrorResponse = z + .object({ + error: z.string().openapi({ example: "Not found" }), + }) + .openapi("ErrorResponse"); + +const ValidationErrorResponse = z + .object({ + error: z.string().openapi({ example: "Deployment failed" }), + validationErrors: z.array(z.string()).optional(), + }) + .openapi("ValidationErrorResponse"); + +// --------------------------------------------------------------------------- +// Pipelines — shared schemas +// --------------------------------------------------------------------------- + +const PipelineSchema = z + .object({ + id: z.string().openapi({ example: "clxyz123abc" }), + name: z.string().openapi({ example: "my-pipeline" }), + description: z.string().nullable().openapi({ example: "Collects nginx logs" }), + isDraft: z.boolean().openapi({ example: false }), + deployedAt: z + .string() + .nullable() + .openapi({ example: "2024-01-15T10:00:00Z", format: "date-time" }), + createdAt: z.string().openapi({ example: "2024-01-01T00:00:00Z", format: "date-time" }), + updatedAt: z.string().openapi({ example: "2024-01-15T10:00:00Z", format: "date-time" }), + }) + .openapi("Pipeline"); + +const PipelineNodeSchema = z + .object({ + id: z.string(), + componentKey: z.string().openapi({ example: "vector.sources.file" }), + componentType: z.string().openapi({ example: "source" }), + kind: z.string().openapi({ example: "source" }), + positionX: z.number(), + positionY: z.number(), + disabled: z.boolean(), + }) + .openapi("PipelineNode"); + +const PipelineEdgeSchema = z + .object({ + id: z.string(), + sourceNodeId: z.string(), + targetNodeId: z.string(), + sourcePort: z.string().nullable(), + }) + .openapi("PipelineEdge"); + +const PipelineNodeStatusSchema = z + .object({ + nodeId: z.string(), + status: z.string().openapi({ example: "running" }), + version: z.string().nullable(), + eventsIn: z.number().nullable(), + eventsOut: z.number().nullable(), + errorsTotal: z.number().nullable(), + }) + .openapi("PipelineNodeStatus"); + +const PipelineDetailSchema = z + .object({ + id: z.string(), + name: z.string(), + description: z.string().nullable(), + isDraft: z.boolean(), + deployedAt: z.string().nullable().openapi({ format: "date-time" }), + environmentId: z.string(), + createdAt: z.string().openapi({ format: "date-time" }), + updatedAt: z.string().openapi({ format: "date-time" }), + nodes: z.array(PipelineNodeSchema), + edges: z.array(PipelineEdgeSchema), + nodeStatuses: z.array(PipelineNodeStatusSchema), + }) + .openapi("PipelineDetail"); + +const PipelineVersionSchema = z + .object({ + id: z.string(), + version: z.number().openapi({ example: 3 }), + changelog: z.string().nullable().openapi({ example: "Deployed via REST API" }), + createdById: z.string().nullable(), + createdAt: z.string().openapi({ format: "date-time" }), + }) + .openapi("PipelineVersion"); + +// --------------------------------------------------------------------------- +// Nodes — shared schemas +// --------------------------------------------------------------------------- + +const NodeEnvironmentSchema = z + .object({ + id: z.string(), + name: z.string().openapi({ example: "production" }), + }) + .openapi("NodeEnvironment"); + +const NodeSchema = z + .object({ + id: z.string(), + name: z.string().openapi({ example: "node-prod-01" }), + host: z.string().openapi({ example: "10.0.1.50" }), + apiPort: z.number().openapi({ example: 8686 }), + environmentId: z.string(), + status: z.string().openapi({ example: "online" }), + lastSeen: z.string().nullable().openapi({ format: "date-time" }), + lastHeartbeat: z.string().nullable().openapi({ format: "date-time" }), + agentVersion: z.string().nullable().openapi({ example: "0.9.1" }), + vectorVersion: z.string().nullable().openapi({ example: "0.43.0" }), + os: z.string().nullable().openapi({ example: "linux" }), + deploymentMode: z.string().nullable().openapi({ example: "docker" }), + maintenanceMode: z.boolean(), + maintenanceModeAt: z.string().nullable().openapi({ format: "date-time" }), + metadata: z.record(z.unknown()).nullable(), + enrolledAt: z.string().nullable().openapi({ format: "date-time" }), + createdAt: z.string().openapi({ format: "date-time" }), + environment: NodeEnvironmentSchema, + }) + .openapi("Node"); + +const NodePipelineStatusSchema = z + .object({ + id: z.string(), + status: z.string().openapi({ example: "running" }), + pipeline: z.object({ + id: z.string(), + name: z.string(), + }), + }) + .openapi("NodePipelineStatus"); + +const NodeDetailSchema = z + .object({ + id: z.string(), + name: z.string(), + host: z.string(), + apiPort: z.number(), + environmentId: z.string(), + status: z.string(), + lastSeen: z.string().nullable().openapi({ format: "date-time" }), + lastHeartbeat: z.string().nullable().openapi({ format: "date-time" }), + agentVersion: z.string().nullable(), + vectorVersion: z.string().nullable(), + os: z.string().nullable(), + deploymentMode: z.string().nullable(), + maintenanceMode: z.boolean(), + maintenanceModeAt: z.string().nullable().openapi({ format: "date-time" }), + metadata: z.record(z.unknown()).nullable(), + enrolledAt: z.string().nullable().openapi({ format: "date-time" }), + createdAt: z.string().openapi({ format: "date-time" }), + environment: NodeEnvironmentSchema, + pipelineStatuses: z.array(NodePipelineStatusSchema), + }) + .openapi("NodeDetail"); + +const NodeMaintenanceResponseSchema = z + .object({ + id: z.string(), + name: z.string(), + maintenanceMode: z.boolean(), + maintenanceModeAt: z.string().nullable().openapi({ format: "date-time" }), + }) + .openapi("NodeMaintenanceResponse"); + +// --------------------------------------------------------------------------- +// Secrets — shared schemas +// --------------------------------------------------------------------------- + +const SecretMetaSchema = z + .object({ + id: z.string(), + name: z.string().openapi({ example: "DATABASE_PASSWORD" }), + createdAt: z.string().openapi({ format: "date-time" }), + updatedAt: z.string().openapi({ format: "date-time" }), + }) + .openapi("SecretMeta"); + +const SecretUpdatedSchema = z + .object({ + id: z.string(), + name: z.string(), + updatedAt: z.string().openapi({ format: "date-time" }), + }) + .openapi("SecretUpdated"); + +// --------------------------------------------------------------------------- +// Alert Rules — shared schemas +// --------------------------------------------------------------------------- + +const AlertMetric = z + .enum([ + "node_unreachable", + "cpu_usage", + "memory_usage", + "disk_usage", + "error_rate", + "discarded_rate", + "pipeline_crashed", + "fleet_error_rate", + "fleet_throughput_drop", + "fleet_event_volume", + "node_load_imbalance", + ]) + .openapi("AlertMetric"); + +const AlertCondition = z.enum(["gt", "lt", "eq"]).openapi("AlertCondition"); + +const AlertRuleSchema = z + .object({ + id: z.string(), + name: z.string().openapi({ example: "High CPU Usage" }), + environmentId: z.string(), + teamId: z.string(), + pipelineId: z.string().nullable(), + metric: AlertMetric, + condition: AlertCondition, + threshold: z.number().openapi({ example: 90 }), + durationSeconds: z.number().openapi({ example: 60 }), + createdAt: z.string().openapi({ format: "date-time" }), + updatedAt: z.string().openapi({ format: "date-time" }), + pipeline: z + .object({ + id: z.string(), + name: z.string(), + }) + .nullable(), + }) + .openapi("AlertRule"); + +// --------------------------------------------------------------------------- +// Audit — shared schemas +// --------------------------------------------------------------------------- + +const AuditEventSchema = z + .object({ + id: z.string(), + action: z.string().openapi({ example: "api.pipeline_deployed" }), + entityType: z.string().nullable(), + entityId: z.string().nullable(), + createdAt: z.string().openapi({ format: "date-time" }), + user: z + .object({ + id: z.string(), + name: z.string().nullable(), + email: z.string(), + }) + .nullable(), + }) + .openapi("AuditEvent"); + +// --------------------------------------------------------------------------- +// Register all 16 paths +// --------------------------------------------------------------------------- + +// 1. GET /api/v1/pipelines +registry.registerPath({ + method: "get", + path: "/api/v1/pipelines", + operationId: "listPipelines", + summary: "List pipelines", + description: + "Returns all pipelines in the environment associated with the service account, ordered by most recently updated.", + tags: ["Pipelines"], + security: [{ [bearerAuth.name]: [] }], + responses: { + 200: { + description: "List of pipelines", + content: { + "application/json": { + schema: z.object({ pipelines: z.array(PipelineSchema) }), + }, + }, + }, + 401: { description: "Unauthorized — invalid or missing API key" }, + 403: { description: "Forbidden — service account lacks pipelines.read permission" }, + }, +}); + +// 2. GET /api/v1/pipelines/{id} +registry.registerPath({ + method: "get", + path: "/api/v1/pipelines/{id}", + operationId: "getPipeline", + summary: "Get pipeline", + description: + "Returns a single pipeline with its node graph, edges, and current node statuses.", + tags: ["Pipelines"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Pipeline ID" }), + }), + }, + responses: { + 200: { + description: "Pipeline detail", + content: { + "application/json": { + schema: z.object({ pipeline: PipelineDetailSchema }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Pipeline not found", + content: { + "application/json": { + schema: ErrorResponse, + }, + }, + }, + }, +}); + +// 3. POST /api/v1/pipelines/{id}/deploy +registry.registerPath({ + method: "post", + path: "/api/v1/pipelines/{id}/deploy", + operationId: "deployPipeline", + summary: "Deploy pipeline", + description: + "Creates a new pipeline version and deploys it to all matching fleet nodes. Returns the version ID and version number on success.", + tags: ["Pipelines"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Pipeline ID" }), + }), + body: { + required: false, + content: { + "application/json": { + schema: z.object({ + changelog: z + .string() + .optional() + .openapi({ example: "Deployed via CI/CD pipeline" }), + }), + }, + }, + }, + }, + responses: { + 200: { + description: "Deployment successful", + content: { + "application/json": { + schema: z.object({ + success: z.literal(true), + versionId: z.string(), + versionNumber: z.number(), + }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Pipeline not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + 422: { + description: "Deployment failed — validation errors in pipeline config", + content: { + "application/json": { + schema: ValidationErrorResponse, + }, + }, + }, + }, +}); + +// 4. POST /api/v1/pipelines/{id}/rollback +registry.registerPath({ + method: "post", + path: "/api/v1/pipelines/{id}/rollback", + operationId: "rollbackPipeline", + summary: "Rollback pipeline", + description: + "Rolls back the pipeline to a specific previously deployed version.", + tags: ["Pipelines"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Pipeline ID" }), + }), + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + targetVersionId: z + .string() + .openapi({ description: "ID of the version to roll back to" }), + }), + }, + }, + }, + }, + responses: { + 200: { + description: "Rollback successful", + content: { + "application/json": { + schema: z.object({ + success: z.literal(true), + versionId: z.string(), + versionNumber: z.number(), + }), + }, + }, + }, + 400: { + description: "Missing or invalid targetVersionId", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Pipeline not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 5. POST /api/v1/pipelines/{id}/undeploy +registry.registerPath({ + method: "post", + path: "/api/v1/pipelines/{id}/undeploy", + operationId: "undeployPipeline", + summary: "Undeploy pipeline", + description: "Stops a deployed pipeline on all fleet nodes.", + tags: ["Pipelines"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Pipeline ID" }), + }), + }, + responses: { + 200: { + description: "Undeployment result", + content: { + "application/json": { + schema: z.object({ + success: z.boolean(), + }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Pipeline not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 6. GET /api/v1/pipelines/{id}/versions +registry.registerPath({ + method: "get", + path: "/api/v1/pipelines/{id}/versions", + operationId: "listPipelineVersions", + summary: "List pipeline versions", + description: "Returns all saved versions of a pipeline, ordered by version number descending.", + tags: ["Pipelines"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Pipeline ID" }), + }), + }, + responses: { + 200: { + description: "Pipeline versions", + content: { + "application/json": { + schema: z.object({ versions: z.array(PipelineVersionSchema) }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Pipeline not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 7. GET /api/v1/nodes +registry.registerPath({ + method: "get", + path: "/api/v1/nodes", + operationId: "listNodes", + summary: "List nodes", + description: + "Returns all fleet nodes in the environment. Optionally filter by label using the `label` query parameter in `key:value` format.", + tags: ["Nodes"], + security: [{ [bearerAuth.name]: [] }], + request: { + query: z.object({ + label: z + .string() + .optional() + .openapi({ example: "env:production", description: "Filter nodes by label in key:value format" }), + }), + }, + responses: { + 200: { + description: "List of nodes", + content: { + "application/json": { + schema: z.object({ nodes: z.array(NodeSchema) }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// 8. GET /api/v1/nodes/{id} +registry.registerPath({ + method: "get", + path: "/api/v1/nodes/{id}", + operationId: "getNode", + summary: "Get node", + description: "Returns a single node with its pipeline deployment statuses.", + tags: ["Nodes"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Node ID" }), + }), + }, + responses: { + 200: { + description: "Node detail", + content: { + "application/json": { + schema: z.object({ node: NodeDetailSchema }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Node not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 9. POST /api/v1/nodes/{id}/maintenance +registry.registerPath({ + method: "post", + path: "/api/v1/nodes/{id}/maintenance", + operationId: "toggleMaintenance", + summary: "Toggle maintenance mode", + description: + "Enable or disable maintenance mode on a node. Nodes in maintenance mode stop receiving new deployments.", + tags: ["Nodes"], + security: [{ [bearerAuth.name]: [] }], + request: { + params: z.object({ + id: z.string().openapi({ description: "Node ID" }), + }), + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + enabled: z.boolean().openapi({ description: "true to enable maintenance mode, false to disable" }), + }), + }, + }, + }, + }, + responses: { + 200: { + description: "Updated maintenance mode state", + content: { + "application/json": { + schema: z.object({ node: NodeMaintenanceResponseSchema }), + }, + }, + }, + 400: { + description: "Invalid request body", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Node not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 10. GET /api/v1/secrets +registry.registerPath({ + method: "get", + path: "/api/v1/secrets", + operationId: "listSecrets", + summary: "List secrets", + description: + "Returns metadata (id, name, timestamps) for all secrets in the environment. Secret values are never returned.", + tags: ["Secrets"], + security: [{ [bearerAuth.name]: [] }], + responses: { + 200: { + description: "List of secret metadata", + content: { + "application/json": { + schema: z.object({ secrets: z.array(SecretMetaSchema) }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// 11. POST /api/v1/secrets +registry.registerPath({ + method: "post", + path: "/api/v1/secrets", + operationId: "createSecret", + summary: "Create secret", + description: + "Creates a new encrypted secret. Name must start with a letter or number and contain only letters, numbers, hyphens, and underscores.", + tags: ["Secrets"], + security: [{ [bearerAuth.name]: [] }], + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + name: z + .string() + .regex(/^[a-zA-Z0-9][a-zA-Z0-9_-]*$/) + .openapi({ example: "DATABASE_PASSWORD" }), + value: z.string().openapi({ example: "supersecret123" }), + }), + }, + }, + }, + }, + responses: { + 201: { + description: "Secret created", + content: { + "application/json": { + schema: z.object({ secret: SecretMetaSchema }), + }, + }, + }, + 400: { + description: "Invalid request body or name format", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 409: { + description: "A secret with this name already exists", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 12. PUT /api/v1/secrets +registry.registerPath({ + method: "put", + path: "/api/v1/secrets", + operationId: "updateSecret", + summary: "Update secret", + description: + "Updates the value of an existing secret. Identify the secret by id or name (one required).", + tags: ["Secrets"], + security: [{ [bearerAuth.name]: [] }], + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + id: z.string().optional().openapi({ description: "Secret ID (id or name required)" }), + name: z.string().optional().openapi({ description: "Secret name (id or name required)" }), + value: z.string().openapi({ example: "newsecretvalue" }), + }), + }, + }, + }, + }, + responses: { + 200: { + description: "Secret updated", + content: { + "application/json": { + schema: z.object({ secret: SecretUpdatedSchema }), + }, + }, + }, + 400: { + description: "Missing id or name, or missing value", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Secret not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 13. DELETE /api/v1/secrets +registry.registerPath({ + method: "delete", + path: "/api/v1/secrets", + operationId: "deleteSecret", + summary: "Delete secret", + description: + "Deletes a secret by id or name query parameter (one required).", + tags: ["Secrets"], + security: [{ [bearerAuth.name]: [] }], + request: { + query: z.object({ + id: z.string().optional().openapi({ description: "Secret ID" }), + name: z.string().optional().openapi({ description: "Secret name" }), + }), + }, + responses: { + 200: { + description: "Secret deleted", + content: { + "application/json": { + schema: z.object({ deleted: z.literal(true) }), + }, + }, + }, + 400: { + description: "Neither id nor name provided", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Secret not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 14. GET /api/v1/alerts/rules +registry.registerPath({ + method: "get", + path: "/api/v1/alerts/rules", + operationId: "listAlertRules", + summary: "List alert rules", + description: "Returns all alert rules in the environment, ordered by most recently created.", + tags: ["Alerts"], + security: [{ [bearerAuth.name]: [] }], + responses: { + 200: { + description: "List of alert rules", + content: { + "application/json": { + schema: z.object({ rules: z.array(AlertRuleSchema) }), + }, + }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// 15. POST /api/v1/alerts/rules +registry.registerPath({ + method: "post", + path: "/api/v1/alerts/rules", + operationId: "createAlertRule", + summary: "Create alert rule", + description: + "Creates a new alert rule. Fleet-scoped metrics (fleet_error_rate, fleet_throughput_drop, fleet_event_volume, node_load_imbalance) cannot be scoped to a specific pipeline.", + tags: ["Alerts"], + security: [{ [bearerAuth.name]: [] }], + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + name: z.string().openapi({ example: "High CPU Usage" }), + metric: AlertMetric, + condition: AlertCondition, + threshold: z.number().openapi({ example: 90 }), + pipelineId: z + .string() + .optional() + .openapi({ description: "Scope rule to a specific pipeline. Not allowed for fleet metrics." }), + durationSeconds: z + .number() + .optional() + .openapi({ example: 60, description: "Duration the condition must persist before firing. Defaults to 60." }), + }), + }, + }, + }, + }, + responses: { + 201: { + description: "Alert rule created", + content: { + "application/json": { + schema: z.object({ rule: AlertRuleSchema }), + }, + }, + }, + 400: { + description: "Invalid request body or metric/condition combination", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { + description: "Pipeline not found", + content: { "application/json": { schema: ErrorResponse } }, + }, + }, +}); + +// 16. GET /api/v1/audit +registry.registerPath({ + method: "get", + path: "/api/v1/audit", + operationId: "listAuditEvents", + summary: "List audit events", + description: + "Returns audit log events for the environment with cursor-based pagination. Events are ordered by creation time ascending.", + tags: ["Audit"], + security: [{ [bearerAuth.name]: [] }], + request: { + query: z.object({ + after: z + .string() + .optional() + .openapi({ description: "Pagination cursor — ID of the last event from the previous page" }), + limit: z + .string() + .optional() + .openapi({ example: "50", description: "Number of events to return (1–200, default 50)" }), + action: z + .string() + .optional() + .openapi({ example: "api.pipeline_deployed", description: "Filter by action type" }), + }), + }, + responses: { + 200: { + description: "Audit events page", + content: { + "application/json": { + schema: z.object({ + events: z.array(AuditEventSchema), + cursor: z.string().nullable().openapi({ description: "Cursor for the next page" }), + hasMore: z.boolean(), + }), + }, + }, + }, + 400: { + description: "Invalid cursor", + content: { "application/json": { schema: ErrorResponse } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — shared helpers +// --------------------------------------------------------------------------- + +/** + * tRPC query (GET) input parameter. + * + * For tRPC queries the entire input is URL-encoded as JSON and passed in the + * `?input=` query parameter using SuperJSON encoding: + * GET /api/trpc/.?input= + * where the JSON payload is { "json": }. + */ +function trpcInputQueryParam(description: string) { + return z.string().optional().openapi({ + description: `URL-encoded JSON input (SuperJSON). Encode as \`?input=${encodeURIComponent(JSON.stringify({ json: { "...": "..." } }))}\`. ${description}`, + example: '{"json":{"environmentId":"clxyz123"}}', + }); +} + +/** + * Standard tRPC response wrapper. + * All tRPC responses are wrapped in { result: { data: { json: } } } + */ +const TrpcResponseSchema = z + .object({ + result: z.object({ + data: z.object({ + json: z + .unknown() + .openapi({ + description: + "SuperJSON-encoded response payload. When using tRPC with a TypeScript client the data is automatically deserialized. Raw HTTP callers receive the SuperJSON wire format.", + }), + }), + }), + }) + .openapi("TrpcResponse"); + +/** + * Standard tRPC error response. + */ +const TrpcErrorSchema = z + .object({ + error: z.object({ + json: z.object({ + message: z.string().openapi({ example: "Pipeline not found" }), + code: z.number().openapi({ example: -32004 }), + data: z.object({ + code: z.string().openapi({ example: "NOT_FOUND" }), + httpStatus: z.number().openapi({ example: 404 }), + }), + }), + }), + }) + .openapi("TrpcError"); + +const trpcSecurity = [{ [cookieAuth.name]: [] }]; + +const trpcNote = + "**tRPC endpoint.** Auth: session cookie (`authjs.session-token`). Uses SuperJSON encoding. " + + "For full type safety and automatic deserialization use the TypeScript tRPC client."; + +// --------------------------------------------------------------------------- +// tRPC — Pipeline procedures +// --------------------------------------------------------------------------- + +// pipeline.list (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/pipeline.list", + operationId: "trpcPipelineList", + summary: "pipeline.list — List pipelines", + description: `${trpcNote}\n\nReturns all pipelines in an environment.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `environmentId: string`"), + }), + }, + responses: { + 200: { + description: "List of pipelines", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized — not signed in" }, + 403: { description: "Forbidden — insufficient role" }, + }, +}); + +// pipeline.get (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/pipeline.get", + operationId: "trpcPipelineGet", + summary: "pipeline.get — Get pipeline", + description: `${trpcNote}\n\nReturns a single pipeline with its node graph, edges, and config change status.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `id: string`"), + }), + }, + responses: { + 200: { + description: "Pipeline detail", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { description: "Pipeline not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// pipeline.create (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/pipeline.create", + operationId: "trpcPipelineCreate", + summary: "pipeline.create — Create pipeline", + description: `${trpcNote}\n\nCreates a new draft pipeline.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + name: z.string().openapi({ + example: "syslog-to-s3", + description: "Must match `^[a-zA-Z0-9][a-zA-Z0-9 _-]*$`, 1–100 characters.", + }), + description: z.string().optional().openapi({ example: "Ships syslog to S3" }), + environmentId: z.string().openapi({ example: "clxyz123" }), + }), + }).openapi({ description: "SuperJSON mutation body: `{\"json\": }`" }), + }, + }, + }, + }, + responses: { + 200: { + description: "Created pipeline", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 400: { description: "Invalid input or name format", content: { "application/json": { schema: TrpcErrorSchema } } }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: EDITOR" }, + 404: { description: "Environment not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// pipeline.update (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/pipeline.update", + operationId: "trpcPipelineUpdate", + summary: "pipeline.update — Update pipeline", + description: `${trpcNote}\n\nUpdates pipeline name or description.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + id: z.string().openapi({ example: "clxyz123" }), + name: z.string().optional().openapi({ example: "updated-name" }), + description: z.string().nullable().optional().openapi({ example: "Updated description" }), + }), + }).openapi({ description: "SuperJSON mutation body." }), + }, + }, + }, + }, + responses: { + 200: { + description: "Updated pipeline", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: EDITOR" }, + 404: { description: "Pipeline not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// pipeline.delete (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/pipeline.delete", + operationId: "trpcPipelineDelete", + summary: "pipeline.delete — Delete pipeline", + description: `${trpcNote}\n\nDeletes a pipeline (undeploys first if deployed).`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + id: z.string().openapi({ example: "clxyz123" }), + }), + }).openapi({ description: "SuperJSON mutation body." }), + }, + }, + }, + }, + responses: { + 200: { + description: "Deletion result", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: EDITOR" }, + 404: { description: "Pipeline not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — Deploy procedures +// --------------------------------------------------------------------------- + +// deploy.agent (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/deploy.agent", + operationId: "trpcDeployAgent", + summary: "deploy.agent — Deploy pipeline to agents", + description: + `${trpcNote}\n\nValidates the pipeline config, creates a new version, and marks it as deployed. ` + + "Fleet agents pick up the change on their next poll. If the environment requires deploy approval and the caller is an EDITOR (not ADMIN), " + + "a deploy request is created instead of deploying directly.", + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + pipelineId: z.string().openapi({ example: "clxyz123" }), + changelog: z.string().min(1).openapi({ example: "Deployed from CI" }), + nodeSelector: z + .record(z.string(), z.string()) + .optional() + .openapi({ + description: "Optional key/value label filter to target a subset of fleet nodes.", + example: { env: "production" }, + }), + }), + }).openapi({ description: "SuperJSON mutation body." }), + }, + }, + }, + }, + responses: { + 200: { + description: "Deployment result or deploy request created", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 400: { description: "Invalid pipeline config", content: { "application/json": { schema: TrpcErrorSchema } } }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: EDITOR" }, + 404: { description: "Pipeline not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// deploy.undeploy (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/deploy.undeploy", + operationId: "trpcDeployUndeploy", + summary: "deploy.undeploy — Undeploy pipeline", + description: `${trpcNote}\n\nStops a deployed pipeline on all fleet nodes (agents stop it on their next poll).`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + pipelineId: z.string().openapi({ example: "clxyz123" }), + }), + }).openapi({ description: "SuperJSON mutation body." }), + }, + }, + }, + }, + responses: { + 200: { + description: "Undeploy result", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: EDITOR" }, + 404: { description: "Pipeline not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — Fleet procedures +// --------------------------------------------------------------------------- + +// fleet.list (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/fleet.list", + operationId: "trpcFleetList", + summary: "fleet.list — List fleet nodes", + description: `${trpcNote}\n\nReturns all fleet nodes in an environment. Optionally filter by search term, status, or labels.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam( + "Required: `environmentId: string`. Optional: `search?: string`, `status?: string[]`, `labels?: Record`", + ), + }), + }, + responses: { + 200: { + description: "List of fleet nodes", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// fleet.get (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/fleet.get", + operationId: "trpcFleetGet", + summary: "fleet.get — Get fleet node", + description: `${trpcNote}\n\nReturns a single fleet node with its pipeline deployment statuses.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `id: string`"), + }), + }, + responses: { + 200: { + description: "Fleet node detail", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { description: "Node not found", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — Environment procedures +// --------------------------------------------------------------------------- + +// environment.list (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/environment.list", + operationId: "trpcEnvironmentList", + summary: "environment.list — List environments", + description: `${trpcNote}\n\nReturns all environments for a team.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `teamId: string`"), + }), + }, + responses: { + 200: { + description: "List of environments", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — Secret procedures +// --------------------------------------------------------------------------- + +// secret.list (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/secret.list", + operationId: "trpcSecretList", + summary: "secret.list — List secrets", + description: `${trpcNote}\n\nReturns secret metadata (id, name, timestamps) for all secrets in an environment. Secret values are never returned.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `environmentId: string`"), + }), + }, + responses: { + 200: { + description: "List of secret metadata", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// secret.create (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/secret.create", + operationId: "trpcSecretCreate", + summary: "secret.create — Create secret", + description: `${trpcNote}\n\nCreates a new encrypted secret in an environment.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + environmentId: z.string().openapi({ example: "clxyz123" }), + name: z.string().openapi({ + example: "DATABASE_PASSWORD", + description: "Must match `^[a-zA-Z0-9][a-zA-Z0-9_-]*$`", + }), + value: z.string().min(1).openapi({ example: "supersecret123" }), + }), + }).openapi({ description: "SuperJSON mutation body." }), + }, + }, + }, + }, + responses: { + 200: { + description: "Created secret metadata", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 400: { description: "Invalid name format", content: { "application/json": { schema: TrpcErrorSchema } } }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: EDITOR" }, + 409: { description: "Secret name already exists", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — Alert procedures +// --------------------------------------------------------------------------- + +// alert.listRules (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/alert.listRules", + operationId: "trpcAlertListRules", + summary: "alert.listRules — List alert rules", + description: `${trpcNote}\n\nReturns all alert rules for an environment.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `environmentId: string`"), + }), + }, + responses: { + 200: { + description: "List of alert rules", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + }, +}); + +// --------------------------------------------------------------------------- +// tRPC — Service Account procedures +// --------------------------------------------------------------------------- + +// serviceAccount.list (query → GET) +registry.registerPath({ + method: "get", + path: "/api/trpc/serviceAccount.list", + operationId: "trpcServiceAccountList", + summary: "serviceAccount.list — List service accounts", + description: `${trpcNote}\n\nReturns all service accounts for an environment. Minimum role: ADMIN.`, + tags: ["tRPC"], + security: trpcSecurity, + request: { + query: z.object({ + input: trpcInputQueryParam("Required fields: `environmentId: string`"), + }), + }, + responses: { + 200: { + description: "List of service accounts", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: ADMIN" }, + }, +}); + +// serviceAccount.create (mutation → POST) +registry.registerPath({ + method: "post", + path: "/api/trpc/serviceAccount.create", + operationId: "trpcServiceAccountCreate", + summary: "serviceAccount.create — Create service account", + description: + `${trpcNote}\n\nCreates a new service account and returns the raw API key (shown once only). ` + + "Minimum role: ADMIN.", + tags: ["tRPC"], + security: trpcSecurity, + request: { + body: { + required: true, + content: { + "application/json": { + schema: z.object({ + json: z.object({ + environmentId: z.string().openapi({ example: "clxyz123" }), + name: z.string().min(1).max(100).openapi({ example: "ci-deployer" }), + description: z.string().max(500).optional().openapi({ example: "CI/CD deployment account" }), + permissions: z.array( + z.enum([ + "pipelines.read", + "pipelines.deploy", + "nodes.read", + "nodes.manage", + "secrets.read", + "secrets.manage", + "alerts.read", + "alerts.manage", + "audit.read", + ]).openapi({}), + ).min(1).openapi({ example: ["pipelines.read", "pipelines.deploy"] }), + expiresInDays: z.number().int().min(1).optional().openapi({ example: 365 }), + }), + }).openapi({ description: "SuperJSON mutation body." }), + }, + }, + }, + }, + responses: { + 200: { + description: "Created service account with raw API key (shown once)", + content: { "application/json": { schema: TrpcResponseSchema } }, + }, + 400: { description: "Invalid input", content: { "application/json": { schema: TrpcErrorSchema } } }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden — minimum role: ADMIN" }, + 409: { description: "Service account name already exists", content: { "application/json": { schema: TrpcErrorSchema } } }, + }, +}); + +// --------------------------------------------------------------------------- +// Generator function +// --------------------------------------------------------------------------- + +let _cachedSpec: ReturnType | null = null; + +/** + * Generates (and caches) the OpenAPI 3.1 specification document for all + * VectorFlow REST v1 endpoints. + */ +export function generateOpenAPISpec() { + if (_cachedSpec) return _cachedSpec; + + const generator = new OpenApiGeneratorV31(registry.definitions); + _cachedSpec = generator.generateDocument({ + openapi: "3.1.0", + info: { + title: "VectorFlow REST API", + version: "1.0.0", + description: + "REST API for managing Vector data pipelines, fleet nodes, secrets, and alerts in VectorFlow.", + }, + servers: [ + { + url: "/api/v1", + description: "VectorFlow REST API v1", + }, + ], + }); + + return _cachedSpec; +} diff --git a/src/app/api/v1/openapi.json/route.ts b/src/app/api/v1/openapi.json/route.ts new file mode 100644 index 00000000..ac16c6d9 --- /dev/null +++ b/src/app/api/v1/openapi.json/route.ts @@ -0,0 +1,47 @@ +import { NextResponse } from "next/server"; +import { generateOpenAPISpec } from "@/app/api/v1/_lib/openapi-spec"; + +const CORS_HEADERS = { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "GET, OPTIONS", + "Access-Control-Allow-Headers": "Content-Type", +}; + +// Cache the serialized spec at module level so repeated requests are cheap +let _specJson: string | null = null; + +function getSpecJson(): string { + if (!_specJson) { + _specJson = JSON.stringify(generateOpenAPISpec(), null, 2); + } + return _specJson; +} + +/** + * GET /api/v1/openapi.json + * + * Public endpoint (no auth required) — returns the VectorFlow OpenAPI 3.1 + * specification as JSON. CORS headers allow external tooling (Swagger UI, + * Postman, etc.) to fetch the spec without credentials. + */ +export function GET() { + return new NextResponse(getSpecJson(), { + status: 200, + headers: { + "Content-Type": "application/json", + ...CORS_HEADERS, + }, + }); +} + +/** + * OPTIONS /api/v1/openapi.json + * + * CORS preflight handler. + */ +export function OPTIONS() { + return new NextResponse(null, { + status: 204, + headers: CORS_HEADERS, + }); +} diff --git a/src/app/api/webhooks/git/__tests__/pr-merge.test.ts b/src/app/api/webhooks/git/__tests__/pr-merge.test.ts new file mode 100644 index 00000000..c5668d38 --- /dev/null +++ b/src/app/api/webhooks/git/__tests__/pr-merge.test.ts @@ -0,0 +1,282 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; +import { mockDeep, mockReset, type DeepMockProxy } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; +import crypto from "crypto"; + +// ─── Module mocks ─────────────────────────────────────────────────────────── + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +vi.mock("@/server/services/crypto", () => ({ + decrypt: vi.fn((val: string) => `decrypted-${val}`), + encrypt: vi.fn(), +})); + +vi.mock("@/server/services/config-crypto", () => ({ + encryptNodeConfig: vi.fn((_: unknown, c: unknown) => c), + decryptNodeConfig: vi.fn((_: unknown, c: unknown) => c), +})); + +vi.mock("@/lib/config-generator", () => ({ + importVectorConfig: vi.fn().mockReturnValue({ nodes: [], edges: [], globalConfig: null }), + generateVectorYaml: vi.fn(), +})); + +vi.mock("@/server/services/audit", () => ({ + writeAuditLog: vi.fn(), +})); + +vi.mock("@/server/services/promotion-service", () => ({ + executePromotion: vi.fn().mockResolvedValue({ pipelineId: "new-pipe", pipelineName: "My Pipeline" }), + preflightSecrets: vi.fn(), + generateDiffPreview: vi.fn(), +})); + +vi.mock("@/server/services/gitops-promotion", () => ({ + createPromotionPR: vi.fn(), +})); + +// ─── Import SUT + mocks ───────────────────────────────────────────────────── + +import { POST } from "../route"; +import { prisma } from "@/lib/prisma"; +import { executePromotion } from "@/server/services/promotion-service"; + +const prismaMock = prisma as unknown as DeepMockProxy; + +// ─── Helpers ──────────────────────────────────────────────────────────────── + +const WEBHOOK_SECRET = "test-webhook-secret"; +const ENCRYPTED_SECRET = "enc-secret"; + +function makeHmacSignature(body: string, secret: string): string { + return ( + "sha256=" + crypto.createHmac("sha256", `decrypted-${secret}`).update(body).digest("hex") + ); +} + +function makeEnvironment(overrides: Record = {}) { + return { + id: "env-1", + name: "Production", + teamId: "team-1", + gitOpsMode: "promotion", + gitWebhookSecret: ENCRYPTED_SECRET, + gitRepoUrl: "https://github.com/myorg/myrepo", + gitBranch: "main", + gitToken: "enc-token", + requireDeployApproval: false, + ...overrides, + }; +} + +function makePrPayload(overrides: { + action?: string; + merged?: boolean; + body?: string; +} = {}) { + const { action = "closed", merged = true, body: prBody = "\n\nPromoted by VectorFlow." } = overrides; + return { + action, + pull_request: { + number: 42, + merged, + body: prBody, + html_url: "https://github.com/myorg/myrepo/pull/42", + }, + }; +} + +function makeRequest( + payload: Record, + eventType: string, + signatureOverride?: string, +): Request { + const body = JSON.stringify(payload); + const signature = signatureOverride ?? makeHmacSignature(body, ENCRYPTED_SECRET); + return new Request("http://localhost/api/webhooks/git", { + method: "POST", + headers: { + "Content-Type": "application/json", + "X-Hub-Signature-256": signature, + "X-GitHub-Event": eventType, + }, + body, + }); +} + +// ─── Tests ─────────────────────────────────────────────────────────────────── + +describe("Git webhook — PR merge handler", () => { + beforeEach(() => { + mockReset(prismaMock); + vi.clearAllMocks(); + }); + + it("responds pong to ping event without checking signature", async () => { + const req = new Request("http://localhost/api/webhooks/git", { + method: "POST", + headers: { "Content-Type": "application/json", "X-GitHub-Event": "ping" }, + body: JSON.stringify({ zen: "Testing is good." }), + }); + + const res = await POST(req as never); + const json = await res.json(); + + expect(res.status).toBe(200); + expect(json.message).toBe("pong"); + expect(prismaMock.environment.findMany).not.toHaveBeenCalled(); + }); + + it("returns 401 when signature is missing", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + const req = new Request("http://localhost/api/webhooks/git", { + method: "POST", + headers: { "Content-Type": "application/json", "X-GitHub-Event": "pull_request" }, + body: JSON.stringify(makePrPayload()), + }); + + const res = await POST(req as never); + expect(res.status).toBe(401); + }); + + it("returns 401 when HMAC signature is invalid", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + + const req = makeRequest(makePrPayload(), "pull_request", "sha256=badbadbadbad"); + const res = await POST(req as never); + + expect(res.status).toBe(401); + }); + + it("includes both promotion and bidirectional environments in HMAC lookup", async () => { + prismaMock.environment.findMany.mockResolvedValue([] as never); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 0 } as never); + + const payload = makePrPayload(); + const body = JSON.stringify(payload); + const signature = makeHmacSignature(body, ENCRYPTED_SECRET); + const req = new Request("http://localhost/api/webhooks/git", { + method: "POST", + headers: { + "Content-Type": "application/json", + "X-Hub-Signature-256": signature, + "X-GitHub-Event": "pull_request", + }, + body, + }); + + await POST(req as never); + + expect(prismaMock.environment.findMany).toHaveBeenCalledWith( + expect.objectContaining({ + where: expect.objectContaining({ + gitOpsMode: { in: ["bidirectional", "promotion"] }, + }), + }), + ); + }); + + it("triggers executePromotion for merged PR with VF promotion ID", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + prismaMock.promotionRequest.findUnique.mockResolvedValue({ + id: "req123abc456", + promotedById: "user-1", + } as never); + + const req = makeRequest(makePrPayload(), "pull_request"); + const res = await POST(req as never); + const json = await res.json(); + + expect(res.status).toBe(200); + expect(json.deployed).toBe(true); + expect(json.promotionRequestId).toBe("req123abc456"); + expect(executePromotion).toHaveBeenCalledWith("req123abc456", "user-1"); + }); + + it("uses system as executor when promotedById is null", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + prismaMock.promotionRequest.findUnique.mockResolvedValue({ + id: "req123abc456", + promotedById: null, + } as never); + + const req = makeRequest(makePrPayload(), "pull_request"); + await POST(req as never); + + expect(executePromotion).toHaveBeenCalledWith("req123abc456", "system"); + }); + + it("ignores PR closed without merge (merged = false)", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + + const req = makeRequest(makePrPayload({ merged: false }), "pull_request"); + const res = await POST(req as never); + const json = await res.json(); + + expect(res.status).toBe(200); + expect(json.message).toContain("closed without merge"); + expect(executePromotion).not.toHaveBeenCalled(); + }); + + it("ignores PR opened event (action != closed)", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + + const req = makeRequest(makePrPayload({ action: "opened", merged: false }), "pull_request"); + const res = await POST(req as never); + const json = await res.json(); + + expect(res.status).toBe(200); + expect(json.message).toContain("Not a closed event"); + expect(executePromotion).not.toHaveBeenCalled(); + }); + + it("ignores PR body without VF promotion ID", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + + const req = makeRequest( + makePrPayload({ body: "Just a regular PR with no VF ID." }), + "pull_request", + ); + const res = await POST(req as never); + const json = await res.json(); + + expect(res.status).toBe(200); + expect(json.message).toContain("No VectorFlow promotion ID"); + expect(executePromotion).not.toHaveBeenCalled(); + }); + + it("idempotency guard: ignores already-deployed promotion (updateMany count = 0)", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 0 } as never); + + const req = makeRequest(makePrPayload(), "pull_request"); + const res = await POST(req as never); + const json = await res.json(); + + expect(res.status).toBe(200); + expect(json.message).toContain("already processed"); + expect(executePromotion).not.toHaveBeenCalled(); + }); + + it("atomic updateMany checks status = AWAITING_PR_MERGE", async () => { + prismaMock.environment.findMany.mockResolvedValue([makeEnvironment()] as never); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + prismaMock.promotionRequest.findUnique.mockResolvedValue({ + id: "req123abc456", + promotedById: "user-1", + } as never); + + const req = makeRequest(makePrPayload(), "pull_request"); + await POST(req as never); + + expect(prismaMock.promotionRequest.updateMany).toHaveBeenCalledWith({ + where: { id: "req123abc456", status: "AWAITING_PR_MERGE" }, + data: { status: "DEPLOYING" }, + }); + }); +}); diff --git a/src/app/api/webhooks/git/route.ts b/src/app/api/webhooks/git/route.ts index f69189f8..6ffe2e5a 100644 --- a/src/app/api/webhooks/git/route.ts +++ b/src/app/api/webhooks/git/route.ts @@ -6,18 +6,29 @@ import { decrypt } from "@/server/services/crypto"; import { encryptNodeConfig } from "@/server/services/config-crypto"; import { writeAuditLog } from "@/server/services/audit"; import { ComponentKind, Prisma } from "@/generated/prisma"; +import { executePromotion } from "@/server/services/promotion-service"; export async function POST(req: NextRequest) { const body = await req.text(); const signature = req.headers.get("x-hub-signature-256"); + const eventType = req.headers.get("x-github-event") ?? "push"; + + // Handle GitHub ping (sent when webhook is first registered) + if (eventType === "ping") { + return NextResponse.json({ message: "pong" }, { status: 200 }); + } if (!signature) { return NextResponse.json({ error: "Missing signature" }, { status: 401 }); } - // 1. Find environments with bidirectional gitOps + // 1. Find environments with gitOps webhook configured. + // Includes both bidirectional (push) and promotion (PR-based) modes. const environments = await prisma.environment.findMany({ - where: { gitOpsMode: "bidirectional", gitWebhookSecret: { not: null } }, + where: { + gitOpsMode: { in: ["bidirectional", "promotion"] }, + gitWebhookSecret: { not: null }, + }, }); // 2. Verify HMAC signature against each environment's webhook secret @@ -45,7 +56,7 @@ export async function POST(req: NextRequest) { return NextResponse.json({ error: "Invalid signature" }, { status: 401 }); } - // 3. Parse GitHub push event + // 3. Parse payload let payload: Record; try { payload = JSON.parse(body); @@ -55,6 +66,57 @@ export async function POST(req: NextRequest) { { status: 400 }, ); } + + // ─── pull_request event: GitOps promotion merge trigger ────────────────── + if (eventType === "pull_request") { + // Only handle closed+merged — reject closed-without-merge + if (payload.action !== "closed") { + return NextResponse.json({ message: "Not a closed event, ignored" }, { status: 200 }); + } + const pr = payload.pull_request as Record | undefined; + if (!pr?.merged) { + return NextResponse.json({ message: "PR closed without merge, ignored" }, { status: 200 }); + } + + // Extract VF promotion request ID from PR body + const prBody = (pr.body as string) ?? ""; + const match = prBody.match(//); + if (!match) { + return NextResponse.json( + { message: "No VectorFlow promotion ID in PR body, ignored" }, + { status: 200 }, + ); + } + const promotionRequestId = match[1]; + + // Atomic idempotency guard — prevents double-deploy on GitHub retry + const updated = await prisma.promotionRequest.updateMany({ + where: { id: promotionRequestId, status: "AWAITING_PR_MERGE" }, + data: { status: "DEPLOYING" }, + }); + + if (updated.count === 0) { + // Already deployed, not found, or not in the right state — safe to ignore + return NextResponse.json( + { message: "Promotion already processed or not found" }, + { status: 200 }, + ); + } + + // Load the original promoter for audit attribution + const promotionRequest = await prisma.promotionRequest.findUnique({ + where: { id: promotionRequestId }, + select: { promotedById: true }, + }); + + // Execute the promotion (the promoter is the logical actor) + const executorId = promotionRequest?.promotedById ?? "system"; + await executePromotion(promotionRequestId, executorId); + + return NextResponse.json({ deployed: true, promotionRequestId }); + } + + // ─── push event: Bidirectional GitOps config import ────────────────────── const ref: string | undefined = payload.ref as string | undefined; // "refs/heads/main" const branch = ref?.replace("refs/heads/", ""); diff --git a/src/components/environment/git-sync-section.tsx b/src/components/environment/git-sync-section.tsx index a76983cc..500b24c7 100644 --- a/src/components/environment/git-sync-section.tsx +++ b/src/components/environment/git-sync-section.tsx @@ -93,7 +93,7 @@ export function GitSyncSection({ gitRepoUrl: repoUrl || null, gitBranch: branch || null, gitToken: token || undefined, // Only send if user entered a new token - gitOpsMode: selectedGitOpsMode as "off" | "push" | "bidirectional", + gitOpsMode: selectedGitOpsMode as "off" | "push" | "bidirectional" | "promotion", }, { onSuccess: () => { @@ -230,12 +230,14 @@ export function GitSyncSection({ Off Push Only (deploy commits YAML to repo) Bi-directional (push + git webhooks import changes) + Promotion (PR-based promotion via GitHub)

{selectedGitOpsMode === "off" && "Git sync is disabled."} {selectedGitOpsMode === "push" && "Pipeline YAML is committed to the repo on deploy. Changes in git are not pulled back."} {selectedGitOpsMode === "bidirectional" && "Pipeline YAML is committed on deploy AND pushes to the repo trigger pipeline imports via webhook."} + {selectedGitOpsMode === "promotion" && "Promoting a pipeline creates a GitHub pull request. Merging the PR automatically deploys the promoted config to the target environment."}

@@ -306,6 +308,104 @@ export function GitSyncSection({ )} + {/* Webhook configuration for promotion (PR-based) mode */} + {selectedGitOpsMode === "promotion" && ( +
+
+ + GitOps Promotion Setup +
+

+ When a user promotes a pipeline, VectorFlow will create a pull request in your GitHub repository. + Merging the PR automatically deploys the promoted config to this environment. + Complete the steps below to finish the setup. +

+ +
    +
  1. + 1 + Save this configuration (Repository URL, Branch, and Access Token) using the Save button below. A webhook secret will be generated. +
  2. +
  3. + 2 + + In GitHub, go to your repository{" "} + Settings → Webhooks → Add webhook. + +
  4. +
  5. + 3 + + Set Payload URL to the webhook URL below, set{" "} + Content type to{" "} + application/json, and paste the + webhook secret. + +
  6. +
  7. + 4 + + Under Which events would you like to trigger this webhook?, select{" "} + Let me select individual events and check{" "} + Pull requests. Uncheck push events. + +
  8. +
+ +
+ +
+ + +
+
+ + {webhookSecretFromMutation && ( +
+ +
+ + +
+

+ Save this secret — it is only shown once. Paste it into your GitHub webhook settings. +

+
+ )} + {!webhookSecretFromMutation && hasWebhookSecret && ( +

+ Webhook secret is configured. For security, the secret is only shown once when first generated. + To rotate the secret, switch GitOps mode to Off and back to Promotion. +

+ )} + {!webhookSecretFromMutation && !hasWebhookSecret && ( +

+ Save settings above to generate a webhook secret. +

+ )} +
+ )} +
+ } + > + + + ); +} diff --git a/src/components/fleet/fleet-health-toolbar.tsx b/src/components/fleet/fleet-health-toolbar.tsx new file mode 100644 index 00000000..db4c355b --- /dev/null +++ b/src/components/fleet/fleet-health-toolbar.tsx @@ -0,0 +1,233 @@ +"use client"; + +import { Check, ChevronsUpDown, X } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Badge } from "@/components/ui/badge"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { + Command, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from "@/components/ui/command"; +import { cn } from "@/lib/utils"; + +export interface FleetHealthToolbarProps { + groupFilter: string | null; + onGroupFilterChange: (id: string | null) => void; + labelFilter: Record; + onLabelFilterChange: (labels: Record) => void; + complianceFilter: "all" | "compliant" | "non-compliant"; + onComplianceFilterChange: ( + status: "all" | "compliant" | "non-compliant", + ) => void; + groups: Array<{ id: string; name: string }>; + availableLabels: Record; +} + +const COMPLIANCE_OPTIONS = [ + { id: "all" as const, label: "All" }, + { id: "compliant" as const, label: "Compliant" }, + { id: "non-compliant" as const, label: "Non-compliant" }, +]; + +export function FleetHealthToolbar({ + groupFilter, + onGroupFilterChange, + labelFilter, + onLabelFilterChange, + complianceFilter, + onComplianceFilterChange, + groups, + availableLabels, +}: FleetHealthToolbarProps) { + const labelKeys = Object.keys(availableLabels).sort(); + const activeLabelCount = Object.keys(labelFilter).length; + const hasActiveFilters = + groupFilter !== null || + activeLabelCount > 0 || + complianceFilter !== "all"; + + const clearAll = () => { + onGroupFilterChange(null); + onLabelFilterChange({}); + onComplianceFilterChange("all"); + }; + + const selectedGroup = groups.find((g) => g.id === groupFilter); + + return ( +
+ {/* Group filter dropdown */} + + + + + + + + + No groups found. + + onGroupFilterChange(null)} + > + + All Groups + + {groups.map((group) => ( + + onGroupFilterChange( + groupFilter === group.id ? null : group.id, + ) + } + > + + {group.name} + + ))} + + + + + + +
+ + {/* Compliance toggle pills */} +
+ {COMPLIANCE_OPTIONS.map((opt) => ( + + ))} +
+ + {/* Label filter */} + {labelKeys.length > 0 && ( + <> +
+ + + + + + + + + No labels found. + {labelKeys.map((key) => ( + + {availableLabels[key].map((value) => { + const isActive = labelFilter[key] === value; + return ( + { + const next = { ...labelFilter }; + if (isActive) { + delete next[key]; + } else { + next[key] = value; + } + onLabelFilterChange(next); + }} + > + + {value} + + ); + })} + + ))} + + + {activeLabelCount > 0 && ( +
+ +
+ )} +
+
+ + )} + + {/* Clear all */} + {hasActiveFilters && ( + + )} +
+ ); +} diff --git a/src/components/fleet/node-group-detail-table.tsx b/src/components/fleet/node-group-detail-table.tsx new file mode 100644 index 00000000..1557fa17 --- /dev/null +++ b/src/components/fleet/node-group-detail-table.tsx @@ -0,0 +1,116 @@ +"use client"; + +import Link from "next/link"; +import { useQuery } from "@tanstack/react-query"; +import { useTRPC } from "@/trpc/client"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { Badge } from "@/components/ui/badge"; +import { StatusBadge } from "@/components/ui/status-badge"; +import { nodeStatusVariant, nodeStatusLabel } from "@/lib/status"; +import { formatLastSeen } from "@/lib/format"; + +interface NodeGroupDetailTableProps { + groupId: string; + environmentId: string; +} + +export function NodeGroupDetailTable({ + groupId, + environmentId, +}: NodeGroupDetailTableProps) { + const trpc = useTRPC(); + + const nodesQuery = useQuery( + trpc.nodeGroup.nodesInGroup.queryOptions({ groupId, environmentId }), + ); + + if (nodesQuery.isLoading) { + return ( +
+ {Array.from({ length: 3 }).map((_, i) => ( + + ))} +
+ ); + } + + const nodes = nodesQuery.data ?? []; + + if (nodes.length === 0) { + return ( +
+

+ No nodes in this group. +

+
+ ); + } + + return ( +
+ + + + Name + Status + CPU Load + Last Seen + Compliance + + + + {nodes.map((node) => ( + + + + {node.name} + + + + + {nodeStatusLabel(node.status)} + + + + {node.cpuLoad != null + ? node.cpuLoad.toFixed(1) + : "--"} + + + {formatLastSeen(node.lastSeen)} + + + {node.labelCompliant === false ? ( + + Non-compliant + + ) : ( + + Compliant + + )} + + + ))} + +
+
+ ); +} diff --git a/src/components/fleet/node-group-health-card.tsx b/src/components/fleet/node-group-health-card.tsx new file mode 100644 index 00000000..76314e34 --- /dev/null +++ b/src/components/fleet/node-group-health-card.tsx @@ -0,0 +1,137 @@ +"use client"; + +import { ChevronDown } from "lucide-react"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@/components/ui/collapsible"; +import { NodeGroupDetailTable } from "@/components/fleet/node-group-detail-table"; +import { cn } from "@/lib/utils"; + +interface NodeGroupHealthCardProps { + group: { + id: string; + name: string; + environmentId: string; + totalNodes: number; + onlineCount: number; + alertCount: number; + complianceRate: number; + }; + isExpanded: boolean; + onToggle: () => void; + labelFilterActive?: boolean; +} + +export function NodeGroupHealthCard({ + group, + isExpanded, + onToggle, + labelFilterActive = false, +}: NodeGroupHealthCardProps) { + const allOnline = group.onlineCount === group.totalNodes; + const hasAlerts = group.alertCount > 0; + const fullyCompliant = group.complianceRate === 100; + + return ( + + + + + + + +
+ +
+
+
+
+ ); +} diff --git a/src/components/fleet/node-group-management.tsx b/src/components/fleet/node-group-management.tsx new file mode 100644 index 00000000..2e57b986 --- /dev/null +++ b/src/components/fleet/node-group-management.tsx @@ -0,0 +1,563 @@ +"use client"; + +import { useState } from "react"; +import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; +import { useTRPC } from "@/trpc/client"; +import { toast } from "sonner"; +import { Plus, Pencil, Trash2, X, AlertTriangle, Loader2 } from "lucide-react"; + +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, +} from "@/components/ui/card"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { Badge } from "@/components/ui/badge"; +import { ConfirmDialog } from "@/components/confirm-dialog"; +import { Skeleton } from "@/components/ui/skeleton"; + +// ─── Types ────────────────────────────────────────────────────────────────── + +interface KVPair { + key: string; + value: string; +} + +interface NodeGroupFormState { + name: string; + criteria: KVPair[]; + labelTemplate: KVPair[]; + requiredLabels: string[]; + requiredLabelInput: string; +} + +const emptyForm = (): NodeGroupFormState => ({ + name: "", + criteria: [], + labelTemplate: [], + requiredLabels: [], + requiredLabelInput: "", +}); + +// ─── Key-Value Editor ──────────────────────────────────────────────────────── + +function KVEditor({ + pairs, + onChange, + placeholder, +}: { + pairs: KVPair[]; + onChange: (pairs: KVPair[]) => void; + placeholder?: string; +}) { + const addRow = () => onChange([...pairs, { key: "", value: "" }]); + const removeRow = (i: number) => onChange(pairs.filter((_, idx) => idx !== i)); + const updateRow = (i: number, field: "key" | "value", val: string) => { + const updated = pairs.map((p, idx) => + idx === i ? { ...p, [field]: val } : p, + ); + onChange(updated); + }; + + return ( +
+ {pairs.map((pair, i) => ( +
+ updateRow(i, "key", e.target.value)} + placeholder="key" + className="h-7 text-xs flex-1" + /> + = + updateRow(i, "value", e.target.value)} + placeholder="value" + className="h-7 text-xs flex-1" + /> + +
+ ))} + +
+ ); +} + +// ─── Tag Input ─────────────────────────────────────────────────────────────── + +function TagInput({ + tags, + inputValue, + onTagsChange, + onInputChange, +}: { + tags: string[]; + inputValue: string; + onTagsChange: (tags: string[]) => void; + onInputChange: (value: string) => void; +}) { + const addTag = (raw: string) => { + const trimmed = raw.trim(); + if (!trimmed) return; + const newTags = trimmed + .split(",") + .map((t) => t.trim()) + .filter((t) => t && !tags.includes(t)); + if (newTags.length > 0) onTagsChange([...tags, ...newTags]); + onInputChange(""); + }; + + return ( +
+ {tags.length > 0 && ( +
+ {tags.map((tag) => ( + + {tag} + + + ))} +
+ )} +
+ onInputChange(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.preventDefault(); + addTag(inputValue); + } else if (e.key === ",") { + e.preventDefault(); + addTag(inputValue); + } + }} + placeholder="label-key (Enter or comma to add)" + className="h-7 text-xs" + /> + +
+
+ ); +} + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +function kvPairsToRecord(pairs: KVPair[]): Record { + return Object.fromEntries( + pairs.filter((p) => p.key.trim()).map((p) => [p.key.trim(), p.value.trim()]), + ); +} + +function recordToKVPairs(record: Record): KVPair[] { + return Object.entries(record).map(([key, value]) => ({ key, value })); +} + +// ─── Group Form ────────────────────────────────────────────────────────────── + +function GroupForm({ + form, + onChange, + onSubmit, + onCancel, + isPending, + submitLabel, +}: { + form: NodeGroupFormState; + onChange: (form: NodeGroupFormState) => void; + onSubmit: () => void; + onCancel: () => void; + isPending: boolean; + submitLabel: string; +}) { + const criteriaEmpty = form.criteria.length === 0 || form.criteria.every((p) => !p.key.trim()); + + return ( +
+ {/* Name */} +
+ + onChange({ ...form, name: e.target.value })} + placeholder="e.g. US East Production" + className="h-8" + maxLength={100} + autoFocus + /> +
+ + {/* Criteria */} +
+ + onChange({ ...form, criteria: pairs })} + placeholder="Add criterion" + /> + {criteriaEmpty && ( +
+ + This group will match all enrolling nodes +
+ )} +
+ + {/* Label Template */} +
+ +

+ Labels applied automatically to nodes that match this group's criteria at enrollment. +

+ onChange({ ...form, labelTemplate: pairs })} + placeholder="Add label" + /> +
+ + {/* Required Labels */} +
+ +

+ Label keys every node should have. Missing keys show a Non-compliant badge on the fleet list. +

+ onChange({ ...form, requiredLabels: tags })} + onInputChange={(val) => onChange({ ...form, requiredLabelInput: val })} + /> +
+ + {/* Actions */} +
+ + +
+
+ ); +} + +// ─── Main Component ─────────────────────────────────────────────────────────── + +interface NodeGroupManagementProps { + environmentId: string; +} + +export function NodeGroupManagement({ environmentId }: NodeGroupManagementProps) { + const trpc = useTRPC(); + const queryClient = useQueryClient(); + + const groupsQuery = useQuery( + trpc.nodeGroup.list.queryOptions({ environmentId }), + ); + const groups = groupsQuery.data ?? []; + + // --- Create --- + const [showCreate, setShowCreate] = useState(false); + const [createForm, setCreateForm] = useState(emptyForm()); + + const createMutation = useMutation( + trpc.nodeGroup.create.mutationOptions({ + onSuccess: () => { + toast.success("Node group created"); + setShowCreate(false); + setCreateForm(emptyForm()); + queryClient.invalidateQueries({ queryKey: trpc.nodeGroup.list.queryKey() }); + }, + onError: (err) => toast.error(err.message), + }), + ); + + const handleCreate = () => { + if (!createForm.name.trim()) return; + createMutation.mutate({ + environmentId, + name: createForm.name.trim(), + criteria: kvPairsToRecord(createForm.criteria), + labelTemplate: kvPairsToRecord(createForm.labelTemplate), + requiredLabels: createForm.requiredLabels, + }); + }; + + // --- Edit --- + const [editingId, setEditingId] = useState(null); + const [editForm, setEditForm] = useState(emptyForm()); + + const updateMutation = useMutation( + trpc.nodeGroup.update.mutationOptions({ + onSuccess: () => { + toast.success("Node group updated"); + setEditingId(null); + queryClient.invalidateQueries({ queryKey: trpc.nodeGroup.list.queryKey() }); + }, + onError: (err) => toast.error(err.message), + }), + ); + + const startEdit = (group: { + id: string; + name: string; + criteria: Record; + labelTemplate: Record; + requiredLabels: string[]; + }) => { + setEditingId(group.id); + setEditForm({ + name: group.name, + criteria: recordToKVPairs(group.criteria), + labelTemplate: recordToKVPairs(group.labelTemplate), + requiredLabels: group.requiredLabels, + requiredLabelInput: "", + }); + setShowCreate(false); + }; + + const handleUpdate = () => { + if (!editingId || !editForm.name.trim()) return; + updateMutation.mutate({ + id: editingId, + name: editForm.name.trim(), + criteria: kvPairsToRecord(editForm.criteria), + labelTemplate: kvPairsToRecord(editForm.labelTemplate), + requiredLabels: editForm.requiredLabels, + }); + }; + + // --- Delete --- + const [deleteTarget, setDeleteTarget] = useState<{ id: string; name: string } | null>(null); + + const deleteMutation = useMutation( + trpc.nodeGroup.delete.mutationOptions({ + onSuccess: () => { + toast.success("Node group deleted"); + setDeleteTarget(null); + queryClient.invalidateQueries({ queryKey: trpc.nodeGroup.list.queryKey() }); + }, + onError: (err) => toast.error(err.message), + }), + ); + + return ( + <> + + +
+
+ Node Groups + + Segment your fleet into logical clusters. Groups define label selectors, templates applied at enrollment, and required label keys for compliance. + +
+ +
+
+ + {/* Create form */} + {showCreate && ( + { setShowCreate(false); setCreateForm(emptyForm()); }} + isPending={createMutation.isPending} + submitLabel="Create Group" + /> + )} + + {/* Loading skeleton */} + {groupsQuery.isLoading && ( +
+ + +
+ )} + + {/* Empty state */} + {!groupsQuery.isLoading && groups.length === 0 && !showCreate && ( +

+ No node groups yet. Click "Add Group" to create one. +

+ )} + + {/* Group list */} +
+ {groups.map((group) => + editingId === group.id ? ( + setEditingId(null)} + isPending={updateMutation.isPending} + submitLabel="Save Changes" + /> + ) : ( +
+
+ {group.name} + + {/* Criteria */} + {Object.keys(group.criteria).length > 0 ? ( +
+ Criteria: + {Object.entries(group.criteria).map(([k, v]) => ( + + {k}={v} + + ))} +
+ ) : ( +
+ + Matches all enrolling nodes +
+ )} + + {/* Label Template */} + {Object.keys(group.labelTemplate).length > 0 && ( +
+ Template: + {Object.entries(group.labelTemplate).map(([k, v]) => ( + + {k}={v} + + ))} +
+ )} + + {/* Required Labels */} + {group.requiredLabels.length > 0 && ( +
+ Required: + {group.requiredLabels.map((label) => ( + + {label} + + ))} +
+ )} +
+ +
+ + +
+
+ ), + )} +
+
+
+ + { if (!v) setDeleteTarget(null); }} + title="Delete node group?" + description={ + <> + Deleting "{deleteTarget?.name}" will not affect existing nodes, but nodes will + no longer be auto-labeled or compliance-checked against this group. + + } + confirmLabel="Delete" + variant="destructive" + isPending={deleteMutation.isPending} + pendingLabel="Deleting..." + onConfirm={() => { + if (deleteTarget) deleteMutation.mutate({ id: deleteTarget.id }); + }} + /> + + ); +} diff --git a/src/components/flow/component-palette.tsx b/src/components/flow/component-palette.tsx index 93917798..6ee54147 100644 --- a/src/components/flow/component-palette.tsx +++ b/src/components/flow/component-palette.tsx @@ -5,7 +5,7 @@ import { ChevronDown, ChevronRight, Search, PackageOpen, Link2 as LinkIcon } fro import { Input } from "@/components/ui/input"; import { Badge } from "@/components/ui/badge"; import { cn } from "@/lib/utils"; -import { VECTOR_CATALOG } from "@/lib/vector/catalog"; +import { getVectorCatalog } from "@/lib/vector/catalog"; import type { VectorComponentDef } from "@/lib/vector/types"; import { getIcon } from "./node-icon"; import { useQuery } from "@tanstack/react-query"; @@ -190,10 +190,10 @@ export function ComponentPalette() { ) ); const filtered = useMemo(() => { - if (!search.trim()) return VECTOR_CATALOG; + if (!search.trim()) return getVectorCatalog(); const term = search.toLowerCase().trim(); - return VECTOR_CATALOG.filter( + return getVectorCatalog().filter( (def) => def.displayName.toLowerCase().includes(term) || def.type.toLowerCase().includes(term) || @@ -324,7 +324,7 @@ export function ComponentPalette() { const kindKey = sc.kind.toLowerCase() as VectorComponentDef["kind"]; const meta = kindMeta[kindKey] ?? kindMeta.transform; const Icon = getIcon( - VECTOR_CATALOG.find((d) => d.type === sc.componentType)?.icon + getVectorCatalog().find((d) => d.type === sc.componentType)?.icon ); return (
s.selectedTeamId); const count = selectedIds.length; const [deployOpen, setDeployOpen] = useState(false); const [changelog, setChangelog] = useState(""); const [deleteOpen, setDeleteOpen] = useState(false); + const [addTagsOpen, setAddTagsOpen] = useState(false); + const [removeTagsOpen, setRemoveTagsOpen] = useState(false); + const [selectedTags, setSelectedTags] = useState([]); + const [customTagInput, setCustomTagInput] = useState(""); const [resultSummary, setResultSummary] = useState<{ action: string; total: number; @@ -61,6 +69,15 @@ export function BulkActionBar({ selectedIds, onClearSelection }: BulkActionBarPr } }; + // --- Available tags from team --- + const availableTagsQuery = useQuery( + trpc.team.getAvailableTags.queryOptions( + { teamId: selectedTeamId! }, + { enabled: !!selectedTeamId && (addTagsOpen || removeTagsOpen) }, + ), + ); + const availableTags = availableTagsQuery.data ?? []; + const bulkDeployMutation = useMutation( trpc.pipeline.bulkDeploy.mutationOptions({ onSuccess: (data) => handleResult("Deploy", data), @@ -82,8 +99,67 @@ export function BulkActionBar({ selectedIds, onClearSelection }: BulkActionBarPr }), ); + const bulkAddTagsMutation = useMutation( + trpc.pipeline.bulkAddTags.mutationOptions({ + onSuccess: (data) => { + handleResult("Add Tags", data); + setAddTagsOpen(false); + setSelectedTags([]); + setCustomTagInput(""); + }, + onError: (err) => toast.error(`Failed to add tags: ${err.message}`), + }), + ); + + const bulkRemoveTagsMutation = useMutation( + trpc.pipeline.bulkRemoveTags.mutationOptions({ + onSuccess: (data) => { + handleResult("Remove Tags", data); + setRemoveTagsOpen(false); + setSelectedTags([]); + setCustomTagInput(""); + }, + onError: (err) => toast.error(`Failed to remove tags: ${err.message}`), + }), + ); + const isPending = - bulkDeployMutation.isPending || bulkUndeployMutation.isPending || bulkDeleteMutation.isPending; + bulkDeployMutation.isPending || + bulkUndeployMutation.isPending || + bulkDeleteMutation.isPending || + bulkAddTagsMutation.isPending || + bulkRemoveTagsMutation.isPending; + + const toggleTag = (tag: string) => { + setSelectedTags((prev) => + prev.includes(tag) ? prev.filter((t) => t !== tag) : [...prev, tag], + ); + }; + + // Parse custom tag input (comma-separated) and deduplicate with selectedTags + const customTags = customTagInput + .split(",") + .map((t) => t.trim()) + .filter((t) => t.length > 0); + const allSelectedTags = [...new Set([...selectedTags, ...customTags])]; + + const handleAddTagsConfirm = () => { + if (allSelectedTags.length === 0) return; + const toastId = toast.loading("Adding tags..."); + bulkAddTagsMutation.mutate( + { pipelineIds: selectedIds, tags: allSelectedTags }, + { onSettled: () => toast.dismiss(toastId) }, + ); + }; + + const handleRemoveTagsConfirm = () => { + if (allSelectedTags.length === 0) return; + const toastId = toast.loading("Removing tags..."); + bulkRemoveTagsMutation.mutate( + { pipelineIds: selectedIds, tags: allSelectedTags }, + { onSettled: () => toast.dismiss(toastId) }, + ); + }; return ( <> @@ -143,6 +219,46 @@ export function BulkActionBar({ selectedIds, onClearSelection }: BulkActionBarPr
+ + + + +
+ + + + + + + {/* Remove Tags dialog */} + { if (!v) setRemoveTagsOpen(false); }}> + + + Remove Tags from {count} pipeline{count !== 1 ? "s" : ""} + +
+ {availableTags.length > 0 ? ( +
+

Select tags to remove:

+
+ {availableTags.map((tag) => ( + + ))} +
+
+ ) : ( +
+

+ Enter tags to remove (comma-separated): +

+ setCustomTagInput(e.target.value)} + placeholder="production, backend, v2" + autoFocus + /> +
+ )} +
+ + + + +
+
+ {/* Partial failure result summary */} setResultSummary(null)}> diff --git a/src/components/pipeline/manage-groups-dialog.tsx b/src/components/pipeline/manage-groups-dialog.tsx index 43dae048..9097be8a 100644 --- a/src/components/pipeline/manage-groups-dialog.tsx +++ b/src/components/pipeline/manage-groups-dialog.tsx @@ -14,6 +14,13 @@ import { import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { ConfirmDialog } from "@/components/confirm-dialog"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; const GROUP_COLORS = [ "#6366f1", "#8b5cf6", "#ec4899", "#f43f5e", @@ -43,9 +50,29 @@ export function ManageGroupsDialog({ ); const groups = groupsQuery.data ?? []; + // Compute group depths for parent selector (filter out depth-3 groups, they can't have children) + const groupDepths = new Map(); + function computeDepths() { + const byId = new Map(groups.map((g) => [g.id, g])); + for (const g of groups) { + let depth = 1; + let current: typeof g | undefined = g; + while (current?.parentId) { + depth++; + current = byId.get(current.parentId); + } + groupDepths.set(g.id, depth); + } + } + computeDepths(); + + // Groups that can be parents (depth 1 or 2 — children would be depth 2 or 3 max) + const eligibleParents = groups.filter((g) => (groupDepths.get(g.id) ?? 1) < 3); + // --- Create --- const [newName, setNewName] = useState(""); const [newColor, setNewColor] = useState(GROUP_COLORS[0]); + const [newParentId, setNewParentId] = useState(""); const createMutation = useMutation( trpc.pipelineGroup.create.mutationOptions({ @@ -53,6 +80,7 @@ export function ManageGroupsDialog({ toast.success("Group created"); setNewName(""); setNewColor(GROUP_COLORS[0]); + setNewParentId(""); queryClient.invalidateQueries({ queryKey: trpc.pipelineGroup.list.queryKey() }); }, onError: (err) => toast.error(err.message), @@ -108,7 +136,7 @@ export function ManageGroupsDialog({ {/* Create form */}
{ e.preventDefault(); if (!newName.trim()) return; @@ -116,29 +144,55 @@ export function ManageGroupsDialog({ environmentId, name: newName.trim(), color: newColor, + parentId: newParentId || undefined, }); }} > - - setNewName(e.target.value)} - placeholder="New group name..." - className="h-8 text-sm" - maxLength={100} - /> - +
+ + setNewName(e.target.value)} + placeholder="New group name..." + className="h-8 text-sm" + maxLength={100} + /> + +
+ {eligibleParents.length > 0 && ( + + )} {/* Group list */} diff --git a/src/components/pipeline/pipeline-group-tree.tsx b/src/components/pipeline/pipeline-group-tree.tsx new file mode 100644 index 00000000..9475ccdf --- /dev/null +++ b/src/components/pipeline/pipeline-group-tree.tsx @@ -0,0 +1,209 @@ +"use client"; + +import { useState } from "react"; +import { useQuery } from "@tanstack/react-query"; +import { useTRPC } from "@/trpc/client"; +import { ChevronRight, ChevronDown, FolderOpen, Folder } from "lucide-react"; +import { cn } from "@/lib/utils"; + +// --- Types --- + +export interface GroupNode { + id: string; + name: string; + color: string | null; + parentId: string | null; + children: GroupNode[]; +} + +// --- Tree builder --- + +export function buildGroupTree( + groups: Array<{ id: string; name: string; color: string | null; parentId: string | null }>, +): GroupNode[] { + const map = new Map(); + for (const g of groups) map.set(g.id, { ...g, children: [] }); + const roots: GroupNode[] = []; + for (const g of groups) { + const node = map.get(g.id)!; + if (!g.parentId) { + roots.push(node); + } else { + map.get(g.parentId)?.children.push(node); + } + } + return roots; +} + +// --- Breadcrumb builder --- + +export function buildBreadcrumbs( + groups: Array<{ id: string; name: string; parentId: string | null }>, + selectedId: string | null, +): Array<{ id: string | null; name: string }> { + if (!selectedId) return []; + const byId = new Map(groups.map((g) => [g.id, g])); + const path: Array<{ id: string | null; name: string }> = []; + let current = byId.get(selectedId); + while (current) { + path.unshift({ id: current.id, name: current.name }); + current = current.parentId ? byId.get(current.parentId) : undefined; + } + return path; +} + +// --- Tree node component --- + +function TreeNode({ + node, + depth, + selectedGroupId, + onSelectGroup, + pipelineCounts, +}: { + node: GroupNode; + depth: number; + selectedGroupId: string | null; + onSelectGroup: (groupId: string | null) => void; + pipelineCounts: Record; +}) { + const [expanded, setExpanded] = useState(true); + const hasChildren = node.children.length > 0; + const isSelected = selectedGroupId === node.id; + const count = pipelineCounts[node.id] ?? 0; + + return ( +
+
onSelectGroup(node.id)} + > + {hasChildren ? ( + + ) : ( + + )} + + {isSelected ? ( + + ) : ( + + )} + + + + {node.name} + + {count > 0 && ( + + {count} + + )} +
+ + {hasChildren && expanded && ( +
+ {node.children.map((child) => ( + + ))} +
+ )} +
+ ); +} + +// --- Main component --- + +interface PipelineGroupTreeProps { + environmentId: string; + selectedGroupId: string | null; + onSelectGroup: (groupId: string | null) => void; +} + +export function PipelineGroupTree({ + environmentId, + selectedGroupId, + onSelectGroup, +}: PipelineGroupTreeProps) { + const trpc = useTRPC(); + + const groupsQuery = useQuery( + trpc.pipelineGroup.list.queryOptions( + { environmentId }, + { enabled: !!environmentId }, + ), + ); + + const rawGroups = groupsQuery.data ?? []; + + const groups = rawGroups.map((g) => ({ + id: g.id, + name: g.name, + color: g.color, + parentId: g.parentId ?? null, + })); + + const tree = buildGroupTree(groups); + + const pipelineCounts: Record = {}; + for (const g of rawGroups) { + pipelineCounts[g.id] = g._count.pipelines; + } + + const isAllSelected = selectedGroupId === null; + + return ( +
+ {/* All Pipelines root item */} +
onSelectGroup(null)} + > + + All Pipelines +
+ + {/* Group tree */} + {tree.map((node) => ( + + ))} +
+ ); +} diff --git a/src/components/promote-pipeline-dialog.tsx b/src/components/promote-pipeline-dialog.tsx index 2e89a6b4..e13299f9 100644 --- a/src/components/promote-pipeline-dialog.tsx +++ b/src/components/promote-pipeline-dialog.tsx @@ -1,12 +1,17 @@ "use client"; import { useState } from "react"; -import Link from "next/link"; import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; import { useTRPC } from "@/trpc/client"; import { useTeamStore } from "@/stores/team-store"; import { toast } from "sonner"; -import { Loader2, AlertTriangle } from "lucide-react"; +import { + Loader2, + AlertTriangle, + CheckCircle, + Clock, + ArrowRight, +} from "lucide-react"; import { Dialog, @@ -26,13 +31,14 @@ import { import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; +import { ConfigDiff } from "@/components/ui/config-diff"; + +type Step = "target" | "preflight" | "diff" | "confirm" | "result"; interface PromoteResult { - id: string; - name: string; - targetEnvironmentName: string; - strippedSecrets: Array<{ name: string; componentKey: string }>; - strippedCertificates: Array<{ name: string; componentKey: string }>; + requestId: string; + status: string; + pendingApproval: boolean; } interface PromotePipelineDialogProps { @@ -50,6 +56,7 @@ export function PromotePipelineDialog({ const queryClient = useQueryClient(); const selectedTeamId = useTeamStore((s) => s.selectedTeamId); + const [step, setStep] = useState("target"); const [targetEnvId, setTargetEnvId] = useState(""); const [name, setName] = useState(pipeline.name); const [result, setResult] = useState(null); @@ -65,21 +72,47 @@ export function PromotePipelineDialog({ (env) => env.id !== pipeline.environmentId ); - const promoteMutation = useMutation( - trpc.pipeline.promote.mutationOptions({ + const selectedEnv = availableEnvironments.find((e) => e.id === targetEnvId); + + // Step 2: Preflight check + const preflightQuery = useQuery( + trpc.promotion.preflight.queryOptions( + { pipelineId: pipeline.id, targetEnvironmentId: targetEnvId, name }, + { enabled: step === "preflight" && !!targetEnvId } + ) + ); + + // Step 3: Diff preview + const diffQuery = useQuery( + trpc.promotion.diffPreview.queryOptions( + { pipelineId: pipeline.id }, + { enabled: step === "diff" } + ) + ); + + // Step 4: Initiate mutation + const initiateMutation = useMutation( + trpc.promotion.initiate.mutationOptions({ onSuccess: (data) => { setResult(data); + setStep("result"); queryClient.invalidateQueries({ queryKey: trpc.pipeline.list.queryKey(), }); + queryClient.invalidateQueries({ + queryKey: trpc.promotion.history.queryKey({ pipelineId: pipeline.id }), + }); + }, + onError: (err) => { + toast.error(err.message || "Failed to initiate promotion"); + setStep("diff"); }, - onError: (err) => - toast.error(err.message || "Failed to promote pipeline"), }) ); const handleClose = (openState: boolean) => { if (!openState) { + setStep("target"); setTargetEnvId(""); setName(pipeline.name); setResult(null); @@ -87,56 +120,64 @@ export function PromotePipelineDialog({ onOpenChange(openState); }; - const hasStrippedItems = - result && - (result.strippedSecrets.length > 0 || - result.strippedCertificates.length > 0); + const handleConfirmPromotion = () => { + setStep("confirm"); + initiateMutation.mutate({ + pipelineId: pipeline.id, + targetEnvironmentId: targetEnvId, + name: name || undefined, + }); + }; - if (result) { + // Step 1: Target selection + if (step === "target") { return ( - Pipeline Promoted + Promote Pipeline - Pipeline promoted to {result.targetEnvironmentName} as a draft. + Promote this pipeline to another environment with preflight validation. - {hasStrippedItems && ( -
-
- -
-

- The following references were stripped and need to be - re-configured in the target environment: -

-
    - {result.strippedSecrets.map((s, i) => ( -
  • - Secret {s.name} in{" "} - {s.componentKey} -
  • - ))} - {result.strippedCertificates.map((c, i) => ( -
  • - Certificate {c.name} in{" "} - {c.componentKey} -
  • - ))} -
-
-
+
+
+ +
- )} + +
+ + setName(e.target.value)} + /> +
+
- @@ -144,67 +185,222 @@ export function PromotePipelineDialog({ ); } + // Step 2: Preflight check + if (step === "preflight") { + const preflight = preflightQuery.data; + const isLoading = preflightQuery.isLoading; + const canProceed = preflight?.canProceed ?? false; + const missing = preflight?.missing ?? []; + const present = preflight?.present ?? []; + const nameCollision = preflight?.nameCollision ?? false; + + return ( + + + + Preflight Check + + Validating secret references in the target environment. + + + +
+ {isLoading ? ( +
+ + Checking secret references... +
+ ) : ( + <> + {missing.length > 0 && ( +
+
+ +
+

+ The following secrets are missing in the target environment and must be + created before promotion can proceed: +

+
    + {missing.map((s) => ( +
  • + {s} +
  • + ))} +
+
+
+
+ )} + + {nameCollision && ( +
+
+ +

+ A pipeline named "{name}" already exists in the target environment. + Go back and change the pipeline name to proceed. +

+
+
+ )} + + {canProceed && !nameCollision && ( +
+
+ +

+ {present.length === 0 + ? "No secret references in this pipeline." + : `All ${present.length} secret reference${present.length === 1 ? "" : "s"} verified in target environment.`} +

+
+
+ )} + + )} +
+ + + + + +
+
+ ); + } + + // Step 3: Diff preview + if (step === "diff") { + const diff = diffQuery.data; + const isLoading = diffQuery.isLoading; + + return ( + + + + Substitution Preview + + Review how secret references will be substituted in the target environment. + + + +
+ {isLoading ? ( +
+ + Generating substitution preview... +
+ ) : diff ? ( + <> + +

+ SECRET[name] references will be resolved as environment + variables in the target environment. +

+ + ) : null} +
+ + + + + +
+
+ ); + } + + // Step 4: Confirm (submitting) + if (step === "confirm") { + return ( + + + + Submitting Promotion + + Your promotion request is being processed. + + + +
+ + Submitting promotion request... +
+ + + + +
+
+ ); + } + + // Step 5: Result return ( - Promote Pipeline + Promotion Complete - Copy this pipeline to another environment. Secrets and certificates - will be stripped and must be re-configured. + Your pipeline has been promoted to {selectedEnv?.name ?? "the target environment"}. -
-
- - -
- -
- - setName(e.target.value)} - /> -
+
+ {result?.pendingApproval ? ( +
+
+ +
+

Promotion request submitted for approval

+

+ An administrator must approve before the pipeline appears in{" "} + {selectedEnv?.name ?? "the target environment"}. +

+
+
+
+ ) : ( +
+
+ +
+

Pipeline promoted successfully

+

+ The pipeline has been deployed to{" "} + {selectedEnv?.name ?? "the target environment"}. +

+
+
+
+ )}
- - +
diff --git a/src/components/settings-sidebar-nav.tsx b/src/components/settings-sidebar-nav.tsx index 7de46153..10d7296b 100644 --- a/src/components/settings-sidebar-nav.tsx +++ b/src/components/settings-sidebar-nav.tsx @@ -10,6 +10,7 @@ import { KeyRound, Bot, Sparkles, + Webhook, } from "lucide-react"; export const settingsNavGroups = [ @@ -34,6 +35,7 @@ export const settingsNavGroups = [ { title: "Teams", href: "/settings/teams", icon: Building2, requiredSuperAdmin: true }, { title: "Team Settings", href: "/settings/team", icon: Users, requiredSuperAdmin: false }, { title: "Service Accounts", href: "/settings/service-accounts", icon: Bot, requiredSuperAdmin: false }, + { title: "Outbound Webhooks", href: "/settings/webhooks", icon: Webhook, requiredSuperAdmin: false }, { title: "AI", href: "/settings/ai", icon: Sparkles, requiredSuperAdmin: false }, ], }, diff --git a/src/lib/__tests__/node-group-utils.test.ts b/src/lib/__tests__/node-group-utils.test.ts new file mode 100644 index 00000000..7e6dc6d6 --- /dev/null +++ b/src/lib/__tests__/node-group-utils.test.ts @@ -0,0 +1,21 @@ +import { describe, it, expect } from "vitest"; +import { nodeMatchesGroup } from "@/lib/node-group-utils"; + +describe("nodeMatchesGroup", () => { + it("Test 13: Empty criteria matches any labels (returns true)", () => { + expect(nodeMatchesGroup({ region: "us-east", role: "web" }, {})).toBe(true); + expect(nodeMatchesGroup({}, {})).toBe(true); + }); + + it("Test 14: Criteria {region: 'us-east'} matches node with {region: 'us-east', role: 'web'} (subset match)", () => { + expect( + nodeMatchesGroup({ region: "us-east", role: "web" }, { region: "us-east" }), + ).toBe(true); + }); + + it("Test 15: Criteria {region: 'us-east'} does NOT match node with {region: 'eu-west'}", () => { + expect( + nodeMatchesGroup({ region: "eu-west" }, { region: "us-east" }), + ).toBe(false); + }); +}); diff --git a/src/lib/node-group-utils.ts b/src/lib/node-group-utils.ts new file mode 100644 index 00000000..6abfa530 --- /dev/null +++ b/src/lib/node-group-utils.ts @@ -0,0 +1,11 @@ +/** + * Returns true if the node's labels match all criteria key-value pairs. + * Empty criteria {} is a catch-all that matches any node. + */ +export function nodeMatchesGroup( + nodeLabels: Record, + criteria: Record, +): boolean { + if (Object.keys(criteria).length === 0) return true; + return Object.entries(criteria).every(([k, v]) => nodeLabels[k] === v); +} diff --git a/src/lib/vector/__tests__/catalog.test.ts b/src/lib/vector/__tests__/catalog.test.ts new file mode 100644 index 00000000..2938bd07 --- /dev/null +++ b/src/lib/vector/__tests__/catalog.test.ts @@ -0,0 +1,27 @@ +import { describe, it, expect } from "vitest"; +import { getVectorCatalog, findComponentDef } from "@/lib/vector/catalog"; + +describe("Vector Catalog (PERF-04)", () => { + it("getVectorCatalog returns a non-empty array", () => { + const catalog = getVectorCatalog(); + expect(Array.isArray(catalog)).toBe(true); + expect(catalog.length).toBeGreaterThan(0); + }); + + it("getVectorCatalog returns same reference on repeated calls (singleton)", () => { + const first = getVectorCatalog(); + const second = getVectorCatalog(); + expect(first).toBe(second); // same reference, not just equal + }); + + it("findComponentDef finds a known component", () => { + const httpSource = findComponentDef("http_server", "source"); + expect(httpSource).toBeDefined(); + expect(httpSource?.type).toBe("http_server"); + }); + + it("findComponentDef returns undefined for unknown type", () => { + const result = findComponentDef("nonexistent_component_xyz"); + expect(result).toBeUndefined(); + }); +}); diff --git a/src/lib/vector/catalog.ts b/src/lib/vector/catalog.ts index 38a6237e..cb84837d 100644 --- a/src/lib/vector/catalog.ts +++ b/src/lib/vector/catalog.ts @@ -3,11 +3,15 @@ import { ALL_SOURCES } from "./schemas/sources"; import { ALL_TRANSFORMS } from "./schemas/transforms"; import { ALL_SINKS } from "./schemas/sinks"; -export const VECTOR_CATALOG: VectorComponentDef[] = [ - ...ALL_SOURCES, - ...ALL_TRANSFORMS, - ...ALL_SINKS, -]; +let _catalog: VectorComponentDef[] | null = null; + +/** PERF-04: Lazy singleton — catalog is built on first access, not at module load. */ +export function getVectorCatalog(): VectorComponentDef[] { + if (!_catalog) { + _catalog = [...ALL_SOURCES, ...ALL_TRANSFORMS, ...ALL_SINKS]; + } + return _catalog; +} /** * Find a component definition by type and optionally kind. @@ -18,8 +22,9 @@ export function findComponentDef( type: string, kind?: VectorComponentDef["kind"], ): VectorComponentDef | undefined { + const catalog = getVectorCatalog(); if (kind) { - return VECTOR_CATALOG.find((c) => c.type === type && c.kind === kind); + return catalog.find((c) => c.type === type && c.kind === kind); } - return VECTOR_CATALOG.find((c) => c.type === type); + return catalog.find((c) => c.type === type); } diff --git a/src/server/routers/__tests__/fleet-list.test.ts b/src/server/routers/__tests__/fleet-list.test.ts index e097dd04..6daba667 100644 --- a/src/server/routers/__tests__/fleet-list.test.ts +++ b/src/server/routers/__tests__/fleet-list.test.ts @@ -81,6 +81,8 @@ function makeNode(overrides: Partial<{ describe("fleet.list", () => { beforeEach(() => { mockReset(prismaMock); + // Default: no node groups (vacuously compliant) + prismaMock.nodeGroup.findMany.mockResolvedValue([]); }); it("returns all nodes when no filters", async () => { @@ -168,4 +170,40 @@ describe("fleet.list", () => { expect(result[0]).toHaveProperty("pushConnected", false); }); + + // ── label compliance ──────────────────────────────────────────────────── + + it("returns labelCompliant=true when node has all required labels", async () => { + const nodes = [makeNode({ id: "n1", labels: { region: "us-east", role: "worker" } })]; + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue([ + { requiredLabels: ["region", "role"] }, + ] as never); + + const result = await caller.list({ environmentId: "env-1" }); + + expect(result[0]).toHaveProperty("labelCompliant", true); + }); + + it("returns labelCompliant=false when node is missing a required label", async () => { + const nodes = [makeNode({ id: "n1", labels: { region: "us-east" } })]; + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue([ + { requiredLabels: ["region", "role"] }, + ] as never); + + const result = await caller.list({ environmentId: "env-1" }); + + expect(result[0]).toHaveProperty("labelCompliant", false); + }); + + it("returns labelCompliant=true when no NodeGroups have required labels (vacuously compliant)", async () => { + const nodes = [makeNode({ id: "n1", labels: {} })]; + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue([]); + + const result = await caller.list({ environmentId: "env-1" }); + + expect(result[0]).toHaveProperty("labelCompliant", true); + }); }); diff --git a/src/server/routers/__tests__/node-group.test.ts b/src/server/routers/__tests__/node-group.test.ts new file mode 100644 index 00000000..a1b9b65a --- /dev/null +++ b/src/server/routers/__tests__/node-group.test.ts @@ -0,0 +1,498 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; +import { mockDeep, mockReset, type DeepMockProxy } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; + +// ─── vi.hoisted so `t` is available inside vi.mock factories ──────────────── + +const { t } = vi.hoisted(() => { + // eslint-disable-next-line @typescript-eslint/no-require-imports + const { initTRPC } = require("@trpc/server"); + const t = initTRPC.context().create(); + return { t }; +}); + +vi.mock("@/trpc/init", () => { + const passthrough = () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })); + return { + router: t.router, + protectedProcedure: t.procedure, + withTeamAccess: passthrough, + middleware: t.middleware, + }; +}); + +vi.mock("@/server/middleware/audit", () => ({ + withAudit: () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })), +})); + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +// ─── Import SUT + mocks after vi.mock ─────────────────────────────────────── + +import { prisma } from "@/lib/prisma"; +import { nodeGroupRouter } from "@/server/routers/node-group"; + +const prismaMock = prisma as unknown as DeepMockProxy; +const caller = t.createCallerFactory(nodeGroupRouter)({ + session: { user: { id: "user-1" } }, +}); + +// ─── Fixtures ──────────────────────────────────────────────────────────────── + +function makeNodeGroup(overrides: Partial<{ + id: string; + name: string; + environmentId: string; + criteria: Record; + labelTemplate: Record; + requiredLabels: string[]; +}> = {}) { + return { + id: overrides.id ?? "ng-1", + name: overrides.name ?? "US East", + environmentId: overrides.environmentId ?? "env-1", + criteria: overrides.criteria ?? { region: "us-east" }, + labelTemplate: overrides.labelTemplate ?? { env: "prod" }, + requiredLabels: overrides.requiredLabels ?? ["region", "role"], + createdAt: new Date(), + updatedAt: new Date(), + }; +} + +function makeNode(overrides: Partial<{ + id: string; + name: string; + status: "HEALTHY" | "DEGRADED" | "UNREACHABLE" | "UNKNOWN"; + labels: Record; + lastSeen: Date | null; + nodeMetrics: Array<{ loadAvg1: number }>; +}> = {}) { + return { + id: overrides.id ?? "node-1", + name: overrides.name ?? "node-1", + status: overrides.status ?? "HEALTHY", + labels: overrides.labels ?? {}, + lastSeen: overrides.lastSeen !== undefined ? overrides.lastSeen : new Date(), + nodeMetrics: overrides.nodeMetrics ?? [], + }; +} + +function makeAlertEvent(overrides: Partial<{ + id: string; + nodeId: string | null; + status: "firing" | "resolved" | "acknowledged"; +}> = {}) { + return { + id: overrides.id ?? "alert-1", + nodeId: overrides.nodeId !== undefined ? overrides.nodeId : "node-1", + status: overrides.status ?? "firing", + }; +} + +// ─── Tests ────────────────────────────────────────────────────────────────── + +describe("nodeGroupRouter", () => { + beforeEach(() => { + mockReset(prismaMock); + }); + + // ── list ──────────────────────────────────────────────────────────────── + + describe("list", () => { + it("returns node groups for an environment ordered by name", async () => { + const groups = [ + makeNodeGroup({ id: "ng-1", name: "EU West" }), + makeNodeGroup({ id: "ng-2", name: "US East" }), + ]; + prismaMock.nodeGroup.findMany.mockResolvedValue(groups as never); + + const result = await caller.list({ environmentId: "env-1" }); + + expect(result).toEqual(groups); + expect(prismaMock.nodeGroup.findMany).toHaveBeenCalledWith({ + where: { environmentId: "env-1" }, + orderBy: { name: "asc" }, + }); + }); + + it("returns empty array when no groups exist", async () => { + prismaMock.nodeGroup.findMany.mockResolvedValue([]); + + const result = await caller.list({ environmentId: "env-1" }); + + expect(result).toEqual([]); + }); + }); + + // ── create ────────────────────────────────────────────────────────────── + + describe("create", () => { + it("creates a node group with name, criteria, labelTemplate, requiredLabels", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValue(null); + const created = makeNodeGroup({ id: "ng-new", name: "Asia Pacific" }); + prismaMock.nodeGroup.create.mockResolvedValue(created as never); + + const result = await caller.create({ + environmentId: "env-1", + name: "Asia Pacific", + criteria: { region: "ap-southeast" }, + labelTemplate: { env: "prod", tier: "1" }, + requiredLabels: ["region", "role"], + }); + + expect(result).toEqual(created); + expect(prismaMock.nodeGroup.create).toHaveBeenCalledWith({ + data: { + name: "Asia Pacific", + environmentId: "env-1", + criteria: { region: "ap-southeast" }, + labelTemplate: { env: "prod", tier: "1" }, + requiredLabels: ["region", "role"], + }, + }); + }); + + it("throws CONFLICT when duplicate name in same environment", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValue(makeNodeGroup() as never); + + await expect( + caller.create({ environmentId: "env-1", name: "US East" }), + ).rejects.toMatchObject({ code: "CONFLICT" }); + + expect(prismaMock.nodeGroup.create).not.toHaveBeenCalled(); + }); + + it("rejects empty name (Zod validation)", async () => { + await expect( + caller.create({ environmentId: "env-1", name: "" }), + ).rejects.toThrow(); + }); + }); + + // ── update ────────────────────────────────────────────────────────────── + + describe("update", () => { + it("updates group name", async () => { + prismaMock.nodeGroup.findUnique + .mockResolvedValueOnce(makeNodeGroup({ id: "ng-1", name: "Old Name" }) as never) + .mockResolvedValueOnce(null); // no conflict + + const updated = makeNodeGroup({ id: "ng-1", name: "New Name" }); + prismaMock.nodeGroup.update.mockResolvedValue(updated as never); + + const result = await caller.update({ id: "ng-1", name: "New Name" }); + + expect(result.name).toBe("New Name"); + }); + + it("throws NOT_FOUND for non-existent group", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValue(null); + + await expect( + caller.update({ id: "nonexistent", name: "Foo" }), + ).rejects.toMatchObject({ code: "NOT_FOUND" }); + }); + + it("throws CONFLICT when renaming to existing name", async () => { + prismaMock.nodeGroup.findUnique + .mockResolvedValueOnce(makeNodeGroup({ id: "ng-1", name: "Alpha" }) as never) + .mockResolvedValueOnce(makeNodeGroup({ id: "ng-2", name: "Beta" }) as never); // conflict! + + await expect( + caller.update({ id: "ng-1", name: "Beta" }), + ).rejects.toMatchObject({ code: "CONFLICT" }); + }); + + it("skips uniqueness check when name is unchanged", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValueOnce( + makeNodeGroup({ id: "ng-1", name: "Same Name" }) as never, + ); + + prismaMock.nodeGroup.update.mockResolvedValue( + makeNodeGroup({ id: "ng-1", name: "Same Name" }) as never, + ); + + await caller.update({ id: "ng-1", name: "Same Name" }); + + // findUnique called only once (to fetch the group), not twice + expect(prismaMock.nodeGroup.findUnique).toHaveBeenCalledTimes(1); + }); + + it("updates labelTemplate", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValueOnce( + makeNodeGroup({ id: "ng-1" }) as never, + ); + + const updated = makeNodeGroup({ id: "ng-1", labelTemplate: { env: "staging", tier: "2" } }); + prismaMock.nodeGroup.update.mockResolvedValue(updated as never); + + const result = await caller.update({ id: "ng-1", labelTemplate: { env: "staging", tier: "2" } }); + + expect(prismaMock.nodeGroup.update).toHaveBeenCalledWith({ + where: { id: "ng-1" }, + data: { labelTemplate: { env: "staging", tier: "2" } }, + }); + expect(result).toEqual(updated); + }); + }); + + // ── delete ────────────────────────────────────────────────────────────── + + describe("delete", () => { + it("deletes an existing group", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValue({ id: "ng-1" } as never); + prismaMock.nodeGroup.delete.mockResolvedValue(makeNodeGroup({ id: "ng-1" }) as never); + + const result = await caller.delete({ id: "ng-1" }); + + expect(result.id).toBe("ng-1"); + expect(prismaMock.nodeGroup.delete).toHaveBeenCalledWith({ + where: { id: "ng-1" }, + }); + }); + + it("throws NOT_FOUND for non-existent group", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValue(null); + + await expect( + caller.delete({ id: "nonexistent" }), + ).rejects.toMatchObject({ code: "NOT_FOUND" }); + }); + }); + + // ── groupHealthStats ───────────────────────────────────────────────────── + + describe("groupHealthStats", () => { + it("Test 1: Returns per-group stats (onlineCount, alertCount, complianceRate, totalNodes) for two groups", async () => { + const groups = [ + makeNodeGroup({ id: "ng-1", name: "US East", criteria: { region: "us-east" }, requiredLabels: ["region"] }), + makeNodeGroup({ id: "ng-2", name: "EU West", criteria: { region: "eu-west" }, requiredLabels: ["region"] }), + ]; + const nodes = [ + makeNode({ id: "n-1", status: "HEALTHY", labels: { region: "us-east" } }), + makeNode({ id: "n-2", status: "DEGRADED", labels: { region: "us-east" } }), + makeNode({ id: "n-3", status: "HEALTHY", labels: { region: "eu-west" } }), + ]; + const firingAlerts = [makeAlertEvent({ nodeId: "n-2", status: "firing" })]; + + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue(groups as never); + prismaMock.alertEvent.findMany.mockResolvedValue(firingAlerts as never); + + const result = await caller.groupHealthStats({ environmentId: "env-1" }); + + const usEast = result.find((r) => r.id === "ng-1"); + const euWest = result.find((r) => r.id === "ng-2"); + + expect(usEast).toBeDefined(); + expect(usEast!.totalNodes).toBe(2); + expect(usEast!.onlineCount).toBe(1); // only HEALTHY + expect(usEast!.alertCount).toBe(1); // n-2 has firing alert + expect(usEast!.complianceRate).toBe(100); // both have 'region' label + + expect(euWest).toBeDefined(); + expect(euWest!.totalNodes).toBe(1); + expect(euWest!.onlineCount).toBe(1); + expect(euWest!.alertCount).toBe(0); + }); + + it("Test 2: Group with empty criteria {} matches all nodes (catch-all) — totalNodes equals total environment nodes", async () => { + const groups = [ + makeNodeGroup({ id: "ng-all", name: "All Nodes", criteria: {}, requiredLabels: [] }), + ]; + const nodes = [ + makeNode({ id: "n-1", labels: { region: "us-east" } }), + makeNode({ id: "n-2", labels: { region: "eu-west" } }), + makeNode({ id: "n-3", labels: {} }), + ]; + + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue(groups as never); + prismaMock.alertEvent.findMany.mockResolvedValue([] as never); + + const result = await caller.groupHealthStats({ environmentId: "env-1" }); + + const allGroup = result.find((r) => r.id === "ng-all"); + expect(allGroup).toBeDefined(); + expect(allGroup!.totalNodes).toBe(3); // matches all + // No ungrouped since all matched + expect(result.find((r) => r.id === "__ungrouped__")).toBeUndefined(); + }); + + it("Test 3: Includes synthetic 'Ungrouped' entry for nodes matching no group", async () => { + const groups = [ + makeNodeGroup({ id: "ng-1", name: "US East", criteria: { region: "us-east" }, requiredLabels: [] }), + ]; + const nodes = [ + makeNode({ id: "n-1", labels: { region: "us-east" } }), + makeNode({ id: "n-2", labels: { region: "eu-west" } }), // no matching group + makeNode({ id: "n-3", labels: {} }), // no matching group + ]; + + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue(groups as never); + prismaMock.alertEvent.findMany.mockResolvedValue([] as never); + + const result = await caller.groupHealthStats({ environmentId: "env-1" }); + + const ungrouped = result.find((r) => r.id === "__ungrouped__"); + expect(ungrouped).toBeDefined(); + expect(ungrouped!.name).toBe("Ungrouped"); + expect(ungrouped!.totalNodes).toBe(2); // n-2 and n-3 + }); + + it("Test 4: complianceRate is 100 when requiredLabels is empty (vacuous truth)", async () => { + const groups = [ + makeNodeGroup({ id: "ng-1", name: "Any", criteria: {}, requiredLabels: [] }), + ]; + const nodes = [ + makeNode({ id: "n-1", labels: {} }), // no labels at all + makeNode({ id: "n-2", labels: { random: "value" } }), + ]; + + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue(groups as never); + prismaMock.alertEvent.findMany.mockResolvedValue([] as never); + + const result = await caller.groupHealthStats({ environmentId: "env-1" }); + + const group = result.find((r) => r.id === "ng-1"); + expect(group!.complianceRate).toBe(100); + }); + + it("Test 5: alertCount only counts AlertStatus.firing, not resolved/acknowledged", async () => { + const groups = [ + makeNodeGroup({ id: "ng-1", criteria: {}, requiredLabels: [] }), + ]; + const nodes = [ + makeNode({ id: "n-1" }), + makeNode({ id: "n-2" }), + makeNode({ id: "n-3" }), + ]; + // Only n-1 has a firing alert; n-2 has resolved, n-3 has acknowledged + const alerts = [ + makeAlertEvent({ nodeId: "n-1", status: "firing" }), + // resolved and acknowledged should not appear since we filter for firing only + ]; + + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + prismaMock.nodeGroup.findMany.mockResolvedValue(groups as never); + prismaMock.alertEvent.findMany.mockResolvedValue(alerts as never); + + const result = await caller.groupHealthStats({ environmentId: "env-1" }); + + const group = result.find((r) => r.id === "ng-1"); + expect(group!.alertCount).toBe(1); // only the firing one + }); + + it("Test 6: Returns empty array when no groups and no nodes exist (no ungrouped entry)", async () => { + prismaMock.vectorNode.findMany.mockResolvedValue([] as never); + prismaMock.nodeGroup.findMany.mockResolvedValue([] as never); + prismaMock.alertEvent.findMany.mockResolvedValue([] as never); + + const result = await caller.groupHealthStats({ environmentId: "env-1" }); + + expect(result).toEqual([]); + }); + }); + + // ── nodesInGroup ───────────────────────────────────────────────────────── + + describe("nodesInGroup", () => { + it("Test 7: Returns nodes matching criteria sorted by status (UNREACHABLE first, then DEGRADED, then HEALTHY), then by name", async () => { + const group = makeNodeGroup({ + id: "ng-1", + criteria: { region: "us-east" }, + requiredLabels: [], + }); + const nodes = [ + makeNode({ id: "n-healthy", name: "alpha", status: "HEALTHY", labels: { region: "us-east" } }), + makeNode({ id: "n-unreachable", name: "beta", status: "UNREACHABLE", labels: { region: "us-east" } }), + makeNode({ id: "n-degraded", name: "gamma", status: "DEGRADED", labels: { region: "us-east" } }), + ]; + + prismaMock.nodeGroup.findUnique.mockResolvedValue(group as never); + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + + const result = await caller.nodesInGroup({ groupId: "ng-1", environmentId: "env-1" }); + + expect(result[0].status).toBe("UNREACHABLE"); + expect(result[1].status).toBe("DEGRADED"); + expect(result[2].status).toBe("HEALTHY"); + }); + + it("Test 8: Attaches cpuLoad from latest NodeMetric (nodeMetrics[0].loadAvg1) — null when no metrics", async () => { + const group = makeNodeGroup({ id: "ng-1", criteria: {}, requiredLabels: [] }); + const nodes = [ + makeNode({ id: "n-with-metrics", name: "a", nodeMetrics: [{ loadAvg1: 0.75 }] }), + makeNode({ id: "n-no-metrics", name: "b", nodeMetrics: [] }), + ]; + + prismaMock.nodeGroup.findUnique.mockResolvedValue(group as never); + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + + const result = await caller.nodesInGroup({ groupId: "ng-1", environmentId: "env-1" }); + + const withMetrics = result.find((n) => n.id === "n-with-metrics"); + const noMetrics = result.find((n) => n.id === "n-no-metrics"); + + expect(withMetrics!.cpuLoad).toBe(0.75); + expect(noMetrics!.cpuLoad).toBeNull(); + }); + + it("Test 9: Attaches labelCompliant=true when requiredLabels is empty", async () => { + const group = makeNodeGroup({ id: "ng-1", criteria: {}, requiredLabels: [] }); + const nodes = [makeNode({ id: "n-1", labels: {} })]; // no labels, but requiredLabels is empty + + prismaMock.nodeGroup.findUnique.mockResolvedValue(group as never); + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + + const result = await caller.nodesInGroup({ groupId: "ng-1", environmentId: "env-1" }); + + expect(result[0].labelCompliant).toBe(true); + }); + + it("Test 10: Attaches labelCompliant=false when node is missing a required label key", async () => { + const group = makeNodeGroup({ + id: "ng-1", + criteria: { region: "us-east" }, + requiredLabels: ["region", "role"], // requires both + }); + const nodes = [ + makeNode({ id: "n-missing-role", labels: { region: "us-east" } }), // missing 'role' + ]; + + prismaMock.nodeGroup.findUnique.mockResolvedValue(group as never); + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + + const result = await caller.nodesInGroup({ groupId: "ng-1", environmentId: "env-1" }); + + expect(result[0].labelCompliant).toBe(false); + }); + + it("Test 11: Throws NOT_FOUND for non-existent groupId", async () => { + prismaMock.nodeGroup.findUnique.mockResolvedValue(null); + + await expect( + caller.nodesInGroup({ groupId: "nonexistent", environmentId: "env-1" }), + ).rejects.toMatchObject({ code: "NOT_FOUND" }); + }); + + it("Test 12: Returns lastSeen timestamp for recency display", async () => { + const group = makeNodeGroup({ id: "ng-1", criteria: {}, requiredLabels: [] }); + const lastSeen = new Date("2026-01-15T10:00:00Z"); + const nodes = [makeNode({ id: "n-1", lastSeen })]; + + prismaMock.nodeGroup.findUnique.mockResolvedValue(group as never); + prismaMock.vectorNode.findMany.mockResolvedValue(nodes as never); + + const result = await caller.nodesInGroup({ groupId: "ng-1", environmentId: "env-1" }); + + expect(result[0].lastSeen).toEqual(lastSeen); + }); + }); +}); diff --git a/src/server/routers/__tests__/pipeline-bulk-tags.test.ts b/src/server/routers/__tests__/pipeline-bulk-tags.test.ts new file mode 100644 index 00000000..8a549f7a --- /dev/null +++ b/src/server/routers/__tests__/pipeline-bulk-tags.test.ts @@ -0,0 +1,320 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; +import { mockDeep, mockReset, type DeepMockProxy } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; + +// ─── vi.hoisted so `t` is available inside vi.mock factories ──────────────── + +const { t } = vi.hoisted(() => { + // eslint-disable-next-line @typescript-eslint/no-require-imports + const { initTRPC } = require("@trpc/server"); + const t = initTRPC.context().create(); + return { t }; +}); + +vi.mock("@/trpc/init", () => { + const passthrough = () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })); + return { + router: t.router, + protectedProcedure: t.procedure, + withTeamAccess: passthrough, + requireSuperAdmin: passthrough, + middleware: t.middleware, + }; +}); + +vi.mock("@/server/middleware/audit", () => ({ + withAudit: () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })), +})); + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +vi.mock("@/server/services/deploy-agent", () => ({ + deployAgent: vi.fn(), + undeployAgent: vi.fn(), +})); + +vi.mock("@/server/services/pipeline-graph", () => ({ + saveGraphComponents: vi.fn(), + promotePipeline: vi.fn(), + discardPipelineChanges: vi.fn(), + detectConfigChanges: vi.fn(), + listPipelinesForEnvironment: vi.fn(), +})); + +vi.mock("@/server/services/pipeline-version", () => ({ + createVersion: vi.fn(), + listVersions: vi.fn(), + listVersionsSummary: vi.fn(), + getVersion: vi.fn(), + rollback: vi.fn(), +})); + +vi.mock("@/server/services/config-crypto", () => ({ + decryptNodeConfig: vi.fn((_, c: unknown) => c), +})); + +vi.mock("@/server/services/system-environment", () => ({ + getOrCreateSystemEnvironment: vi.fn(), +})); + +vi.mock("@/server/services/copy-pipeline-graph", () => ({ + copyPipelineGraph: vi.fn(), +})); + +vi.mock("@/server/services/git-sync", () => ({ + gitSyncDeletePipeline: vi.fn(), +})); + +vi.mock("@/server/services/sli-evaluator", () => ({ + evaluatePipelineHealth: vi.fn(), +})); + +vi.mock("@/server/services/batch-health", () => ({ + batchEvaluatePipelineHealth: vi.fn(), +})); + +vi.mock("@/server/services/push-broadcast", () => ({ + relayPush: vi.fn(), +})); + +vi.mock("@/server/services/sse-broadcast", () => ({ + broadcastSSE: vi.fn(), +})); + +vi.mock("@/server/services/event-alerts", () => ({ + fireEventAlert: vi.fn(), +})); + +// ─── Import SUT + mocks ──────────────────────────────────────────────────── + +import { prisma } from "@/lib/prisma"; +import { pipelineRouter } from "@/server/routers/pipeline"; + +const prismaMock = prisma as unknown as DeepMockProxy; +const caller = t.createCallerFactory(pipelineRouter)({ + session: { user: { id: "user-1" } }, +}); + +// ─── Fixtures ─────────────────────────────────────────────────────────────── + +function makePipeline(overrides: Record = {}) { + return { + id: "p1", + tags: ["existing-tag"], + environment: { teamId: "team-1" }, + ...overrides, + }; +} + +function makeTeam(overrides: Record = {}) { + return { + id: "team-1", + availableTags: ["tag-a", "tag-b", "existing-tag"], + ...overrides, + }; +} + +// ─── Tests ────────────────────────────────────────────────────────────────── + +describe("bulk tag operations", () => { + beforeEach(() => { + mockReset(prismaMock); + }); + + // ── bulkAddTags ────────────────────────────────────────────────────────── + + describe("bulkAddTags", () => { + it("adds tags to multiple pipelines successfully", async () => { + prismaMock.pipeline.findUnique + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: [] }) as never) // first pipeline (team lookup) + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: [] }) as never) // loop iteration 1 + .mockResolvedValueOnce(makePipeline({ id: "p2", tags: ["old-tag"] }) as never); // loop iteration 2 + prismaMock.team.findUnique.mockResolvedValue(makeTeam({ availableTags: [] }) as never); // empty = no validation + prismaMock.pipeline.update.mockResolvedValue({} as never); + + const result = await caller.bulkAddTags({ + pipelineIds: ["p1", "p2"], + tags: ["tag-a"], + }); + + expect(result.total).toBe(2); + expect(result.succeeded).toBe(2); + expect(result.results).toHaveLength(2); + expect(result.results.every((r) => r.success)).toBe(true); + }); + + it("validates tags against team.availableTags before the loop", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline({ id: "p1" }) as never); + prismaMock.team.findUnique.mockResolvedValue(makeTeam({ availableTags: ["tag-a", "tag-b"] }) as never); + + await expect( + caller.bulkAddTags({ + pipelineIds: ["p1"], + tags: ["invalid-tag"], + }), + ).rejects.toMatchObject({ + code: "BAD_REQUEST", + message: expect.stringContaining("Invalid tags"), + }); + }); + + it("throws BAD_REQUEST for tags not in availableTags", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.team.findUnique.mockResolvedValue(makeTeam({ availableTags: ["allowed"] }) as never); + + await expect( + caller.bulkAddTags({ + pipelineIds: ["p1"], + tags: ["not-allowed"], + }), + ).rejects.toMatchObject({ code: "BAD_REQUEST" }); + }); + + it("handles partial failure when some pipelines are not found", async () => { + prismaMock.pipeline.findUnique + .mockResolvedValueOnce(makePipeline({ id: "p1" }) as never) // first pipeline (team lookup) + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: [] }) as never) // loop: p1 found + .mockResolvedValueOnce(null); // loop: p2 not found + prismaMock.team.findUnique.mockResolvedValue(makeTeam({ availableTags: [] }) as never); + prismaMock.pipeline.update.mockResolvedValue({} as never); + + const result = await caller.bulkAddTags({ + pipelineIds: ["p1", "p2"], + tags: ["tag-a"], + }); + + expect(result.total).toBe(2); + expect(result.succeeded).toBe(1); + const failedResult = result.results.find((r) => r.pipelineId === "p2"); + expect(failedResult?.success).toBe(false); + expect(failedResult?.error).toBe("Pipeline not found"); + }); + + it("deduplicates tags — adding an existing tag does not create duplicates", async () => { + prismaMock.pipeline.findUnique + .mockResolvedValueOnce(makePipeline({ id: "p1" }) as never) // team lookup + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: ["existing-tag"] }) as never); // loop + prismaMock.team.findUnique.mockResolvedValue(makeTeam({ availableTags: [] }) as never); + prismaMock.pipeline.update.mockResolvedValue({} as never); + + await caller.bulkAddTags({ + pipelineIds: ["p1"], + tags: ["existing-tag"], + }); + + // Update should be called with deduplicated tags (no duplicates) + expect(prismaMock.pipeline.update).toHaveBeenCalledWith( + expect.objectContaining({ + data: { tags: ["existing-tag"] }, // only one instance + }), + ); + }); + + it("enforces max 100 pipeline limit (rejects more than 100)", async () => { + const tooMany = Array.from({ length: 101 }, (_, i) => `p${i}`); + + await expect( + caller.bulkAddTags({ + pipelineIds: tooMany, + tags: ["tag-a"], + }), + ).rejects.toThrow(); // Zod max(100) validation + }); + + it("throws NOT_FOUND when first pipeline for team lookup is not found", async () => { + prismaMock.pipeline.findUnique.mockResolvedValueOnce(null); + + await expect( + caller.bulkAddTags({ + pipelineIds: ["nonexistent"], + tags: ["tag-a"], + }), + ).rejects.toMatchObject({ code: "NOT_FOUND" }); + }); + }); + + // ── bulkRemoveTags ─────────────────────────────────────────────────────── + + describe("bulkRemoveTags", () => { + it("removes specified tags from multiple pipelines", async () => { + prismaMock.pipeline.findUnique + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: ["tag-a", "tag-b"] }) as never) + .mockResolvedValueOnce(makePipeline({ id: "p2", tags: ["tag-a", "tag-c"] }) as never); + prismaMock.pipeline.update.mockResolvedValue({} as never); + + const result = await caller.bulkRemoveTags({ + pipelineIds: ["p1", "p2"], + tags: ["tag-a"], + }); + + expect(result.total).toBe(2); + expect(result.succeeded).toBe(2); + // p1 should have tag-b remaining, p2 should have tag-c remaining + expect(prismaMock.pipeline.update).toHaveBeenNthCalledWith( + 1, + expect.objectContaining({ data: { tags: ["tag-b"] } }), + ); + expect(prismaMock.pipeline.update).toHaveBeenNthCalledWith( + 2, + expect.objectContaining({ data: { tags: ["tag-c"] } }), + ); + }); + + it("handles pipelines that don't have the tag (no-op, still success)", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue( + makePipeline({ id: "p1", tags: ["unrelated-tag"] }) as never, + ); + prismaMock.pipeline.update.mockResolvedValue({} as never); + + const result = await caller.bulkRemoveTags({ + pipelineIds: ["p1"], + tags: ["nonexistent-tag"], + }); + + expect(result.succeeded).toBe(1); + // Tags should remain unchanged + expect(prismaMock.pipeline.update).toHaveBeenCalledWith( + expect.objectContaining({ + data: { tags: ["unrelated-tag"] }, + }), + ); + }); + + it("handles partial failure when pipeline is not found", async () => { + prismaMock.pipeline.findUnique + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: ["tag-a"] }) as never) // p1 found + .mockResolvedValueOnce(null); // p2 not found + prismaMock.pipeline.update.mockResolvedValue({} as never); + + const result = await caller.bulkRemoveTags({ + pipelineIds: ["p1", "p2"], + tags: ["tag-a"], + }); + + expect(result.total).toBe(2); + expect(result.succeeded).toBe(1); + const failedResult = result.results.find((r) => r.pipelineId === "p2"); + expect(failedResult?.success).toBe(false); + }); + + it("returns correct succeeded count", async () => { + prismaMock.pipeline.findUnique + .mockResolvedValueOnce(makePipeline({ id: "p1", tags: ["tag-a"] }) as never) + .mockResolvedValueOnce(null) // p2 not found + .mockResolvedValueOnce(makePipeline({ id: "p3", tags: ["tag-a"] }) as never); + prismaMock.pipeline.update.mockResolvedValue({} as never); + + const result = await caller.bulkRemoveTags({ + pipelineIds: ["p1", "p2", "p3"], + tags: ["tag-a"], + }); + + expect(result.total).toBe(3); + expect(result.succeeded).toBe(2); + }); + }); +}); diff --git a/src/server/routers/__tests__/pipeline-group.test.ts b/src/server/routers/__tests__/pipeline-group.test.ts index 3a492c20..334cba8c 100644 --- a/src/server/routers/__tests__/pipeline-group.test.ts +++ b/src/server/routers/__tests__/pipeline-group.test.ts @@ -43,6 +43,22 @@ const caller = t.createCallerFactory(pipelineGroupRouter)({ session: { user: { id: "user-1" } }, }); +// ─── Fixtures ─────────────────────────────────────────────────────────────── + +function makeGroup(overrides: Record = {}) { + return { + id: "g1", + name: "Backend", + color: "#ff0000", + environmentId: "env-1", + parentId: null, + createdAt: new Date(), + updatedAt: new Date(), + _count: { pipelines: 0, children: 0 }, + ...overrides, + }; +} + // ─── Tests ────────────────────────────────────────────────────────────────── describe("pipelineGroupRouter", () => { @@ -55,8 +71,8 @@ describe("pipelineGroupRouter", () => { describe("list", () => { it("returns groups ordered by name with pipeline counts", async () => { const groups = [ - { id: "g1", name: "Backend", color: "#ff0000", environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), _count: { pipelines: 3 } }, - { id: "g2", name: "Frontend", color: null, environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), _count: { pipelines: 0 } }, + makeGroup({ id: "g1", name: "Backend", _count: { pipelines: 3, children: 1 } }), + makeGroup({ id: "g2", name: "Frontend", color: null, _count: { pipelines: 0, children: 0 } }), ]; prismaMock.pipelineGroup.findMany.mockResolvedValue(groups as never); @@ -65,11 +81,23 @@ describe("pipelineGroupRouter", () => { expect(result).toEqual(groups); expect(prismaMock.pipelineGroup.findMany).toHaveBeenCalledWith({ where: { environmentId: "env-1" }, - include: { _count: { select: { pipelines: true } } }, + include: { _count: { select: { pipelines: true, children: true } } }, orderBy: { name: "asc" }, }); }); + it("returns groups with parentId field", async () => { + const groups = [ + makeGroup({ id: "g1", name: "Parent", parentId: null }), + makeGroup({ id: "g2", name: "Child", parentId: "g1" }), + ]; + prismaMock.pipelineGroup.findMany.mockResolvedValue(groups as never); + + const result = await caller.list({ environmentId: "env-1" }); + + expect(result[1]).toMatchObject({ parentId: "g1" }); + }); + it("returns empty array when no groups exist", async () => { prismaMock.pipelineGroup.findMany.mockResolvedValue([]); @@ -83,11 +111,8 @@ describe("pipelineGroupRouter", () => { describe("create", () => { it("creates a group with name and color", async () => { - prismaMock.pipelineGroup.findUnique.mockResolvedValue(null); - const created = { - id: "g-new", name: "Infra", color: "#00ff00", - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), - }; + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + const created = makeGroup({ id: "g-new", name: "Infra", color: "#00ff00" }); prismaMock.pipelineGroup.create.mockResolvedValue(created as never); const result = await caller.create({ @@ -98,16 +123,13 @@ describe("pipelineGroupRouter", () => { expect(result).toEqual(created); expect(prismaMock.pipelineGroup.create).toHaveBeenCalledWith({ - data: { name: "Infra", color: "#00ff00", environmentId: "env-1" }, + data: { name: "Infra", color: "#00ff00", environmentId: "env-1", parentId: null }, }); }); it("creates a group without color", async () => { - prismaMock.pipelineGroup.findUnique.mockResolvedValue(null); - prismaMock.pipelineGroup.create.mockResolvedValue({ - id: "g-new", name: "Logs", color: null, - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + prismaMock.pipelineGroup.create.mockResolvedValue(makeGroup({ name: "Logs", color: null }) as never); const result = await caller.create({ environmentId: "env-1", @@ -117,21 +139,120 @@ describe("pipelineGroupRouter", () => { expect(result.color).toBeNull(); }); - it("throws CONFLICT when duplicate name in same environment", async () => { + it("creates a child group with parentId", async () => { + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + // parent at depth 1 (root), no grandparent + prismaMock.pipelineGroup.findUnique.mockResolvedValue({ + id: "parent-1", + parentId: null, + parent: null, + } as never); + const created = makeGroup({ id: "child-1", name: "Child", parentId: "parent-1" }); + prismaMock.pipelineGroup.create.mockResolvedValue(created as never); + + const result = await caller.create({ + environmentId: "env-1", + name: "Child", + parentId: "parent-1", + }); + + expect(result.parentId).toBe("parent-1"); + expect(prismaMock.pipelineGroup.create).toHaveBeenCalledWith({ + data: { name: "Child", color: undefined, environmentId: "env-1", parentId: "parent-1" }, + }); + }); + + it("creates a group at depth 3 (parent at depth 2) successfully", async () => { + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + // parent is at depth 2 (has a parent at depth 1 with no grandparent) prismaMock.pipelineGroup.findUnique.mockResolvedValue({ - id: "existing", name: "Infra", color: null, - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), + id: "depth2-group", + parentId: "depth1-group", + parent: { parentId: null }, } as never); + const created = makeGroup({ id: "depth3-group", name: "Deep", parentId: "depth2-group" }); + prismaMock.pipelineGroup.create.mockResolvedValue(created as never); + + const result = await caller.create({ + environmentId: "env-1", + name: "Deep", + parentId: "depth2-group", + }); + + expect(result.id).toBe("depth3-group"); + }); + + it("rejects creating a group at depth 4 (Maximum group nesting depth exceeded)", async () => { + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + // parent is at depth 3 (has parentId and parent.parentId is non-null) + prismaMock.pipelineGroup.findUnique.mockResolvedValue({ + id: "depth3-group", + parentId: "depth2-group", + parent: { parentId: "depth1-group" }, + } as never); + + await expect( + caller.create({ + environmentId: "env-1", + name: "TooDeep", + parentId: "depth3-group", + }), + ).rejects.toMatchObject({ + code: "BAD_REQUEST", + message: expect.stringContaining("Maximum group nesting depth (3) exceeded"), + }); + }); + + it("throws NOT_FOUND when parentId does not exist", async () => { + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + prismaMock.pipelineGroup.findUnique.mockResolvedValue(null); await expect( - caller.create({ environmentId: "env-1", name: "Infra" }), - ).rejects.toThrow(TRPCError); + caller.create({ + environmentId: "env-1", + name: "Orphan", + parentId: "nonexistent", + }), + ).rejects.toMatchObject({ code: "NOT_FOUND" }); + }); + + it("throws CONFLICT when duplicate name under the same parent", async () => { + // findFirst returns existing group with same name + parentId + prismaMock.pipelineGroup.findFirst.mockResolvedValue(makeGroup({ name: "Infra", parentId: "parent-1" }) as never); await expect( - caller.create({ environmentId: "env-1", name: "Infra" }), + caller.create({ environmentId: "env-1", name: "Infra", parentId: "parent-1" }), ).rejects.toMatchObject({ code: "CONFLICT" }); }); + it("throws CONFLICT when duplicate name at root level in same environment", async () => { + prismaMock.pipelineGroup.findFirst.mockResolvedValue(makeGroup({ name: "Root Group", parentId: null }) as never); + + await expect( + caller.create({ environmentId: "env-1", name: "Root Group" }), + ).rejects.toMatchObject({ code: "CONFLICT" }); + }); + + it("allows duplicate names under different parents", async () => { + // findFirst returns null (no conflict since different parent) + prismaMock.pipelineGroup.findFirst.mockResolvedValue(null); + prismaMock.pipelineGroup.findUnique.mockResolvedValue({ + id: "parent-2", + parentId: null, + parent: null, + } as never); + const created = makeGroup({ id: "g-dup", name: "Shared Name", parentId: "parent-2" }); + prismaMock.pipelineGroup.create.mockResolvedValue(created as never); + + const result = await caller.create({ + environmentId: "env-1", + name: "Shared Name", + parentId: "parent-2", + }); + + expect(result.name).toBe("Shared Name"); + }); + it("rejects empty name", async () => { await expect( caller.create({ environmentId: "env-1", name: "" }), @@ -149,17 +270,14 @@ describe("pipelineGroupRouter", () => { describe("update", () => { it("updates group name", async () => { - prismaMock.pipelineGroup.findUnique - .mockResolvedValueOnce({ - id: "g1", name: "Old Name", environmentId: "env-1", - color: null, createdAt: new Date(), updatedAt: new Date(), - } as never) - .mockResolvedValueOnce(null); // no conflict - - prismaMock.pipelineGroup.update.mockResolvedValue({ - id: "g1", name: "New Name", color: null, - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.findUnique.mockResolvedValueOnce( + makeGroup({ id: "g1", name: "Old Name", parentId: null }) as never, + ); + prismaMock.pipelineGroup.findFirst.mockResolvedValueOnce(null); // no conflict + + prismaMock.pipelineGroup.update.mockResolvedValue( + makeGroup({ id: "g1", name: "New Name" }) as never, + ); const result = await caller.update({ id: "g1", name: "New Name" }); @@ -167,15 +285,13 @@ describe("pipelineGroupRouter", () => { }); it("updates group color to null", async () => { - prismaMock.pipelineGroup.findUnique.mockResolvedValueOnce({ - id: "g1", name: "Infra", environmentId: "env-1", - color: "#ff0000", createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.findUnique.mockResolvedValueOnce( + makeGroup({ id: "g1", name: "Infra", color: "#ff0000", parentId: null }) as never, + ); - prismaMock.pipelineGroup.update.mockResolvedValue({ - id: "g1", name: "Infra", color: null, - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.update.mockResolvedValue( + makeGroup({ id: "g1", name: "Infra", color: null }) as never, + ); const result = await caller.update({ id: "g1", color: null }); @@ -194,16 +310,13 @@ describe("pipelineGroupRouter", () => { ).rejects.toMatchObject({ code: "NOT_FOUND" }); }); - it("throws CONFLICT when renaming to an existing name", async () => { - prismaMock.pipelineGroup.findUnique - .mockResolvedValueOnce({ - id: "g1", name: "Alpha", environmentId: "env-1", - color: null, createdAt: new Date(), updatedAt: new Date(), - } as never) - .mockResolvedValueOnce({ - id: "g2", name: "Beta", environmentId: "env-1", - color: null, createdAt: new Date(), updatedAt: new Date(), - } as never); // conflict! + it("throws CONFLICT when renaming to an existing name in same parent", async () => { + prismaMock.pipelineGroup.findUnique.mockResolvedValueOnce( + makeGroup({ id: "g1", name: "Alpha", parentId: null }) as never, + ); + prismaMock.pipelineGroup.findFirst.mockResolvedValueOnce( + makeGroup({ id: "g2", name: "Beta", parentId: null }) as never, // conflict + ); await expect( caller.update({ id: "g1", name: "Beta" }), @@ -211,20 +324,36 @@ describe("pipelineGroupRouter", () => { }); it("skips uniqueness check when name is unchanged", async () => { - prismaMock.pipelineGroup.findUnique.mockResolvedValueOnce({ - id: "g1", name: "Same Name", environmentId: "env-1", - color: null, createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.findUnique.mockResolvedValueOnce( + makeGroup({ id: "g1", name: "Same Name", parentId: null }) as never, + ); - prismaMock.pipelineGroup.update.mockResolvedValue({ - id: "g1", name: "Same Name", color: "#000", - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.update.mockResolvedValue( + makeGroup({ id: "g1", name: "Same Name", color: "#000" }) as never, + ); await caller.update({ id: "g1", name: "Same Name", color: "#000" }); - // findUnique called only once (to fetch the group), not twice (no conflict check) - expect(prismaMock.pipelineGroup.findUnique).toHaveBeenCalledTimes(1); + // findFirst should NOT be called (no name change, skip uniqueness check) + expect(prismaMock.pipelineGroup.findFirst).not.toHaveBeenCalled(); + }); + + it("enforces depth guard when updating parentId", async () => { + prismaMock.pipelineGroup.findUnique + .mockResolvedValueOnce(makeGroup({ id: "g1", name: "Group", parentId: null }) as never) // fetch group + .mockResolvedValueOnce({ + id: "depth3-group", + parentId: "depth2-group", + parent: { parentId: "depth1-group" }, + } as never); // depth guard: parent at depth 3 + prismaMock.pipelineGroup.findFirst.mockResolvedValueOnce(null); + + await expect( + caller.update({ id: "g1", parentId: "depth3-group" }), + ).rejects.toMatchObject({ + code: "BAD_REQUEST", + message: expect.stringContaining("Maximum group nesting depth (3) exceeded"), + }); }); }); @@ -235,10 +364,9 @@ describe("pipelineGroupRouter", () => { prismaMock.pipelineGroup.findUnique.mockResolvedValue({ id: "g1", } as never); - prismaMock.pipelineGroup.delete.mockResolvedValue({ - id: "g1", name: "Deleted", color: null, - environmentId: "env-1", createdAt: new Date(), updatedAt: new Date(), - } as never); + prismaMock.pipelineGroup.delete.mockResolvedValue( + makeGroup({ id: "g1", name: "Deleted" }) as never, + ); const result = await caller.delete({ id: "g1" }); @@ -255,5 +383,17 @@ describe("pipelineGroupRouter", () => { caller.delete({ id: "nonexistent" }), ).rejects.toMatchObject({ code: "NOT_FOUND" }); }); + + it("deletes group with children (SetNull cascade handles children parentId)", async () => { + // onDelete:SetNull handles this in DB — we just verify delete is called + prismaMock.pipelineGroup.findUnique.mockResolvedValue({ id: "parent-g" } as never); + prismaMock.pipelineGroup.delete.mockResolvedValue( + makeGroup({ id: "parent-g", name: "Parent" }) as never, + ); + + const result = await caller.delete({ id: "parent-g" }); + + expect(result.id).toBe("parent-g"); + }); }); }); diff --git a/src/server/routers/__tests__/promotion.test.ts b/src/server/routers/__tests__/promotion.test.ts new file mode 100644 index 00000000..8ade26fb --- /dev/null +++ b/src/server/routers/__tests__/promotion.test.ts @@ -0,0 +1,723 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; +import { mockDeep, mockReset, type DeepMockProxy } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; + +// ─── vi.hoisted so `t` is available inside vi.mock factories ──────────────── + +const { t } = vi.hoisted(() => { + // eslint-disable-next-line @typescript-eslint/no-require-imports + const { initTRPC } = require("@trpc/server"); + const t = initTRPC.context().create(); + return { t }; +}); + +vi.mock("@/trpc/init", () => { + const passthrough = () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })); + return { + router: t.router, + protectedProcedure: t.procedure, + withTeamAccess: passthrough, + requireSuperAdmin: passthrough, + middleware: t.middleware, + }; +}); + +vi.mock("@/server/middleware/audit", () => ({ + withAudit: () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })), +})); + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +vi.mock("@/server/services/promotion-service", () => ({ + preflightSecrets: vi.fn(), + executePromotion: vi.fn(), + generateDiffPreview: vi.fn(), +})); + +vi.mock("@/server/services/secret-resolver", () => ({ + collectSecretRefs: vi.fn(), + convertSecretRefsToEnvVars: vi.fn(), + secretNameToEnvVar: vi.fn(), +})); + +vi.mock("@/server/services/copy-pipeline-graph", () => ({ + copyPipelineGraph: vi.fn(), +})); + +vi.mock("@/server/services/outbound-webhook", () => ({ + fireOutboundWebhooks: vi.fn(), +})); + +vi.mock("@/server/services/config-crypto", () => ({ + decryptNodeConfig: vi.fn((_: unknown, c: unknown) => c), + encryptNodeConfig: vi.fn((_: unknown, c: unknown) => c), +})); + +vi.mock("@/lib/config-generator", () => ({ + generateVectorYaml: vi.fn().mockReturnValue("sources:\n my_source:\n type: stdin\n"), +})); + +vi.mock("@/server/services/audit", () => ({ + writeAuditLog: vi.fn(), +})); + +vi.mock("@/server/services/event-alerts", () => ({ + fireEventAlert: vi.fn(), +})); + +vi.mock("@/server/services/gitops-promotion", () => ({ + createPromotionPR: vi.fn(), +})); + +// ─── Import SUT + mocks ───────────────────────────────────────────────────── + +import { prisma } from "@/lib/prisma"; +import { promotionRouter } from "@/server/routers/promotion"; +import * as promotionService from "@/server/services/promotion-service"; +import * as gitopsPromotion from "@/server/services/gitops-promotion"; + +const prismaMock = prisma as unknown as DeepMockProxy; +const caller = t.createCallerFactory(promotionRouter)({ + session: { user: { id: "user-1", email: "test@test.com" } }, +}); + +// ─── Helpers ──────────────────────────────────────────────────────────────── + +function makePipeline(overrides: Record = {}) { + return { + id: "pipeline-1", + name: "My Pipeline", + description: null, + environmentId: "env-source", + globalConfig: null, + isDraft: true, + isSystem: false, + nodes: [], + edges: [], + environment: { teamId: "team-1", id: "env-source", name: "Development" }, + ...overrides, + }; +} + +function makeEnvironment(overrides: Record = {}) { + return { + id: "env-target", + name: "Production", + teamId: "team-1", + requireDeployApproval: true, + gitOpsMode: "off", + gitRepoUrl: null, + gitToken: null, + gitBranch: "main", + ...overrides, + }; +} + +function makePromotionRequest(overrides: Record = {}) { + return { + id: "req-1", + sourcePipelineId: "pipeline-1", + targetPipelineId: null, + sourceEnvironmentId: "env-source", + targetEnvironmentId: "env-target", + status: "PENDING", + promotedById: "user-2", + approvedById: null, + targetPipelineName: "My Pipeline", + nodesSnapshot: null, + edgesSnapshot: null, + globalConfigSnapshot: null, + reviewNote: null, + createdAt: new Date(), + reviewedAt: null, + deployedAt: null, + ...overrides, + }; +} + +// ─── Tests ────────────────────────────────────────────────────────────────── + +describe("promotion router", () => { + beforeEach(() => { + mockReset(prismaMock); + vi.clearAllMocks(); + }); + + // ─── preflight ───────────────────────────────────────────────────────────── + + describe("preflight", () => { + it("preflight blocks when secrets are missing in target env", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: ["api_key"], + present: ["db_password"], + canProceed: false, + }); + + const result = await caller.preflight({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.canProceed).toBe(false); + expect(result.missing).toContain("api_key"); + expect(result.present).toContain("db_password"); + }); + + it("preflight passes when all secrets present", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: ["api_key", "db_password"], + canProceed: true, + }); + + const result = await caller.preflight({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.canProceed).toBe(true); + expect(result.missing).toHaveLength(0); + expect(result.present).toContain("api_key"); + expect(result.present).toContain("db_password"); + }); + + it("preflight passes with no secret refs", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + + const result = await caller.preflight({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.canProceed).toBe(true); + expect(result.missing).toHaveLength(0); + expect(result.present).toHaveLength(0); + }); + + it("preflight reports name collision when pipeline exists in target env", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.pipeline.findFirst.mockResolvedValue({ id: "existing-pipeline" } as never); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + + const result = await caller.preflight({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.nameCollision).toBe(true); + }); + }); + + // ─── diffPreview ──────────────────────────────────────────────────────────── + + describe("diffPreview", () => { + it("returns source and target YAML", async () => { + vi.mocked(promotionService.generateDiffPreview).mockResolvedValue({ + sourceYaml: "sources:\n stdin: {}\n", + targetYaml: "sources:\n stdin: {}\n", + }); + + const result = await caller.diffPreview({ pipelineId: "pipeline-1" }); + + expect(result.sourceYaml).toBeDefined(); + expect(result.targetYaml).toBeDefined(); + expect(promotionService.generateDiffPreview).toHaveBeenCalledWith("pipeline-1"); + }); + }); + + // ─── initiate ────────────────────────────────────────────────────────────── + + describe("initiate", () => { + it("creates PENDING request when approval required", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ requireDeployApproval: true }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ promotedById: "user-1" }), + } as never); + + const result = await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.status).toBe("PENDING"); + expect(result.pendingApproval).toBe(true); + expect(prismaMock.promotionRequest.create).toHaveBeenCalledOnce(); + expect(promotionService.executePromotion).not.toHaveBeenCalled(); + }); + + it("auto-executes when approval not required", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ requireDeployApproval: false }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ promotedById: "user-1" }), + } as never); + vi.mocked(promotionService.executePromotion).mockResolvedValue({ + pipelineId: "new-pipeline-1", + pipelineName: "My Pipeline", + }); + + const result = await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.status).toBe("DEPLOYED"); + expect(result.pendingApproval).toBe(false); + expect(promotionService.executePromotion).toHaveBeenCalledOnce(); + }); + + it("throws BAD_REQUEST if same environment", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue( + makePipeline({ environmentId: "env-target" }) as never, + ); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + + await expect( + caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }), + ).rejects.toThrow("Source and target environments must be different"); + }); + + it("throws BAD_REQUEST if different team", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue( + makePipeline({ environment: { teamId: "team-1", id: "env-source" } }) as never, + ); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ teamId: "team-2" }) as never, + ); + + await expect( + caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }), + ).rejects.toThrow("same team"); + }); + + it("throws BAD_REQUEST if pipeline name collision", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + prismaMock.pipeline.findFirst.mockResolvedValue({ + id: "existing-pipeline", + name: "My Pipeline", + } as never); + + await expect( + caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }), + ).rejects.toThrow("already exists"); + }); + + it("throws BAD_REQUEST if secrets are missing", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue(makeEnvironment() as never); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: ["api_key"], + present: [], + canProceed: false, + }); + + await expect( + caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }), + ).rejects.toThrow("Missing secrets"); + }); + + it("stores nodesSnapshot and edgesSnapshot from source pipeline at request time", async () => { + const nodes = [ + { + id: "node-1", + componentKey: "my_source", + componentType: "stdin", + kind: "SOURCE", + config: { encoding: { codec: "json" } }, + positionX: 0, + positionY: 0, + disabled: false, + }, + ]; + const edges = [ + { id: "edge-1", sourceNodeId: "node-1", targetNodeId: "node-2", sourcePort: null }, + ]; + prismaMock.pipeline.findUnique.mockResolvedValue( + makePipeline({ nodes, edges }) as never, + ); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ requireDeployApproval: true }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ promotedById: "user-1" }), + } as never); + + await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + const createCall = prismaMock.promotionRequest.create.mock.calls[0][0]; + expect(createCall.data.nodesSnapshot).toBeDefined(); + expect(createCall.data.edgesSnapshot).toBeDefined(); + }); + }); + + // ─── approve ──────────────────────────────────────────────────────────────── + + describe("approve", () => { + it("self-review blocked — promoter cannot approve own request", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-1" }) as never, + ); + + await expect( + caller.approve({ requestId: "req-1" }), + ).rejects.toThrow("Cannot approve your own promotion request"); + }); + + it("atomic approve prevents race condition — returns BAD_REQUEST if count 0", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-2" }) as never, + ); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 0 } as never); + + await expect( + caller.approve({ requestId: "req-1" }), + ).rejects.toThrow("no longer pending"); + }); + + it("succeeds for different user and calls executePromotion", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-2" }) as never, + ); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + vi.mocked(promotionService.executePromotion).mockResolvedValue({ + pipelineId: "new-pipeline-1", + pipelineName: "My Pipeline", + }); + + const result = await caller.approve({ requestId: "req-1" }); + + expect(result.success).toBe(true); + expect(promotionService.executePromotion).toHaveBeenCalledWith("req-1", "user-1"); + }); + }); + + // ─── reject ────────────────────────────────────────────────────────────────── + + describe("reject", () => { + it("sets status REJECTED with review note", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-2" }) as never, + ); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + + const result = await caller.reject({ requestId: "req-1", note: "Not ready" }); + + expect(result.rejected).toBe(true); + const updateCall = prismaMock.promotionRequest.updateMany.mock.calls[0][0]; + expect(updateCall.data.status).toBe("REJECTED"); + expect(updateCall.data.reviewNote).toBe("Not ready"); + }); + + it("throws if request not found or not pending", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue(null); + + await expect( + caller.reject({ requestId: "req-missing" }), + ).rejects.toThrow("not found or not pending"); + }); + }); + + // ─── cancel ────────────────────────────────────────────────────────────────── + + describe("cancel", () => { + it("only promoter can cancel — throws FORBIDDEN for different user", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-2" }) as never, + ); + + await expect( + caller.cancel({ requestId: "req-1" }), + ).rejects.toThrow("Only the original promoter"); + }); + + it("promoter can cancel their own request", async () => { + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-1" }) as never, + ); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + + const result = await caller.cancel({ requestId: "req-1" }); + + expect(result.cancelled).toBe(true); + }); + }); + + // ─── history ───────────────────────────────────────────────────────────────── + + describe("history", () => { + it("returns records ordered by createdAt desc", async () => { + const records = [ + { + ...makePromotionRequest({ createdAt: new Date("2026-03-27") }), + promotedBy: { name: "Alice", email: "alice@test.com" }, + approvedBy: null, + sourceEnvironment: { name: "Development" }, + targetEnvironment: { name: "Production" }, + }, + { + ...makePromotionRequest({ id: "req-2", createdAt: new Date("2026-03-26") }), + promotedBy: { name: "Bob", email: "bob@test.com" }, + approvedBy: null, + sourceEnvironment: { name: "Development" }, + targetEnvironment: { name: "Staging" }, + }, + ]; + prismaMock.promotionRequest.findMany.mockResolvedValue(records as never); + + const result = await caller.history({ pipelineId: "pipeline-1" }); + + expect(result).toHaveLength(2); + expect(prismaMock.promotionRequest.findMany).toHaveBeenCalledWith( + expect.objectContaining({ + where: { sourcePipelineId: "pipeline-1" }, + orderBy: { createdAt: "desc" }, + take: 20, + }), + ); + }); + }); + + // ─── SECRET[name] ref preservation ─────────────────────────────────────────── + + describe("clone preserves SECRET refs", () => { + it("executePromotion does not strip SECRET[name] refs from cloned pipeline config", async () => { + // This test verifies the behavior is wired correctly: no transformConfig is passed + // to copyPipelineGraph, so SECRET[name] refs are preserved intact. + // The promotion service is tested here via mocked executePromotion. + // The actual preservation is enforced in promotion-service.ts by not passing transformConfig. + vi.mocked(promotionService.executePromotion).mockResolvedValue({ + pipelineId: "new-pipeline-1", + pipelineName: "My Pipeline", + }); + prismaMock.promotionRequest.findUnique.mockResolvedValue( + makePromotionRequest({ promotedById: "user-2" }) as never, + ); + prismaMock.promotionRequest.updateMany.mockResolvedValue({ count: 1 } as never); + + const result = await caller.approve({ requestId: "req-1" }); + + // Verify executePromotion was called (which internally uses copyPipelineGraph without stripping) + expect(promotionService.executePromotion).toHaveBeenCalledWith("req-1", "user-1"); + expect(result.success).toBe(true); + }); + + it("diffPreview targetYaml uses SECRET ref placeholders (not plaintext)", async () => { + // sourceYaml shows SECRET[api_key] as-is, targetYaml converts to ${VF_SECRET_API_KEY} + vi.mocked(promotionService.generateDiffPreview).mockResolvedValue({ + sourceYaml: "password: SECRET[api_key]\n", + targetYaml: "password: ${VF_SECRET_API_KEY}\n", + }); + + const result = await caller.diffPreview({ pipelineId: "pipeline-1" }); + + // Source YAML preserves SECRET[name] reference format + expect(result.sourceYaml).toContain("SECRET[api_key]"); + // Target YAML uses env var placeholder format + expect(result.targetYaml).toContain("VF_SECRET_API_KEY"); + }); + }); + + // ─── GitOps initiation path ─────────────────────────────────────────────── + + describe("GitOps initiation", () => { + it("returns AWAITING_PR_MERGE and prUrl when gitOpsMode is 'promotion'", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ + gitOpsMode: "promotion", + gitRepoUrl: "https://github.com/myorg/myrepo", + gitToken: "encrypted-token", + gitBranch: "main", + requireDeployApproval: false, + }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ promotedById: "user-1" }), + } as never); + vi.mocked(gitopsPromotion.createPromotionPR).mockResolvedValue({ + prNumber: 42, + prUrl: "https://github.com/myorg/myrepo/pull/42", + prBranch: "vf-promote/production-my-pipeline-req1", + }); + prismaMock.promotionRequest.update.mockResolvedValue({} as never); + + const result = await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.status).toBe("AWAITING_PR_MERGE"); + expect(result.prUrl).toBe("https://github.com/myorg/myrepo/pull/42"); + expect(result.pendingApproval).toBe(false); + expect(gitopsPromotion.createPromotionPR).toHaveBeenCalledOnce(); + expect(promotionService.executePromotion).not.toHaveBeenCalled(); + }); + + it("updates PromotionRequest with prUrl and prNumber after PR creation", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ + gitOpsMode: "promotion", + gitRepoUrl: "https://github.com/myorg/myrepo", + gitToken: "encrypted-token", + gitBranch: "main", + }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ id: "req-gitops-1", promotedById: "user-1" }), + } as never); + vi.mocked(gitopsPromotion.createPromotionPR).mockResolvedValue({ + prNumber: 7, + prUrl: "https://github.com/myorg/myrepo/pull/7", + prBranch: "vf-promote/production-my-pipeline-req-gito", + }); + prismaMock.promotionRequest.update.mockResolvedValue({} as never); + + await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(prismaMock.promotionRequest.update).toHaveBeenCalledWith({ + where: { id: "req-gitops-1" }, + data: { + prUrl: "https://github.com/myorg/myrepo/pull/7", + prNumber: 7, + status: "AWAITING_PR_MERGE", + }, + }); + }); + + it("falls through to UI path when gitOpsMode is 'off'", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ gitOpsMode: "off", requireDeployApproval: true }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ promotedById: "user-1" }), + } as never); + + const result = await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + expect(result.status).toBe("PENDING"); + expect(gitopsPromotion.createPromotionPR).not.toHaveBeenCalled(); + }); + + it("falls through to UI path when gitOpsMode is 'push'", async () => { + prismaMock.pipeline.findUnique.mockResolvedValue(makePipeline() as never); + prismaMock.environment.findUnique.mockResolvedValue( + makeEnvironment({ + gitOpsMode: "push", + gitRepoUrl: "https://github.com/myorg/myrepo", + gitToken: "encrypted-token", + requireDeployApproval: false, + }) as never, + ); + prismaMock.pipeline.findFirst.mockResolvedValue(null); + vi.mocked(promotionService.preflightSecrets).mockResolvedValue({ + missing: [], + present: [], + canProceed: true, + }); + prismaMock.promotionRequest.create.mockResolvedValue({ + ...makePromotionRequest({ promotedById: "user-1" }), + } as never); + vi.mocked(promotionService.executePromotion).mockResolvedValue({ + pipelineId: "new-pipeline-1", + pipelineName: "My Pipeline", + }); + + const result = await caller.initiate({ + pipelineId: "pipeline-1", + targetEnvironmentId: "env-target", + }); + + // push mode should execute directly (no PR) + expect(result.status).toBe("DEPLOYED"); + expect(gitopsPromotion.createPromotionPR).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/src/server/routers/__tests__/webhook-endpoint.test.ts b/src/server/routers/__tests__/webhook-endpoint.test.ts new file mode 100644 index 00000000..b6773773 --- /dev/null +++ b/src/server/routers/__tests__/webhook-endpoint.test.ts @@ -0,0 +1,258 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; +import { mockDeep, mockReset, type DeepMockProxy } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; +import { AlertMetric } from "@/generated/prisma"; + +// ─── vi.hoisted so `t` is available inside vi.mock factories ──────────────── + +const { t } = vi.hoisted(() => { + // eslint-disable-next-line @typescript-eslint/no-require-imports + const { initTRPC } = require("@trpc/server"); + const t = initTRPC.context().create(); + return { t }; +}); + +vi.mock("@/trpc/init", () => { + const passthrough = () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })); + return { + router: t.router, + protectedProcedure: t.procedure, + withTeamAccess: passthrough, + middleware: t.middleware, + }; +}); + +vi.mock("@/server/middleware/audit", () => ({ + withAudit: () => + t.middleware(({ next, ctx }: { next: (opts: { ctx: unknown }) => unknown; ctx: unknown }) => next({ ctx })), +})); + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +vi.mock("@/server/services/crypto", () => ({ + encrypt: vi.fn().mockReturnValue("encrypted-secret"), + decrypt: vi.fn().mockReturnValue("plaintext-secret"), +})); + +vi.mock("@/server/services/url-validation", () => ({ + validatePublicUrl: vi.fn().mockResolvedValue(undefined), +})); + +vi.mock("@/server/services/outbound-webhook", () => ({ + deliverOutboundWebhook: vi.fn().mockResolvedValue({ + success: true, + statusCode: 200, + isPermanent: false, + }), +})); + +// ─── Import SUT + mocks after vi.mock ─────────────────────────────────────── + +import { prisma } from "@/lib/prisma"; +import { webhookEndpointRouter } from "@/server/routers/webhook-endpoint"; +import * as cryptoMod from "@/server/services/crypto"; +import * as urlValidation from "@/server/services/url-validation"; +import * as outboundWebhook from "@/server/services/outbound-webhook"; + +const prismaMock = prisma as unknown as DeepMockProxy; +const caller = t.createCallerFactory(webhookEndpointRouter)({ + session: { user: { id: "user-1" } }, +}); + +// ─── Helpers ──────────────────────────────────────────────────────────────── + +function makeEndpoint(overrides: Partial<{ + id: string; + teamId: string; + name: string; + url: string; + eventTypes: AlertMetric[]; + encryptedSecret: string | null; + enabled: boolean; + createdAt: Date; + updatedAt: Date; +}> = {}) { + return { + id: "ep-1", + teamId: "team-1", + name: "My Webhook", + url: "https://example.com/hook", + eventTypes: [AlertMetric.deploy_completed], + encryptedSecret: "encrypted-secret", + enabled: true, + createdAt: new Date(), + updatedAt: new Date(), + ...overrides, + }; +} + +// ─── Tests ────────────────────────────────────────────────────────────────── + +describe("webhookEndpointRouter", () => { + beforeEach(() => { + mockReset(prismaMock); + vi.clearAllMocks(); + vi.mocked(urlValidation.validatePublicUrl).mockResolvedValue(undefined); + vi.mocked(cryptoMod.encrypt).mockReturnValue("encrypted-secret"); + }); + + // ─── create ──────────────────────────────────────────────────────────── + + describe("create", () => { + it("encrypts secret before storing", async () => { + const endpoint = makeEndpoint(); + prismaMock.webhookEndpoint.create.mockResolvedValue(endpoint); + + await caller.create({ + teamId: "team-1", + name: "My Webhook", + url: "https://example.com/hook", + eventTypes: [AlertMetric.deploy_completed], + secret: "my-secret", + }); + + expect(cryptoMod.encrypt).toHaveBeenCalledWith("my-secret"); + expect(prismaMock.webhookEndpoint.create).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + encryptedSecret: "encrypted-secret", + }), + }), + ); + }); + + it("validates URL via validatePublicUrl", async () => { + const endpoint = makeEndpoint(); + prismaMock.webhookEndpoint.create.mockResolvedValue(endpoint); + + await caller.create({ + teamId: "team-1", + name: "My Webhook", + url: "https://example.com/hook", + eventTypes: [AlertMetric.deploy_completed], + }); + + expect(urlValidation.validatePublicUrl).toHaveBeenCalledWith("https://example.com/hook"); + }); + + it("stores null encryptedSecret when no secret provided", async () => { + const endpoint = makeEndpoint({ encryptedSecret: null }); + prismaMock.webhookEndpoint.create.mockResolvedValue(endpoint); + + await caller.create({ + teamId: "team-1", + name: "My Webhook", + url: "https://example.com/hook", + eventTypes: [AlertMetric.deploy_completed], + }); + + expect(prismaMock.webhookEndpoint.create).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + encryptedSecret: null, + }), + }), + ); + }); + }); + + // ─── list ────────────────────────────────────────────────────────────── + + describe("list", () => { + it("excludes encryptedSecret from response using select", async () => { + prismaMock.webhookEndpoint.findMany.mockResolvedValue([]); + + await caller.list({ teamId: "team-1" }); + + expect(prismaMock.webhookEndpoint.findMany).toHaveBeenCalledWith( + expect.objectContaining({ + select: expect.not.objectContaining({ + encryptedSecret: expect.anything(), + }), + }), + ); + }); + + it("orders by createdAt desc", async () => { + prismaMock.webhookEndpoint.findMany.mockResolvedValue([]); + + await caller.list({ teamId: "team-1" }); + + expect(prismaMock.webhookEndpoint.findMany).toHaveBeenCalledWith( + expect.objectContaining({ + orderBy: { createdAt: "desc" }, + }), + ); + }); + }); + + // ─── testDelivery ────────────────────────────────────────────────────── + + describe("testDelivery", () => { + it("calls deliverOutboundWebhook with endpoint URL and encrypted secret", async () => { + const endpoint = makeEndpoint(); + prismaMock.webhookEndpoint.findFirst.mockResolvedValue(endpoint); + + await caller.testDelivery({ id: "ep-1", teamId: "team-1" }); + + expect(outboundWebhook.deliverOutboundWebhook).toHaveBeenCalledWith( + expect.objectContaining({ + url: endpoint.url, + encryptedSecret: endpoint.encryptedSecret, + }), + expect.objectContaining({ + type: "test", + }), + ); + }); + + it("returns the delivery result", async () => { + const endpoint = makeEndpoint(); + prismaMock.webhookEndpoint.findFirst.mockResolvedValue(endpoint); + + const result = await caller.testDelivery({ id: "ep-1", teamId: "team-1" }); + + expect(result).toMatchObject({ + success: true, + statusCode: 200, + }); + }); + }); + + // ─── listDeliveries ──────────────────────────────────────────────────── + + describe("listDeliveries", () => { + it("returns deliveries ordered by requestedAt desc", async () => { + prismaMock.webhookEndpoint.findFirst.mockResolvedValue(makeEndpoint()); + prismaMock.webhookDelivery.findMany.mockResolvedValue([]); + prismaMock.webhookDelivery.count.mockResolvedValue(0); + + await caller.listDeliveries({ + webhookEndpointId: "ep-1", + teamId: "team-1", + }); + + expect(prismaMock.webhookDelivery.findMany).toHaveBeenCalledWith( + expect.objectContaining({ + orderBy: { requestedAt: "desc" }, + }), + ); + }); + + it("returns total count for pagination", async () => { + prismaMock.webhookEndpoint.findFirst.mockResolvedValue(makeEndpoint()); + prismaMock.webhookDelivery.findMany.mockResolvedValue([]); + prismaMock.webhookDelivery.count.mockResolvedValue(5); + + const result = await caller.listDeliveries({ + webhookEndpointId: "ep-1", + teamId: "team-1", + }); + + expect(result.total).toBe(5); + }); + }); +}); diff --git a/src/server/routers/environment.ts b/src/server/routers/environment.ts index 579fee09..8b41e324 100644 --- a/src/server/routers/environment.ts +++ b/src/server/routers/environment.ts @@ -103,7 +103,7 @@ export const environmentRouter = router({ gitRepoUrl: z.string().url().optional().nullable(), gitBranch: z.string().min(1).max(100).optional().nullable(), gitToken: z.string().optional().nullable(), - gitOpsMode: z.enum(["off", "push", "bidirectional"]).optional(), + gitOpsMode: z.enum(["off", "push", "bidirectional", "promotion"]).optional(), requireDeployApproval: z.boolean().optional(), }) ) @@ -147,17 +147,18 @@ export const environmentRouter = router({ data.gitToken = gitToken ? encrypt(gitToken) : null; } - // Handle gitOpsMode — auto-generate webhook secret when switching to bidirectional + // Handle gitOpsMode — auto-generate webhook secret when switching to bidirectional or promotion let plaintextWebhookSecret: string | null = null; if (gitOpsModeInput !== undefined) { data.gitOpsMode = gitOpsModeInput; - if (gitOpsModeInput === "bidirectional" && !existing.gitWebhookSecret) { + const needsWebhookSecret = gitOpsModeInput === "bidirectional" || gitOpsModeInput === "promotion"; + if (needsWebhookSecret && !existing.gitWebhookSecret) { plaintextWebhookSecret = crypto.randomBytes(32).toString("hex"); data.gitWebhookSecret = encrypt(plaintextWebhookSecret); } - // Clear webhook secret when disabling bidirectional mode - if (gitOpsModeInput !== "bidirectional") { + // Clear webhook secret when disabling webhook-based modes + if (!needsWebhookSecret) { data.gitWebhookSecret = null; } } diff --git a/src/server/routers/fleet.ts b/src/server/routers/fleet.ts index 0805f0c3..3990fba5 100644 --- a/src/server/routers/fleet.ts +++ b/src/server/routers/fleet.ts @@ -56,9 +56,25 @@ export const fleetRouter = router({ }); } + // Label compliance check (NODE-02) + const nodeGroups = await prisma.nodeGroup.findMany({ + where: { environmentId: input.environmentId }, + select: { requiredLabels: true }, + }); + const allRequiredLabels = [ + ...new Set(nodeGroups.flatMap((g) => g.requiredLabels as string[])), + ]; + return filtered.map((node) => ({ ...node, pushConnected: pushRegistry.isConnected(node.id), + labelCompliant: allRequiredLabels.length === 0 || + allRequiredLabels.every((key) => + Object.prototype.hasOwnProperty.call( + (node.labels as Record) ?? {}, + key, + ), + ), })); }), diff --git a/src/server/routers/node-group.ts b/src/server/routers/node-group.ts new file mode 100644 index 00000000..f732b3d2 --- /dev/null +++ b/src/server/routers/node-group.ts @@ -0,0 +1,358 @@ +import { z } from "zod"; +import { TRPCError } from "@trpc/server"; +import { router, protectedProcedure, withTeamAccess } from "@/trpc/init"; +import { prisma } from "@/lib/prisma"; +import { withAudit } from "@/server/middleware/audit"; +import { nodeMatchesGroup } from "@/lib/node-group-utils"; + +export const nodeGroupRouter = router({ + list: protectedProcedure + .input(z.object({ environmentId: z.string() })) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + return prisma.nodeGroup.findMany({ + where: { environmentId: input.environmentId }, + orderBy: { name: "asc" }, + }); + }), + + create: protectedProcedure + .input( + z.object({ + environmentId: z.string(), + name: z.string().min(1).max(100), + criteria: z.record(z.string(), z.string()).default({}), + labelTemplate: z.record(z.string(), z.string()).default({}), + requiredLabels: z.array(z.string()).default([]), + }), + ) + .use(withTeamAccess("ADMIN")) + .use(withAudit("nodeGroup.created", "NodeGroup")) + .mutation(async ({ input }) => { + // Validate unique name per environment + const existing = await prisma.nodeGroup.findUnique({ + where: { + environmentId_name: { + environmentId: input.environmentId, + name: input.name, + }, + }, + }); + if (existing) { + throw new TRPCError({ + code: "CONFLICT", + message: `A node group named "${input.name}" already exists in this environment`, + }); + } + + return prisma.nodeGroup.create({ + data: { + name: input.name, + environmentId: input.environmentId, + criteria: input.criteria, + labelTemplate: input.labelTemplate, + requiredLabels: input.requiredLabels, + }, + }); + }), + + update: protectedProcedure + .input( + z.object({ + id: z.string(), + name: z.string().min(1).max(100).optional(), + criteria: z.record(z.string(), z.string()).optional(), + labelTemplate: z.record(z.string(), z.string()).optional(), + requiredLabels: z.array(z.string()).optional(), + }), + ) + .use(withTeamAccess("ADMIN")) + .use(withAudit("nodeGroup.updated", "NodeGroup")) + .mutation(async ({ input }) => { + const group = await prisma.nodeGroup.findUnique({ + where: { id: input.id }, + select: { id: true, environmentId: true, name: true }, + }); + if (!group) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Node group not found", + }); + } + + // Validate unique name if name is being changed + if (input.name && input.name !== group.name) { + const existing = await prisma.nodeGroup.findUnique({ + where: { + environmentId_name: { + environmentId: group.environmentId, + name: input.name, + }, + }, + }); + if (existing) { + throw new TRPCError({ + code: "CONFLICT", + message: `A node group named "${input.name}" already exists in this environment`, + }); + } + } + + const data: Record = {}; + if (input.name !== undefined) data.name = input.name; + if (input.criteria !== undefined) data.criteria = input.criteria; + if (input.labelTemplate !== undefined) data.labelTemplate = input.labelTemplate; + if (input.requiredLabels !== undefined) data.requiredLabels = input.requiredLabels; + + return prisma.nodeGroup.update({ + where: { id: input.id }, + data, + }); + }), + + delete: protectedProcedure + .input(z.object({ id: z.string() })) + .use(withTeamAccess("ADMIN")) + .use(withAudit("nodeGroup.deleted", "NodeGroup")) + .mutation(async ({ input }) => { + const group = await prisma.nodeGroup.findUnique({ + where: { id: input.id }, + select: { id: true }, + }); + if (!group) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Node group not found", + }); + } + + return prisma.nodeGroup.delete({ + where: { id: input.id }, + }); + }), + + /** + * NODE-04: Aggregated per-group health stats for the fleet dashboard. + * Single round trip: 3 parallel queries, application-layer aggregation. + */ + groupHealthStats: protectedProcedure + .input(z.object({ environmentId: z.string() })) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + const { environmentId } = input; + + const [nodes, groups, firingAlerts] = await Promise.all([ + prisma.vectorNode.findMany({ + where: { environmentId }, + select: { id: true, status: true, labels: true }, + }), + prisma.nodeGroup.findMany({ + where: { environmentId }, + orderBy: { name: "asc" }, + }), + prisma.alertEvent.findMany({ + where: { status: "firing", node: { environmentId } }, + select: { nodeId: true }, + }), + ]); + + const firingNodeIds = new Set( + firingAlerts.map((a) => a.nodeId).filter(Boolean) as string[], + ); + + const assignedNodeIds = new Set(); + + const groupStats = groups.map((group) => { + const criteria = group.criteria as Record; + const requiredLabels = group.requiredLabels as string[]; + + const matchedNodes = nodes.filter((n) => { + const nodeLabels = (n.labels as Record) ?? {}; + return nodeMatchesGroup(nodeLabels, criteria); + }); + + for (const n of matchedNodes) { + assignedNodeIds.add(n.id); + } + + const totalNodes = matchedNodes.length; + const onlineCount = matchedNodes.filter((n) => n.status === "HEALTHY").length; + const alertCount = matchedNodes.filter((n) => firingNodeIds.has(n.id)).length; + + let complianceRate = 100; + if (requiredLabels.length > 0 && totalNodes > 0) { + const compliantCount = matchedNodes.filter((n) => { + const nodeLabels = (n.labels as Record) ?? {}; + return requiredLabels.every((key) => + Object.prototype.hasOwnProperty.call(nodeLabels, key), + ); + }).length; + complianceRate = Math.round((compliantCount / totalNodes) * 100); + } + + return { + ...group, + totalNodes, + onlineCount, + alertCount, + complianceRate, + }; + }); + + // Synthetic "Ungrouped" entry for nodes not matching any group + const ungroupedNodes = nodes.filter((n) => !assignedNodeIds.has(n.id)); + if (ungroupedNodes.length > 0) { + const ungroupedOnlineCount = ungroupedNodes.filter((n) => n.status === "HEALTHY").length; + const ungroupedAlertCount = ungroupedNodes.filter((n) => firingNodeIds.has(n.id)).length; + groupStats.push({ + id: "__ungrouped__", + name: "Ungrouped", + environmentId, + criteria: {}, + labelTemplate: {}, + requiredLabels: [], + createdAt: new Date(0), + updatedAt: new Date(0), + totalNodes: ungroupedNodes.length, + onlineCount: ungroupedOnlineCount, + alertCount: ungroupedAlertCount, + complianceRate: 100, + }); + } + + return groupStats; + }), + + /** + * NODE-05: Per-node detail for a group, sorted by health status (worst first). + * Used for the drill-down view in the fleet health dashboard. + */ + nodesInGroup: protectedProcedure + .input(z.object({ groupId: z.string(), environmentId: z.string() })) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + const { groupId, environmentId } = input; + + let groupCriteria: Record = {}; + let requiredLabels: string[] = []; + + if (groupId === "__ungrouped__") { + // Fetch all groups to determine which nodes are ungrouped + const allGroups = await prisma.nodeGroup.findMany({ + where: { environmentId }, + }); + + const allNodes = await prisma.vectorNode.findMany({ + where: { environmentId }, + select: { + id: true, + name: true, + status: true, + labels: true, + lastSeen: true, + nodeMetrics: { + orderBy: { timestamp: "desc" }, + take: 1, + select: { loadAvg1: true }, + }, + }, + }); + + const assignedIds = new Set(); + for (const group of allGroups) { + const criteria = group.criteria as Record; + for (const n of allNodes) { + const nodeLabels = (n.labels as Record) ?? {}; + if (nodeMatchesGroup(nodeLabels, criteria)) { + assignedIds.add(n.id); + } + } + } + + const ungroupedNodes = allNodes.filter((n) => !assignedIds.has(n.id)); + return sortAndMapNodes(ungroupedNodes, []); + } + + // Normal group lookup — scoped to input.environmentId to prevent cross-team data exposure + const group = await prisma.nodeGroup.findFirst({ + where: { id: groupId, environmentId }, + }); + if (!group) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Node group not found", + }); + } + + groupCriteria = group.criteria as Record; + requiredLabels = group.requiredLabels as string[]; + + const allNodes = await prisma.vectorNode.findMany({ + where: { environmentId }, + select: { + id: true, + name: true, + status: true, + labels: true, + lastSeen: true, + nodeMetrics: { + orderBy: { timestamp: "desc" }, + take: 1, + select: { loadAvg1: true }, + }, + }, + }); + + const matchedNodes = allNodes.filter((n) => { + const nodeLabels = (n.labels as Record) ?? {}; + return nodeMatchesGroup(nodeLabels, groupCriteria); + }); + + return sortAndMapNodes(matchedNodes, requiredLabels); + }), +}); + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +const STATUS_ORDER: Record = { + UNREACHABLE: 0, + DEGRADED: 1, + UNKNOWN: 2, + HEALTHY: 3, +}; + +function sortAndMapNodes( + nodes: Array<{ + id: string; + name: string; + status: string; + labels: unknown; + lastSeen: Date | null; + nodeMetrics: Array<{ loadAvg1: number }>; + }>, + requiredLabels: string[], +) { + return nodes + .map((n) => ({ + id: n.id, + name: n.name, + status: n.status, + labels: n.labels, + lastSeen: n.lastSeen, + cpuLoad: n.nodeMetrics[0]?.loadAvg1 ?? null, + labelCompliant: + requiredLabels.length === 0 || + requiredLabels.every((key) => + Object.prototype.hasOwnProperty.call( + (n.labels as Record) ?? {}, + key, + ), + ), + })) + .sort((a, b) => { + const statusDiff = + (STATUS_ORDER[a.status] ?? 99) - (STATUS_ORDER[b.status] ?? 99); + if (statusDiff !== 0) return statusDiff; + return a.name.localeCompare(b.name); + }); +} diff --git a/src/server/routers/pipeline-group.ts b/src/server/routers/pipeline-group.ts index 031479dd..ee965d4e 100644 --- a/src/server/routers/pipeline-group.ts +++ b/src/server/routers/pipeline-group.ts @@ -12,7 +12,7 @@ export const pipelineGroupRouter = router({ return prisma.pipelineGroup.findMany({ where: { environmentId: input.environmentId }, include: { - _count: { select: { pipelines: true } }, + _count: { select: { pipelines: true, children: true } }, }, orderBy: { name: "asc" }, }); @@ -24,32 +24,51 @@ export const pipelineGroupRouter = router({ environmentId: z.string(), name: z.string().min(1).max(100), color: z.string().max(20).optional(), + parentId: z.string().optional(), }), ) .use(withTeamAccess("EDITOR")) .use(withAudit("pipelineGroup.created", "PipelineGroup")) .mutation(async ({ input }) => { - // Validate unique name per environment - const existing = await prisma.pipelineGroup.findUnique({ + // Check duplicate name under same parent (application-layer uniqueness) + const existing = await prisma.pipelineGroup.findFirst({ where: { - environmentId_name: { - environmentId: input.environmentId, - name: input.name, - }, + environmentId: input.environmentId, + name: input.name, + parentId: input.parentId ?? null, }, }); if (existing) { throw new TRPCError({ code: "CONFLICT", - message: `A group named "${input.name}" already exists in this environment`, + message: `A group named "${input.name}" already exists ${input.parentId ? "in this parent group" : "at the root level"}`, }); } + // Enforce max 3-level nesting depth + if (input.parentId) { + const parent = await prisma.pipelineGroup.findUnique({ + where: { id: input.parentId }, + select: { parentId: true, parent: { select: { parentId: true } } }, + }); + if (!parent) { + throw new TRPCError({ code: "NOT_FOUND", message: "Parent group not found" }); + } + // If parent has a grandparent that also has a parent, depth would exceed 3 + if (parent.parentId !== null && parent.parent?.parentId !== null && parent.parent?.parentId !== undefined) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Maximum group nesting depth (3) exceeded", + }); + } + } + return prisma.pipelineGroup.create({ data: { name: input.name, color: input.color, environmentId: input.environmentId, + parentId: input.parentId ?? null, }, }); }), @@ -60,6 +79,7 @@ export const pipelineGroupRouter = router({ id: z.string(), name: z.string().min(1).max(100).optional(), color: z.string().max(20).nullable().optional(), + parentId: z.string().nullable().optional(), }), ) .use(withTeamAccess("EDITOR")) @@ -67,7 +87,7 @@ export const pipelineGroupRouter = router({ .mutation(async ({ input }) => { const group = await prisma.pipelineGroup.findUnique({ where: { id: input.id }, - select: { id: true, environmentId: true, name: true }, + select: { id: true, environmentId: true, name: true, parentId: true }, }); if (!group) { throw new TRPCError({ @@ -78,25 +98,46 @@ export const pipelineGroupRouter = router({ // Validate unique name if name is being changed if (input.name && input.name !== group.name) { - const existing = await prisma.pipelineGroup.findUnique({ + const targetParentId = input.parentId !== undefined ? input.parentId : group.parentId; + const existingGroup = await prisma.pipelineGroup.findFirst({ where: { - environmentId_name: { - environmentId: group.environmentId, - name: input.name, - }, + environmentId: group.environmentId, + name: input.name, + parentId: targetParentId, + id: { not: input.id }, }, }); - if (existing) { + if (existingGroup) { throw new TRPCError({ code: "CONFLICT", - message: `A group named "${input.name}" already exists in this environment`, + message: `A group named "${input.name}" already exists in this location`, + }); + } + } + + // Enforce depth guard when parentId changes + if (input.parentId !== undefined && input.parentId !== group.parentId) { + if (input.parentId !== null) { + const parent = await prisma.pipelineGroup.findUnique({ + where: { id: input.parentId }, + select: { parentId: true, parent: { select: { parentId: true } } }, }); + if (!parent) { + throw new TRPCError({ code: "NOT_FOUND", message: "Parent group not found" }); + } + if (parent.parentId !== null && parent.parent?.parentId !== null && parent.parent?.parentId !== undefined) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Maximum group nesting depth (3) exceeded", + }); + } } } const data: Record = {}; if (input.name !== undefined) data.name = input.name; if (input.color !== undefined) data.color = input.color; + if (input.parentId !== undefined) data.parentId = input.parentId; return prisma.pipelineGroup.update({ where: { id: input.id }, @@ -120,7 +161,7 @@ export const pipelineGroupRouter = router({ }); } - // Prisma onDelete:SetNull automatically unassigns all pipelines + // Prisma onDelete:SetNull automatically sets children parentId to null return prisma.pipelineGroup.delete({ where: { id: input.id }, }); diff --git a/src/server/routers/pipeline.ts b/src/server/routers/pipeline.ts index 27a28e96..d98af5af 100644 --- a/src/server/routers/pipeline.ts +++ b/src/server/routers/pipeline.ts @@ -1040,6 +1040,113 @@ export const pipelineRouter = router({ } } + return { results, total: results.length, succeeded: results.filter((r) => r.success).length }; + }), + + bulkAddTags: protectedProcedure + .input( + z.object({ + pipelineIds: z.array(z.string()).min(1).max(100), + tags: z.array(z.string()).min(1), + }), + ) + .use(withTeamAccess("EDITOR")) + .mutation(async ({ input }) => { + // Validate tags against team.availableTags ONCE before the loop + // Get the team from the first pipeline's environment + const firstPipeline = await prisma.pipeline.findUnique({ + where: { id: input.pipelineIds[0] }, + select: { environment: { select: { teamId: true } } }, + }); + if (!firstPipeline?.environment.teamId) { + throw new TRPCError({ code: "NOT_FOUND", message: "Pipeline or team not found" }); + } + const team = await prisma.team.findUnique({ + where: { id: firstPipeline.environment.teamId }, + select: { availableTags: true }, + }); + if (!team) { + throw new TRPCError({ code: "NOT_FOUND", message: "Team not found" }); + } + const availableTags = (team.availableTags as string[]) ?? []; + if (availableTags.length > 0) { + const invalid = input.tags.filter((t) => !availableTags.includes(t)); + if (invalid.length > 0) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: `Invalid tags: ${invalid.join(", ")}. Tags must be defined in team settings first.`, + }); + } + } + + const results: Array<{ pipelineId: string; success: boolean; error?: string }> = []; + + for (const pipelineId of input.pipelineIds) { + try { + const pipeline = await prisma.pipeline.findUnique({ + where: { id: pipelineId }, + select: { id: true, tags: true }, + }); + if (!pipeline) { + results.push({ pipelineId, success: false, error: "Pipeline not found" }); + continue; + } + const existingTags = (pipeline.tags as string[]) ?? []; + const merged = [...new Set([...existingTags, ...input.tags])]; + await prisma.pipeline.update({ + where: { id: pipelineId }, + data: { tags: merged }, + }); + results.push({ pipelineId, success: true }); + } catch (err) { + results.push({ + pipelineId, + success: false, + error: err instanceof Error ? err.message : "Unknown error", + }); + } + } + + return { results, total: results.length, succeeded: results.filter((r) => r.success).length }; + }), + + bulkRemoveTags: protectedProcedure + .input( + z.object({ + pipelineIds: z.array(z.string()).min(1).max(100), + tags: z.array(z.string()).min(1), + }), + ) + .use(withTeamAccess("EDITOR")) + .mutation(async ({ input }) => { + const results: Array<{ pipelineId: string; success: boolean; error?: string }> = []; + + for (const pipelineId of input.pipelineIds) { + try { + const pipeline = await prisma.pipeline.findUnique({ + where: { id: pipelineId }, + select: { id: true, tags: true }, + }); + if (!pipeline) { + results.push({ pipelineId, success: false, error: "Pipeline not found" }); + continue; + } + const existingTags = (pipeline.tags as string[]) ?? []; + const filtered = existingTags.filter((t) => !input.tags.includes(t)); + await prisma.pipeline.update({ + where: { id: pipelineId }, + data: { tags: filtered }, + }); + results.push({ pipelineId, success: true }); + } catch (err) { + results.push({ + pipelineId, + success: false, + error: err instanceof Error ? err.message : "Unknown error", + }); + } + } + return { results, total: results.length, succeeded: results.filter((r) => r.success).length }; }), }); diff --git a/src/server/routers/promotion.ts b/src/server/routers/promotion.ts new file mode 100644 index 00000000..bd6ad67e --- /dev/null +++ b/src/server/routers/promotion.ts @@ -0,0 +1,428 @@ +import { z } from "zod"; +import { TRPCError } from "@trpc/server"; +import { router, protectedProcedure, withTeamAccess } from "@/trpc/init"; +import { prisma } from "@/lib/prisma"; +import { withAudit } from "@/server/middleware/audit"; +import { + preflightSecrets, + executePromotion, + generateDiffPreview, +} from "@/server/services/promotion-service"; +import { createPromotionPR } from "@/server/services/gitops-promotion"; +import { generateVectorYaml } from "@/lib/config-generator"; +import { decryptNodeConfig } from "@/server/services/config-crypto"; + +export const promotionRouter = router({ + /** + * Preflight check: validates all SECRET[name] references in the source pipeline + * exist as named secrets in the target environment. + * Also checks for pipeline name collisions. + */ + preflight: protectedProcedure + .input( + z.object({ + pipelineId: z.string(), + targetEnvironmentId: z.string(), + name: z.string().optional(), + }), + ) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + const pipeline = await prisma.pipeline.findUnique({ + where: { id: input.pipelineId }, + select: { name: true }, + }); + if (!pipeline) { + throw new TRPCError({ code: "NOT_FOUND", message: "Pipeline not found" }); + } + + const targetPipelineName = input.name ?? pipeline.name; + + // Check for name collision in target env + const nameCollision = await prisma.pipeline.findFirst({ + where: { + environmentId: input.targetEnvironmentId, + name: targetPipelineName, + }, + select: { id: true }, + }); + + const targetEnv = await prisma.environment.findUnique({ + where: { id: input.targetEnvironmentId }, + select: { name: true }, + }); + + const secretPreflight = await preflightSecrets(input.pipelineId, input.targetEnvironmentId); + + return { + ...secretPreflight, + nameCollision: nameCollision !== null, + targetEnvironmentName: targetEnv?.name ?? input.targetEnvironmentId, + targetPipelineName, + }; + }), + + /** + * Generates a side-by-side YAML diff preview showing source config + * (with SECRET refs visible) vs target config (with SECRET refs as env vars). + */ + diffPreview: protectedProcedure + .input(z.object({ pipelineId: z.string() })) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + return generateDiffPreview(input.pipelineId); + }), + + /** + * Initiates a pipeline promotion from source to target environment. + * - Creates a PromotionRequest with status PENDING (when approval required) + * - Or auto-approves and executes when requireDeployApproval is false + */ + initiate: protectedProcedure + .input( + z.object({ + pipelineId: z.string(), + targetEnvironmentId: z.string(), + name: z.string().optional(), + }), + ) + .use(withTeamAccess("EDITOR")) + .use(withAudit("promotion.initiated", "PromotionRequest")) + .mutation(async ({ input, ctx }) => { + const userId = ctx.session.user.id; + + // Load source pipeline with environment + const sourcePipeline = await prisma.pipeline.findUnique({ + where: { id: input.pipelineId }, + include: { + nodes: true, + edges: true, + environment: { + select: { teamId: true, id: true, name: true }, + }, + }, + }); + if (!sourcePipeline) { + throw new TRPCError({ code: "NOT_FOUND", message: "Pipeline not found" }); + } + + // Load target environment (including GitOps fields for PR-based promotion) + const targetEnv = await prisma.environment.findUnique({ + where: { id: input.targetEnvironmentId }, + select: { + teamId: true, + name: true, + requireDeployApproval: true, + gitOpsMode: true, + gitRepoUrl: true, + gitToken: true, + gitBranch: true, + }, + }); + if (!targetEnv) { + throw new TRPCError({ code: "NOT_FOUND", message: "Target environment not found" }); + } + + // Validate: source and target must be different environments + if (sourcePipeline.environmentId === input.targetEnvironmentId) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Source and target environments must be different", + }); + } + + // Validate: same team constraint + if (targetEnv.teamId !== sourcePipeline.environment.teamId) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Target environment must belong to the same team as the source pipeline", + }); + } + + const targetPipelineName = input.name ?? sourcePipeline.name; + + // Check for pipeline name collision in target env + const nameCollision = await prisma.pipeline.findFirst({ + where: { + environmentId: input.targetEnvironmentId, + name: targetPipelineName, + }, + select: { id: true, name: true }, + }); + if (nameCollision) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: `A pipeline named "${targetPipelineName}" already exists in environment "${targetEnv.name}"`, + }); + } + + // Preflight: check all secret refs are present in target env + const preflight = await preflightSecrets(input.pipelineId, input.targetEnvironmentId); + if (!preflight.canProceed) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: `Missing secrets in target environment: ${preflight.missing.join(", ")}`, + }); + } + + // Capture snapshots from source pipeline + const nodesSnapshot = sourcePipeline.nodes.map((n) => ({ + id: n.id, + componentKey: n.componentKey, + componentType: n.componentType, + kind: n.kind, + config: n.config, + positionX: n.positionX, + positionY: n.positionY, + disabled: n.disabled, + })); + const edgesSnapshot = sourcePipeline.edges.map((e) => ({ + id: e.id, + sourceNodeId: e.sourceNodeId, + targetNodeId: e.targetNodeId, + sourcePort: e.sourcePort, + })); + + // Create the PromotionRequest + const promotionRequest = await prisma.promotionRequest.create({ + data: { + sourcePipelineId: input.pipelineId, + sourceEnvironmentId: sourcePipeline.environmentId, + targetEnvironmentId: input.targetEnvironmentId, + status: "PENDING", + promotedById: userId, + targetPipelineName, + nodesSnapshot: nodesSnapshot as unknown as import("@/generated/prisma").Prisma.InputJsonValue, + edgesSnapshot: edgesSnapshot as unknown as import("@/generated/prisma").Prisma.InputJsonValue, + globalConfigSnapshot: sourcePipeline.globalConfig as import("@/generated/prisma").Prisma.InputJsonValue | null ?? undefined, + }, + }); + + // GitOps path: if target env has gitOpsMode="promotion" and a configured repo, + // create a GitHub PR instead of directly executing. The PR merge will trigger deployment. + if (targetEnv.gitOpsMode === "promotion" && targetEnv.gitRepoUrl && targetEnv.gitToken) { + // Build YAML from source pipeline nodes (preserve SECRET[name] refs as-is) + const flowEdges = sourcePipeline.edges.map((e) => ({ + id: e.id, + source: e.sourceNodeId, + target: e.targetNodeId, + ...(e.sourcePort ? { sourceHandle: e.sourcePort } : {}), + })); + const flowNodes = sourcePipeline.nodes.map((n) => ({ + id: n.id, + type: n.kind.toLowerCase(), + position: { x: n.positionX, y: n.positionY }, + data: { + componentDef: { type: n.componentType, kind: n.kind.toLowerCase() }, + componentKey: n.componentKey, + config: decryptNodeConfig(n.componentType, (n.config as Record) ?? {}), + disabled: n.disabled, + }, + })); + const configYaml = generateVectorYaml( + flowNodes as Parameters[0], + flowEdges as Parameters[1], + sourcePipeline.globalConfig as Record | null, + null, + ); + + const pr = await createPromotionPR({ + encryptedToken: targetEnv.gitToken, + repoUrl: targetEnv.gitRepoUrl, + baseBranch: targetEnv.gitBranch ?? "main", + requestId: promotionRequest.id, + pipelineName: sourcePipeline.name, + sourceEnvironmentName: sourcePipeline.environment.name, + targetEnvironmentName: targetEnv.name, + configYaml, + }); + + await prisma.promotionRequest.update({ + where: { id: promotionRequest.id }, + data: { + prUrl: pr.prUrl, + prNumber: pr.prNumber, + status: "AWAITING_PR_MERGE", + }, + }); + + return { + requestId: promotionRequest.id, + status: "AWAITING_PR_MERGE", + prUrl: pr.prUrl, + pendingApproval: false, + }; + } + + // UI path (Phase 5): if no approval required, auto-execute + if (!targetEnv.requireDeployApproval) { + await executePromotion(promotionRequest.id, userId); + return { requestId: promotionRequest.id, status: "DEPLOYED", pendingApproval: false }; + } + + return { requestId: promotionRequest.id, status: "PENDING", pendingApproval: true }; + }), + + /** + * Approves a pending promotion request and executes the promotion. + * Self-review is blocked. Uses atomic updateMany to prevent race conditions. + */ + approve: protectedProcedure + .input(z.object({ requestId: z.string() })) + .use(withTeamAccess("EDITOR")) + .use(withAudit("promotion.approved", "PromotionRequest")) + .mutation(async ({ input, ctx }) => { + const userId = ctx.session.user.id; + + const request = await prisma.promotionRequest.findUnique({ + where: { id: input.requestId }, + select: { id: true, status: true, promotedById: true }, + }); + if (!request || request.status !== "PENDING") { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Promotion request not found or not pending", + }); + } + + // Self-review guard + if (request.promotedById === userId) { + throw new TRPCError({ + code: "FORBIDDEN", + message: "Cannot approve your own promotion request", + }); + } + + // Atomic claim — prevents double-approval race condition + const updated = await prisma.promotionRequest.updateMany({ + where: { id: input.requestId, status: "PENDING" }, + data: { + status: "APPROVED", + approvedById: userId, + reviewedAt: new Date(), + }, + }); + if (updated.count === 0) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Promotion request is no longer pending", + }); + } + + // Execute the promotion + const result = await executePromotion(input.requestId, userId); + + return { success: true, pipelineId: result.pipelineId, pipelineName: result.pipelineName }; + }), + + /** + * Rejects a pending promotion request. + */ + reject: protectedProcedure + .input(z.object({ requestId: z.string(), note: z.string().optional() })) + .use(withTeamAccess("EDITOR")) + .use(withAudit("promotion.rejected", "PromotionRequest")) + .mutation(async ({ input, ctx }) => { + const request = await prisma.promotionRequest.findUnique({ + where: { id: input.requestId }, + select: { id: true, status: true, targetPipelineId: true }, + }); + if (!request || request.status !== "PENDING") { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Promotion request not found or not pending", + }); + } + + // Atomically reject — prevents race with concurrent approve + const updated = await prisma.promotionRequest.updateMany({ + where: { id: input.requestId, status: "PENDING" }, + data: { + status: "REJECTED", + reviewedAt: new Date(), + reviewNote: input.note ?? null, + }, + }); + if (updated.count === 0) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Promotion request is no longer pending", + }); + } + + // Safety: clean up target pipeline if one was somehow created (shouldn't happen for PENDING) + if (request.targetPipelineId) { + await prisma.pipeline.delete({ where: { id: request.targetPipelineId } }).catch(() => { + // Ignore deletion errors + }); + } + + return { rejected: true }; + }), + + /** + * Cancels a pending promotion request. Only the original promoter can cancel. + */ + cancel: protectedProcedure + .input(z.object({ requestId: z.string() })) + .use(withTeamAccess("EDITOR")) + .use(withAudit("promotion.cancelled", "PromotionRequest")) + .mutation(async ({ input, ctx }) => { + const userId = ctx.session.user.id; + + const request = await prisma.promotionRequest.findUnique({ + where: { id: input.requestId }, + select: { id: true, status: true, promotedById: true }, + }); + if (!request || request.status !== "PENDING") { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Promotion request not found or not pending", + }); + } + + // Only the original promoter can cancel + if (request.promotedById !== userId) { + throw new TRPCError({ + code: "FORBIDDEN", + message: "Only the original promoter can cancel a pending request", + }); + } + + const updated = await prisma.promotionRequest.updateMany({ + where: { id: input.requestId, status: "PENDING" }, + data: { status: "CANCELLED" }, + }); + if (updated.count === 0) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Promotion request status changed — try again", + }); + } + + return { cancelled: true }; + }), + + /** + * Returns promotion history for a pipeline ordered by createdAt desc. + * Includes related user names, emails, and environment names. + */ + history: protectedProcedure + .input(z.object({ pipelineId: z.string() })) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + const records = await prisma.promotionRequest.findMany({ + where: { sourcePipelineId: input.pipelineId }, + orderBy: { createdAt: "desc" }, + take: 20, + include: { + promotedBy: { select: { name: true, email: true } }, + approvedBy: { select: { name: true, email: true } }, + sourceEnvironment: { select: { name: true } }, + targetEnvironment: { select: { name: true } }, + }, + }); + + return records; + }), +}); diff --git a/src/server/routers/webhook-endpoint.ts b/src/server/routers/webhook-endpoint.ts new file mode 100644 index 00000000..763aeb98 --- /dev/null +++ b/src/server/routers/webhook-endpoint.ts @@ -0,0 +1,244 @@ +import { z } from "zod"; +import { TRPCError } from "@trpc/server"; +import { router, protectedProcedure, withTeamAccess } from "@/trpc/init"; +import { prisma } from "@/lib/prisma"; +import { AlertMetric } from "@/generated/prisma"; +import { withAudit } from "@/server/middleware/audit"; +import { encrypt } from "@/server/services/crypto"; +import { validatePublicUrl } from "@/server/services/url-validation"; +import { deliverOutboundWebhook } from "@/server/services/outbound-webhook"; + +// ─── Shared select shape (never includes encryptedSecret) ─────────────────── + +const ENDPOINT_SELECT = { + id: true, + name: true, + url: true, + eventTypes: true, + enabled: true, + createdAt: true, + updatedAt: true, +} as const; + +// ─── Router ───────────────────────────────────────────────────────────────── + +export const webhookEndpointRouter = router({ + + /** + * List all webhook endpoints for a team. + * Excludes encryptedSecret — it is never returned after creation. + */ + list: protectedProcedure + .input(z.object({ teamId: z.string() })) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + return prisma.webhookEndpoint.findMany({ + where: { teamId: input.teamId }, + select: ENDPOINT_SELECT, + orderBy: { createdAt: "desc" }, + }); + }), + + /** + * Create a new webhook endpoint. + * Validates URL against SSRF, encrypts the secret if provided. + * Returns the plaintext secret ONCE on creation (never again). + */ + create: protectedProcedure + .input( + z.object({ + teamId: z.string(), + name: z.string().min(1).max(200), + url: z.string().url(), + eventTypes: z.array(z.nativeEnum(AlertMetric)).min(1), + secret: z.string().min(1).optional(), + }), + ) + .use(withTeamAccess("ADMIN")) + .use(withAudit("webhookEndpoint.created", "WebhookEndpoint")) + .mutation(async ({ input }) => { + await validatePublicUrl(input.url); + + const encryptedSecret = input.secret ? encrypt(input.secret) : null; + + const endpoint = await prisma.webhookEndpoint.create({ + data: { + teamId: input.teamId, + name: input.name, + url: input.url, + eventTypes: input.eventTypes, + encryptedSecret, + }, + select: ENDPOINT_SELECT, + }); + + // Return the plaintext secret once so the admin can copy it. + // After this response, the secret is never exposed again. + return { + ...endpoint, + secret: input.secret ?? null, + }; + }), + + /** + * Update an existing webhook endpoint. + * Only provided fields are updated. URL is re-validated if changed. + */ + update: protectedProcedure + .input( + z.object({ + id: z.string(), + teamId: z.string(), + name: z.string().min(1).max(200).optional(), + url: z.string().url().optional(), + eventTypes: z.array(z.nativeEnum(AlertMetric)).min(1).optional(), + secret: z.string().min(1).optional(), + }), + ) + .use(withTeamAccess("ADMIN")) + .use(withAudit("webhookEndpoint.updated", "WebhookEndpoint")) + .mutation(async ({ input }) => { + // Verify ownership + const existing = await prisma.webhookEndpoint.findFirst({ + where: { id: input.id, teamId: input.teamId }, + select: { id: true }, + }); + if (!existing) { + throw new TRPCError({ code: "NOT_FOUND", message: "Webhook endpoint not found" }); + } + + if (input.url) { + await validatePublicUrl(input.url); + } + + const updateData: Record = {}; + if (input.name !== undefined) updateData.name = input.name; + if (input.url !== undefined) updateData.url = input.url; + if (input.eventTypes !== undefined) updateData.eventTypes = input.eventTypes; + if (input.secret !== undefined) updateData.encryptedSecret = encrypt(input.secret); + + return prisma.webhookEndpoint.update({ + where: { id: input.id }, + data: updateData, + select: ENDPOINT_SELECT, + }); + }), + + /** + * Delete a webhook endpoint (and cascade its deliveries). + */ + delete: protectedProcedure + .input(z.object({ id: z.string(), teamId: z.string() })) + .use(withTeamAccess("ADMIN")) + .use(withAudit("webhookEndpoint.deleted", "WebhookEndpoint")) + .mutation(async ({ input }) => { + // Verify the endpoint belongs to this team before deleting + const existing = await prisma.webhookEndpoint.findFirst({ + where: { id: input.id, teamId: input.teamId }, + select: { id: true }, + }); + if (!existing) { + throw new TRPCError({ code: "NOT_FOUND", message: "Webhook endpoint not found" }); + } + + await prisma.webhookEndpoint.delete({ where: { id: input.id } }); + return { deleted: true }; + }), + + /** + * Toggle the enabled flag on a webhook endpoint. + */ + toggleEnabled: protectedProcedure + .input(z.object({ id: z.string(), teamId: z.string() })) + .use(withTeamAccess("ADMIN")) + .use(withAudit("webhookEndpoint.toggled", "WebhookEndpoint")) + .mutation(async ({ input }) => { + const existing = await prisma.webhookEndpoint.findFirst({ + where: { id: input.id, teamId: input.teamId }, + select: { id: true, enabled: true }, + }); + if (!existing) { + throw new TRPCError({ code: "NOT_FOUND", message: "Webhook endpoint not found" }); + } + + return prisma.webhookEndpoint.update({ + where: { id: input.id }, + data: { enabled: !existing.enabled }, + select: ENDPOINT_SELECT, + }); + }), + + /** + * Send a test delivery to a webhook endpoint. + * Returns the OutboundResult directly so the caller can report success/failure. + */ + testDelivery: protectedProcedure + .input(z.object({ id: z.string(), teamId: z.string() })) + .use(withTeamAccess("ADMIN")) + .use(withAudit("webhookEndpoint.testDelivery", "WebhookEndpoint")) + .mutation(async ({ input }) => { + const endpoint = await prisma.webhookEndpoint.findFirst({ + where: { id: input.id, teamId: input.teamId }, + select: { + id: true, + url: true, + encryptedSecret: true, + }, + }); + if (!endpoint) { + throw new TRPCError({ code: "NOT_FOUND", message: "Webhook endpoint not found" }); + } + + const testPayload = { + type: "test", + timestamp: new Date().toISOString(), + data: { + message: "Test delivery from VectorFlow", + endpointId: input.id, + }, + }; + + return deliverOutboundWebhook( + { url: endpoint.url, encryptedSecret: endpoint.encryptedSecret, id: endpoint.id }, + testPayload, + ); + }), + + /** + * List delivery history for a webhook endpoint with cursor pagination. + */ + listDeliveries: protectedProcedure + .input( + z.object({ + webhookEndpointId: z.string(), + teamId: z.string(), + take: z.number().min(1).max(100).default(20), + skip: z.number().min(0).default(0), + }), + ) + .use(withTeamAccess("VIEWER")) + .query(async ({ input }) => { + // Verify endpoint belongs to the team + const endpoint = await prisma.webhookEndpoint.findFirst({ + where: { id: input.webhookEndpointId, teamId: input.teamId }, + select: { id: true }, + }); + if (!endpoint) { + throw new TRPCError({ code: "NOT_FOUND", message: "Webhook endpoint not found" }); + } + + const [deliveries, total] = await Promise.all([ + prisma.webhookDelivery.findMany({ + where: { webhookEndpointId: input.webhookEndpointId }, + orderBy: { requestedAt: "desc" }, + take: input.take, + skip: input.skip, + }), + prisma.webhookDelivery.count({ + where: { webhookEndpointId: input.webhookEndpointId }, + }), + ]); + + return { deliveries, total }; + }), +}); diff --git a/src/server/services/__tests__/gitops-promotion.test.ts b/src/server/services/__tests__/gitops-promotion.test.ts new file mode 100644 index 00000000..6d9e31a6 --- /dev/null +++ b/src/server/services/__tests__/gitops-promotion.test.ts @@ -0,0 +1,183 @@ +import { vi, describe, it, expect, beforeEach } from "vitest"; + +// ─── Mocks ─────────────────────────────────────────────────────────────────── + +vi.mock("@octokit/rest", () => ({ + Octokit: vi.fn(), +})); + +vi.mock("@/server/services/crypto", () => ({ + decrypt: vi.fn((encrypted: string) => `decrypted-${encrypted}`), +})); + +vi.mock("@/server/services/git-sync", () => ({ + toFilenameSlug: vi.fn((name: string) => name.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "")), +})); + +// ─── Imports ───────────────────────────────────────────────────────────────── + +import { Octokit } from "@octokit/rest"; +import { createPromotionPR, parseGitHubOwnerRepo } from "@/server/services/gitops-promotion"; + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +function makeOctokitMock(overrides?: Record) { + const getRef = vi.fn().mockResolvedValue({ + data: { object: { sha: "base-sha-abc123" } }, + }); + const createRef = vi.fn().mockResolvedValue({}); + const getContent = vi.fn().mockRejectedValue(new Error("Not Found")); // Default: file does not exist + const createOrUpdateFileContents = vi.fn().mockResolvedValue({}); + const create = vi.fn().mockResolvedValue({ + data: { number: 42, html_url: "https://github.com/owner/repo/pull/42" }, + }); + + return { + rest: { + git: { getRef, createRef }, + repos: { getContent, createOrUpdateFileContents }, + pulls: { create }, + }, + ...overrides, + }; +} + +// ─── Tests: parseGitHubOwnerRepo ───────────────────────────────────────────── + +describe("parseGitHubOwnerRepo", () => { + it("parses HTTPS URL without .git", () => { + const result = parseGitHubOwnerRepo("https://github.com/myorg/myrepo"); + expect(result).toEqual({ owner: "myorg", repo: "myrepo" }); + }); + + it("parses HTTPS URL with .git", () => { + const result = parseGitHubOwnerRepo("https://github.com/myorg/myrepo.git"); + expect(result).toEqual({ owner: "myorg", repo: "myrepo" }); + }); + + it("parses SSH URL", () => { + const result = parseGitHubOwnerRepo("git@github.com:myorg/myrepo.git"); + expect(result).toEqual({ owner: "myorg", repo: "myrepo" }); + }); + + it("parses SSH URL without .git", () => { + const result = parseGitHubOwnerRepo("git@github.com:myorg/myrepo"); + expect(result).toEqual({ owner: "myorg", repo: "myrepo" }); + }); + + it("throws for unrecognized URL format", () => { + expect(() => parseGitHubOwnerRepo("https://gitlab.com/myorg/myrepo")).toThrow( + "Cannot parse GitHub owner/repo", + ); + }); +}); + +// ─── Tests: createPromotionPR ───────────────────────────────────────────────── + +describe("createPromotionPR", () => { + let octokitMock: ReturnType; + + beforeEach(() => { + vi.clearAllMocks(); + octokitMock = makeOctokitMock(); + // Must use a function (not arrow) so `new` works correctly in Vitest + vi.mocked(Octokit).mockImplementation(function () { + return octokitMock as never; + }); + }); + + const baseOpts = { + encryptedToken: "enc-token", + repoUrl: "https://github.com/myorg/myrepo", + baseBranch: "main", + requestId: "req1234567890", + pipelineName: "My Pipeline", + sourceEnvironmentName: "Development", + targetEnvironmentName: "Production", + configYaml: "sources:\n my_source:\n type: stdin\n", + }; + + it("decrypts token and instantiates Octokit with it", async () => { + await createPromotionPR(baseOpts); + expect(Octokit).toHaveBeenCalledWith({ auth: "decrypted-enc-token" }); + }); + + it("gets base branch SHA before creating PR branch", async () => { + await createPromotionPR(baseOpts); + expect(octokitMock.rest.git.getRef).toHaveBeenCalledWith({ + owner: "myorg", + repo: "myrepo", + ref: "heads/main", + }); + }); + + it("creates a PR branch with unique name including requestId prefix", async () => { + await createPromotionPR(baseOpts); + expect(octokitMock.rest.git.createRef).toHaveBeenCalledWith({ + owner: "myorg", + repo: "myrepo", + ref: "refs/heads/vf-promote/production-my-pipeline-req12345", + sha: "base-sha-abc123", + }); + }); + + it("commits YAML file at envSlug/pipelineSlug.yaml on the PR branch", async () => { + await createPromotionPR(baseOpts); + expect(octokitMock.rest.repos.createOrUpdateFileContents).toHaveBeenCalledWith( + expect.objectContaining({ + owner: "myorg", + repo: "myrepo", + path: "production/my-pipeline.yaml", + branch: "vf-promote/production-my-pipeline-req12345", + content: Buffer.from(baseOpts.configYaml).toString("base64"), + }), + ); + }); + + it("opens PR with promotion request ID embedded in body", async () => { + await createPromotionPR(baseOpts); + const createCall = octokitMock.rest.pulls.create.mock.calls[0][0]; + expect(createCall.body).toContain(""); + expect(createCall.title).toContain("My Pipeline"); + expect(createCall.title).toContain("Production"); + expect(createCall.head).toBe("vf-promote/production-my-pipeline-req12345"); + expect(createCall.base).toBe("main"); + }); + + it("returns prNumber, prUrl, and prBranch from GitHub response", async () => { + const result = await createPromotionPR(baseOpts); + expect(result.prNumber).toBe(42); + expect(result.prUrl).toBe("https://github.com/owner/repo/pull/42"); + expect(result.prBranch).toBe("vf-promote/production-my-pipeline-req12345"); + }); + + it("includes existing file SHA when file already exists on branch", async () => { + octokitMock.rest.repos.getContent.mockResolvedValue({ + data: { sha: "existing-file-sha", type: "file", name: "my-pipeline.yaml" }, + } as never); + + await createPromotionPR(baseOpts); + + expect(octokitMock.rest.repos.createOrUpdateFileContents).toHaveBeenCalledWith( + expect.objectContaining({ sha: "existing-file-sha" }), + ); + }); + + it("does not include sha when file does not exist yet (new file creation)", async () => { + // Default mock: getContent throws "Not Found" + await createPromotionPR(baseOpts); + + const updateCall = octokitMock.rest.repos.createOrUpdateFileContents.mock.calls[0][0]; + expect(updateCall.sha).toBeUndefined(); + }); + + it("parses SSH URL format correctly", async () => { + await createPromotionPR({ + ...baseOpts, + repoUrl: "git@github.com:myorg/myrepo.git", + }); + expect(octokitMock.rest.git.getRef).toHaveBeenCalledWith( + expect.objectContaining({ owner: "myorg", repo: "myrepo" }), + ); + }); +}); diff --git a/src/server/services/__tests__/sse-registry.test.ts b/src/server/services/__tests__/sse-registry.test.ts index 15b06fa5..9ab6e716 100644 --- a/src/server/services/__tests__/sse-registry.test.ts +++ b/src/server/services/__tests__/sse-registry.test.ts @@ -193,6 +193,7 @@ describe("SSERegistry", () => { expect(text).toBe(": keepalive\n\n"); }); + // PERF-02: Ghost connections detected and evicted within one keepalive interval (30s) it("keepalive removes dead connections", () => { const registry = new SSERegistry(); const ctrl = mockController(); diff --git a/src/server/services/alert-evaluator.ts b/src/server/services/alert-evaluator.ts index 7b84ea70..1e5d3ec1 100644 --- a/src/server/services/alert-evaluator.ts +++ b/src/server/services/alert-evaluator.ts @@ -375,6 +375,7 @@ const METRIC_LABELS: Record = { certificate_expiring: "Certificate expiring", node_joined: "Node joined", node_left: "Node left", + promotion_completed: "Promotion completed", }; const CONDITION_LABELS: Record = { diff --git a/src/server/services/event-alerts.ts b/src/server/services/event-alerts.ts index 47706fdf..9bbab638 100644 --- a/src/server/services/event-alerts.ts +++ b/src/server/services/event-alerts.ts @@ -2,6 +2,7 @@ import { prisma } from "@/lib/prisma"; import type { AlertMetric } from "@/generated/prisma"; import { deliverToChannels } from "@/server/services/channels"; import { deliverWebhooks } from "@/server/services/webhook-delivery"; +import { fireOutboundWebhooks } from "@/server/services/outbound-webhook"; // Re-export from the shared (client-safe) module so existing server imports // continue to work without changes. @@ -102,6 +103,25 @@ export async function fireEventAlert( await deliverWebhooks(rule.environmentId, payload); await deliverToChannels(rule.environmentId, rule.id, payload); + // 4b. Deliver to outbound webhook subscriptions (team-scoped) + // void — never blocks the calling operation + if (rule.environment.team) { + void fireOutboundWebhooks(metric, rule.teamId, { + type: metric, + timestamp: event.firedAt.toISOString(), + data: { + alertId: event.id, + ruleName: rule.name, + environment: rule.environment.name, + team: rule.environment.team.name, + node: (metadata.nodeId as string) ?? undefined, + pipeline: rule.pipeline?.name ?? undefined, + message: metadata.message, + value: 0, + }, + }); + } + // 5. Update the AlertEvent with notifiedAt timestamp await prisma.alertEvent.update({ where: { id: event.id }, diff --git a/src/server/services/gitops-promotion.ts b/src/server/services/gitops-promotion.ts new file mode 100644 index 00000000..a3f687fc --- /dev/null +++ b/src/server/services/gitops-promotion.ts @@ -0,0 +1,152 @@ +import { Octokit } from "@octokit/rest"; +import { decrypt } from "@/server/services/crypto"; +import { toFilenameSlug } from "@/server/services/git-sync"; + +// ─── Types ────────────────────────────────────────────────────────────────── + +export interface CreatePromotionPROptions { + /** Encrypted GitHub PAT (stored in Environment.gitToken) */ + encryptedToken: string; + /** GitHub repo URL — https or SSH format */ + repoUrl: string; + /** Target branch in the repo (e.g. "main") */ + baseBranch: string; + /** PromotionRequest.id — used to make branch name unique and embedded in PR body */ + requestId: string; + /** Source pipeline name */ + pipelineName: string; + /** Source environment name */ + sourceEnvironmentName: string; + /** Target environment name */ + targetEnvironmentName: string; + /** Vector YAML config string for the promoted pipeline */ + configYaml: string; +} + +export interface CreatePromotionPRResult { + prNumber: number; + prUrl: string; + prBranch: string; +} + +// ─── URL Parsing ───────────────────────────────────────────────────────────── + +/** + * Parses owner and repo from a GitHub URL. + * Supports: + * - https://github.com/owner/repo + * - https://github.com/owner/repo.git + * - git@github.com:owner/repo.git + */ +export function parseGitHubOwnerRepo(repoUrl: string): { owner: string; repo: string } { + // SSH format: git@github.com:owner/repo.git + const sshMatch = repoUrl.match(/git@github\.com:([^/]+)\/(.+?)(?:\.git)?$/); + if (sshMatch) { + return { owner: sshMatch[1], repo: sshMatch[2] }; + } + + // HTTPS format: https://github.com/owner/repo[.git] + const httpsMatch = repoUrl.match(/github\.com\/([^/]+)\/(.+?)(?:\.git)?(?:\/.*)?$/); + if (httpsMatch) { + return { owner: httpsMatch[1], repo: httpsMatch[2] }; + } + + throw new Error( + `Cannot parse GitHub owner/repo from URL: "${repoUrl}". ` + + `Expected format: https://github.com/owner/repo or git@github.com:owner/repo.git`, + ); +} + +// ─── Service ───────────────────────────────────────────────────────────────── + +/** + * Creates a GitHub PR for a pipeline promotion using the GitHub REST API. + * + * Flow: + * 1. Decrypt token and authenticate with Octokit + * 2. Get the base branch SHA + * 3. Create a new PR branch (vf-promote/{envSlug}-{pipelineSlug}-{requestId[:8]}) + * 4. Commit the pipeline YAML file to {envSlug}/{pipelineSlug}.yaml on the PR branch + * 5. Open a PR with the VF promotion request ID embedded in the body + * + * The promotion request ID in the PR body is used by the merge webhook handler + * to look up the PromotionRequest when the PR is merged. + */ +export async function createPromotionPR( + opts: CreatePromotionPROptions, +): Promise { + const token = decrypt(opts.encryptedToken); + const { owner, repo } = parseGitHubOwnerRepo(opts.repoUrl); + + const octokit = new Octokit({ auth: token }); + + // Step 1: Get base branch SHA + const { data: refData } = await octokit.rest.git.getRef({ + owner, + repo, + ref: `heads/${opts.baseBranch}`, + }); + const baseSha = refData.object.sha; + + // Step 2: Create PR branch with unique name to avoid collision + const envSlug = toFilenameSlug(opts.targetEnvironmentName); + const pipelineSlug = toFilenameSlug(opts.pipelineName); + const prBranch = `vf-promote/${envSlug}-${pipelineSlug}-${opts.requestId.slice(0, 8)}`; + + await octokit.rest.git.createRef({ + owner, + repo, + ref: `refs/heads/${prBranch}`, + sha: baseSha, + }); + + // Step 3: Check for existing file (to get SHA for update vs create) + const filePath = `${envSlug}/${pipelineSlug}.yaml`; + let existingSha: string | undefined; + try { + const { data: existing } = await octokit.rest.repos.getContent({ + owner, + repo, + path: filePath, + ref: prBranch, + }); + if (!Array.isArray(existing) && "sha" in existing) { + existingSha = existing.sha; + } + } catch { + // File does not exist yet — this is expected for new promotions + } + + // Step 4: Commit YAML file to PR branch + await octokit.rest.repos.createOrUpdateFileContents({ + owner, + repo, + path: filePath, + message: `promote: "${opts.pipelineName}" \u2192 ${opts.targetEnvironmentName}`, + content: Buffer.from(opts.configYaml).toString("base64"), + branch: prBranch, + ...(existingSha ? { sha: existingSha } : {}), + }); + + // Step 5: Create the pull request + const { data: pr } = await octokit.rest.pulls.create({ + owner, + repo, + title: `Promote "${opts.pipelineName}" to ${opts.targetEnvironmentName}`, + body: [ + ``, + ``, + `Automatically promoted by **VectorFlow** from **${opts.sourceEnvironmentName}** to **${opts.targetEnvironmentName}**.`, + ``, + `**Merge this PR to deploy the pipeline to ${opts.targetEnvironmentName}.**`, + ].join("\n"), + head: prBranch, + base: opts.baseBranch, + }); + + return { + prNumber: pr.number, + prUrl: pr.html_url, + prBranch, + }; +} diff --git a/src/server/services/outbound-webhook.test.ts b/src/server/services/outbound-webhook.test.ts new file mode 100644 index 00000000..87f215da --- /dev/null +++ b/src/server/services/outbound-webhook.test.ts @@ -0,0 +1,322 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { mockDeep } from "vitest-mock-extended"; +import type { PrismaClient } from "@/generated/prisma"; +import * as cryptoMod from "@/server/services/crypto"; +import * as urlValidation from "@/server/services/url-validation"; +import crypto from "crypto"; + +// ─── Module mocks ────────────────────────────────────────────────────────── + +vi.mock("@/lib/prisma", () => ({ + prisma: mockDeep(), +})); + +vi.mock("@/server/services/crypto", () => ({ + decrypt: vi.fn().mockReturnValue("test-secret"), + encrypt: vi.fn(), +})); + +vi.mock("@/server/services/url-validation", () => ({ + validatePublicUrl: vi.fn().mockResolvedValue(undefined), +})); + +// ─── Import after mocks ──────────────────────────────────────────────────── + +import { prisma } from "@/lib/prisma"; +import { + deliverOutboundWebhook, + fireOutboundWebhooks, + isPermanentFailure, +} from "@/server/services/outbound-webhook"; +import { AlertMetric } from "@/generated/prisma"; + +const mockPrisma = prisma as ReturnType>; + +// ─── Helpers ─────────────────────────────────────────────────────────────── + +function makeEndpoint(overrides: Partial<{ + id: string; + url: string; + encryptedSecret: string | null; + teamId: string; + name: string; + eventTypes: AlertMetric[]; + enabled: boolean; + createdAt: Date; + updatedAt: Date; +}> = {}) { + return { + id: "ep-1", + url: "https://example.com/webhook", + encryptedSecret: "encrypted-secret", + teamId: "team-1", + name: "Test Endpoint", + eventTypes: [AlertMetric.deploy_completed], + enabled: true, + createdAt: new Date(), + updatedAt: new Date(), + ...overrides, + }; +} + +const samplePayload = { + type: "deploy_completed", + timestamp: new Date().toISOString(), + data: { pipelineId: "pipe-1" }, +}; + +// ─── Tests ───────────────────────────────────────────────────────────────── + +describe("deliverOutboundWebhook", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(urlValidation.validatePublicUrl).mockResolvedValue(undefined); + vi.mocked(cryptoMod.decrypt).mockReturnValue("test-secret"); + }); + + it("signs payload with Standard-Webhooks headers", async () => { + const fetchSpy = vi.fn().mockResolvedValue({ + ok: true, + status: 200, + }); + vi.stubGlobal("fetch", fetchSpy); + + const endpoint = makeEndpoint(); + const result = await deliverOutboundWebhook(endpoint, samplePayload); + + expect(result.success).toBe(true); + expect(fetchSpy).toHaveBeenCalledOnce(); + + const [, init] = fetchSpy.mock.calls[0] as [string, RequestInit]; + const headers = init.headers as Record; + + // webhook-id must be a UUID + expect(headers["webhook-id"]).toMatch( + /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i, + ); + + // webhook-timestamp must be an integer seconds string + const ts = parseInt(headers["webhook-timestamp"], 10); + expect(isNaN(ts)).toBe(false); + expect(String(ts)).toBe(headers["webhook-timestamp"]); + expect(ts).toBeGreaterThan(1_700_000_000); // sanity: after Nov 2023 + + // webhook-signature must be v1,{base64} + expect(headers["webhook-signature"]).toMatch(/^v1,[A-Za-z0-9+/=]+$/); + + // Independently verify HMAC correctness + const msgId = headers["webhook-id"]; + const timestamp = headers["webhook-timestamp"]; + const body = init.body as string; + const signingString = `${msgId}.${timestamp}.${body}`; + const expectedSig = crypto + .createHmac("sha256", "test-secret") + .update(signingString) + .digest("base64"); + expect(headers["webhook-signature"]).toBe(`v1,${expectedSig}`); + }); + + it("uses same body string for signing and fetch", async () => { + const fetchSpy = vi.fn().mockResolvedValue({ + ok: true, + status: 200, + }); + vi.stubGlobal("fetch", fetchSpy); + + const endpoint = makeEndpoint(); + await deliverOutboundWebhook(endpoint, samplePayload); + + const [, init] = fetchSpy.mock.calls[0] as [string, RequestInit]; + const headers = init.headers as Record; + const body = init.body as string; + + const msgId = headers["webhook-id"]; + const timestamp = headers["webhook-timestamp"]; + const sig = headers["webhook-signature"].replace("v1,", ""); + + const signingString = `${msgId}.${timestamp}.${body}`; + const expectedSig = crypto + .createHmac("sha256", "test-secret") + .update(signingString) + .digest("base64"); + + expect(sig).toBe(expectedSig); + }); + + it("classifies 4xx non-429 as permanent failure", async () => { + vi.stubGlobal("fetch", vi.fn().mockResolvedValue({ + ok: false, + status: 400, + })); + + const result = await deliverOutboundWebhook(makeEndpoint(), samplePayload); + expect(result.success).toBe(false); + expect(result.isPermanent).toBe(true); + expect(result.statusCode).toBe(400); + }); + + it("classifies 429 as retryable", async () => { + vi.stubGlobal("fetch", vi.fn().mockResolvedValue({ + ok: false, + status: 429, + })); + + const result = await deliverOutboundWebhook(makeEndpoint(), samplePayload); + expect(result.success).toBe(false); + expect(result.isPermanent).toBe(false); + expect(result.statusCode).toBe(429); + }); + + it("classifies 5xx as retryable", async () => { + vi.stubGlobal("fetch", vi.fn().mockResolvedValue({ + ok: false, + status: 503, + })); + + const result = await deliverOutboundWebhook(makeEndpoint(), samplePayload); + expect(result.success).toBe(false); + expect(result.isPermanent).toBe(false); + expect(result.statusCode).toBe(503); + }); + + it("classifies DNS failure as permanent", async () => { + const dnsError = new Error("getaddrinfo ENOTFOUND example.com"); + dnsError.name = "Error"; + vi.stubGlobal("fetch", vi.fn().mockRejectedValue(dnsError)); + + const result = await deliverOutboundWebhook(makeEndpoint(), samplePayload); + expect(result.success).toBe(false); + expect(result.isPermanent).toBe(true); + }); + + it("classifies timeout as retryable", async () => { + const abortError = new Error("The operation was aborted"); + abortError.name = "AbortError"; + vi.stubGlobal("fetch", vi.fn().mockRejectedValue(abortError)); + + const result = await deliverOutboundWebhook(makeEndpoint(), samplePayload); + expect(result.success).toBe(false); + expect(result.isPermanent).toBe(false); + }); + + it("returns isPermanent true for SSRF violation", async () => { + const { TRPCError } = await import("@trpc/server"); + vi.mocked(urlValidation.validatePublicUrl).mockRejectedValue( + new TRPCError({ code: "BAD_REQUEST", message: "URL resolves to a private or reserved IP address" }), + ); + + const result = await deliverOutboundWebhook(makeEndpoint(), samplePayload); + expect(result.success).toBe(false); + expect(result.isPermanent).toBe(true); + expect(result.error).toContain("SSRF"); + }); +}); + +describe("isPermanentFailure", () => { + it("returns true for 4xx non-429", () => { + expect(isPermanentFailure({ success: false, statusCode: 400, isPermanent: true })).toBe(true); + expect(isPermanentFailure({ success: false, statusCode: 404, isPermanent: true })).toBe(true); + expect(isPermanentFailure({ success: false, statusCode: 403, isPermanent: true })).toBe(true); + }); + + it("returns false for 429", () => { + expect(isPermanentFailure({ success: false, statusCode: 429, isPermanent: false })).toBe(false); + }); + + it("returns false for 5xx", () => { + expect(isPermanentFailure({ success: false, statusCode: 500, isPermanent: false })).toBe(false); + expect(isPermanentFailure({ success: false, statusCode: 503, isPermanent: false })).toBe(false); + }); + + it("returns true for ENOTFOUND error", () => { + expect(isPermanentFailure({ success: false, error: "getaddrinfo ENOTFOUND host", isPermanent: true })).toBe(true); + }); + + it("returns true for ECONNREFUSED error", () => { + expect(isPermanentFailure({ success: false, error: "connect ECONNREFUSED 127.0.0.1:80", isPermanent: true })).toBe(true); + }); +}); + +describe("dispatchWithTracking (via fireOutboundWebhooks behavior)", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(urlValidation.validatePublicUrl).mockResolvedValue(undefined); + vi.mocked(cryptoMod.decrypt).mockReturnValue("test-secret"); + }); + + it("dispatchWithTracking sets dead_letter for permanent failures", async () => { + vi.stubGlobal("fetch", vi.fn().mockResolvedValue({ + ok: false, + status: 400, + })); + + const deliveryId = "delivery-1"; + mockPrisma.webhookDelivery.create.mockResolvedValue({ + id: deliveryId, + webhookEndpointId: "ep-1", + eventType: AlertMetric.deploy_completed, + msgId: "msg-1", + payload: {}, + status: "pending", + statusCode: null, + errorMessage: null, + attemptNumber: 1, + nextRetryAt: null, + requestedAt: new Date(), + completedAt: null, + }); + mockPrisma.webhookDelivery.update.mockResolvedValue({} as never); + + mockPrisma.webhookEndpoint.findMany.mockResolvedValue([makeEndpoint()]); + + await fireOutboundWebhooks(AlertMetric.deploy_completed, "team-1", samplePayload); + + expect(mockPrisma.webhookDelivery.update).toHaveBeenCalledWith( + expect.objectContaining({ + where: { id: deliveryId }, + data: expect.objectContaining({ + status: "dead_letter", + nextRetryAt: null, + }), + }), + ); + }); + + it("dispatchWithTracking sets failed with nextRetryAt for retryable failures", async () => { + vi.stubGlobal("fetch", vi.fn().mockResolvedValue({ + ok: false, + status: 503, + })); + + const deliveryId = "delivery-2"; + mockPrisma.webhookDelivery.create.mockResolvedValue({ + id: deliveryId, + webhookEndpointId: "ep-1", + eventType: AlertMetric.deploy_completed, + msgId: "msg-2", + payload: {}, + status: "pending", + statusCode: null, + errorMessage: null, + attemptNumber: 1, + nextRetryAt: null, + requestedAt: new Date(), + completedAt: null, + }); + mockPrisma.webhookDelivery.update.mockResolvedValue({} as never); + + mockPrisma.webhookEndpoint.findMany.mockResolvedValue([makeEndpoint()]); + + await fireOutboundWebhooks(AlertMetric.deploy_completed, "team-1", samplePayload); + + expect(mockPrisma.webhookDelivery.update).toHaveBeenCalledWith( + expect.objectContaining({ + where: { id: deliveryId }, + data: expect.objectContaining({ + status: "failed", + nextRetryAt: expect.any(Date), + }), + }), + ); + }); +}); diff --git a/src/server/services/outbound-webhook.ts b/src/server/services/outbound-webhook.ts new file mode 100644 index 00000000..78c4b6ba --- /dev/null +++ b/src/server/services/outbound-webhook.ts @@ -0,0 +1,210 @@ +import crypto from "crypto"; +import { prisma } from "@/lib/prisma"; +import { decrypt } from "@/server/services/crypto"; +import { validatePublicUrl } from "@/server/services/url-validation"; +import { getNextRetryAt } from "@/server/services/delivery-tracking"; +import type { AlertMetric } from "@/generated/prisma"; +import { debugLog } from "@/lib/logger"; + +// ─── Types ────────────────────────────────────────────────────────────────── + +export interface OutboundPayload { + type: string; // AlertMetric value + timestamp: string; // ISO-8601 + data: Record; +} + +export interface OutboundResult { + success: boolean; + statusCode?: number; + error?: string; + isPermanent: boolean; +} + +// Minimal endpoint shape needed for delivery (matches WebhookEndpoint Prisma model fields used here) +interface EndpointLike { + id: string; + url: string; + encryptedSecret: string | null; +} + +// ─── Helpers ──────────────────────────────────────────────────────────────── + +/** + * Returns true if the result represents a permanent (non-retryable) failure. + * 4xx non-429 HTTP responses and DNS/connection errors are permanent. + */ +export function isPermanentFailure(result: OutboundResult): boolean { + if (result.statusCode !== undefined) { + return result.statusCode >= 400 && result.statusCode < 500 && result.statusCode !== 429; + } + if (result.error) { + return result.error.includes("ENOTFOUND") || result.error.includes("ECONNREFUSED"); + } + return false; +} + +// ─── Core delivery ────────────────────────────────────────────────────────── + +/** + * Delivers a POST request to a webhook endpoint using Standard-Webhooks signing. + * Signing string: "{msgId}.{timestamp}.{body}" + * Headers: webhook-id, webhook-timestamp, webhook-signature (v1,{base64}) + */ +export async function deliverOutboundWebhook( + endpoint: EndpointLike, + payload: OutboundPayload, + msgId = crypto.randomUUID(), +): Promise { + // SSRF protection + try { + await validatePublicUrl(endpoint.url); + } catch { + return { success: false, error: "SSRF: private IP", isPermanent: true }; + } + + const timestamp = Math.floor(Date.now() / 1000); // integer seconds + + // Serialize body ONCE — same string used for signing AND as request body + const body = JSON.stringify(payload); + + const headers: Record = { + "Content-Type": "application/json", + "webhook-id": msgId, + "webhook-timestamp": String(timestamp), + }; + + // HMAC-SHA256 signing per Standard-Webhooks spec + if (endpoint.encryptedSecret) { + const secret = decrypt(endpoint.encryptedSecret); + const signingString = `${msgId}.${timestamp}.${body}`; + const sig = crypto + .createHmac("sha256", secret) + .update(signingString) + .digest("base64"); + headers["webhook-signature"] = `v1,${sig}`; + } + + try { + const res = await fetch(endpoint.url, { + method: "POST", + headers, + body, + signal: AbortSignal.timeout(15_000), + }); + + if (res.ok) { + return { success: true, statusCode: res.status, isPermanent: false }; + } + + const permanent = res.status >= 400 && res.status < 500 && res.status !== 429; + return { + success: false, + statusCode: res.status, + error: `HTTP ${res.status}`, + isPermanent: permanent, + }; + } catch (err) { + const message = err instanceof Error ? err.message : "Unknown delivery error"; + const permanent = message.includes("ENOTFOUND") || message.includes("ECONNREFUSED"); + return { success: false, error: message, isPermanent: permanent }; + } +} + +// ─── Dispatch with tracking ────────────────────────────────────────────────── + +/** + * Creates a WebhookDelivery record, delivers to the endpoint, and updates + * the record with the result. Permanent failures are set to "dead_letter" + * (no nextRetryAt); retryable failures get a nextRetryAt from the backoff schedule. + */ +async function dispatchWithTracking( + endpoint: EndpointLike, + payload: OutboundPayload, + metric: AlertMetric, +): Promise { + const msgId = crypto.randomUUID(); + + const delivery = await prisma.webhookDelivery.create({ + data: { + webhookEndpointId: endpoint.id, + eventType: metric, + msgId, + payload: payload as object, + status: "pending", + attemptNumber: 1, + }, + }); + + const result = await deliverOutboundWebhook(endpoint, payload, msgId); + + if (result.success) { + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { + status: "success", + statusCode: result.statusCode ?? null, + completedAt: new Date(), + }, + }); + return; + } + + if (isPermanentFailure(result)) { + // Permanent failure: dead_letter — retry service will not pick this up + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { + status: "dead_letter", + statusCode: result.statusCode ?? null, + errorMessage: result.error ?? null, + nextRetryAt: null, + completedAt: new Date(), + }, + }); + } else { + // Retryable failure: schedule next attempt + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { + status: "failed", + statusCode: result.statusCode ?? null, + errorMessage: result.error ?? null, + nextRetryAt: getNextRetryAt(1), + completedAt: new Date(), + }, + }); + } +} + +// ─── Public dispatch hook ──────────────────────────────────────────────────── + +/** + * Queries enabled webhook endpoints subscribed to the given metric for the team, + * then dispatches to each. Never throws — errors are logged. + * + * Call with: void fireOutboundWebhooks(...) — never await in critical path. + */ +export async function fireOutboundWebhooks( + metric: AlertMetric, + teamId: string, + payload: OutboundPayload, +): Promise { + const endpoints = await prisma.webhookEndpoint.findMany({ + where: { teamId, enabled: true, eventTypes: { has: metric } }, + }); + + if (endpoints.length === 0) return; + + for (const endpoint of endpoints) { + try { + await dispatchWithTracking(endpoint, payload, metric); + } catch (err) { + debugLog( + "outbound-webhook", + `Failed to dispatch webhook to endpoint ${endpoint.id}`, + err, + ); + } + } +} diff --git a/src/server/services/promotion-service.ts b/src/server/services/promotion-service.ts new file mode 100644 index 00000000..8df7ca3d --- /dev/null +++ b/src/server/services/promotion-service.ts @@ -0,0 +1,266 @@ +import { TRPCError } from "@trpc/server"; +import { prisma } from "@/lib/prisma"; +import { collectSecretRefs, convertSecretRefsToEnvVars } from "./secret-resolver"; +import { decryptNodeConfig } from "./config-crypto"; +import { copyPipelineGraph } from "./copy-pipeline-graph"; +import { fireOutboundWebhooks } from "./outbound-webhook"; +import { generateVectorYaml } from "@/lib/config-generator"; + +// ─── Types ────────────────────────────────────────────────────────────────── + +export interface PreflightResult { + missing: string[]; + present: string[]; + canProceed: boolean; +} + +export interface ExecutePromotionResult { + pipelineId: string; + pipelineName: string; +} + +export interface DiffPreviewResult { + sourceYaml: string; + targetYaml: string; +} + +// ─── Service functions ─────────────────────────────────────────────────────── + +/** + * Checks whether all SECRET[name] references used in the source pipeline's + * node configs exist as named secrets in the target environment. + * + * Returns { missing, present, canProceed } without throwing. + */ +export async function preflightSecrets( + pipelineId: string, + targetEnvironmentId: string, +): Promise { + const nodes = await prisma.pipelineNode.findMany({ + where: { pipelineId }, + select: { componentType: true, config: true }, + }); + + // Collect all SECRET[name] refs from all node configs + const allRefs = new Set(); + for (const node of nodes) { + const config = (node.config ?? {}) as Record; + const decrypted = decryptNodeConfig(node.componentType, config); + const refs = collectSecretRefs(decrypted); + for (const ref of refs) { + allRefs.add(ref); + } + } + + if (allRefs.size === 0) { + return { missing: [], present: [], canProceed: true }; + } + + // Query which secrets exist in target environment + const existingSecrets = await prisma.secret.findMany({ + where: { + environmentId: targetEnvironmentId, + name: { in: Array.from(allRefs) }, + }, + select: { name: true }, + }); + + const presentNames = new Set(existingSecrets.map((s) => s.name)); + const present: string[] = []; + const missing: string[] = []; + + for (const ref of allRefs) { + if (presentNames.has(ref)) { + present.push(ref); + } else { + missing.push(ref); + } + } + + return { + missing, + present, + canProceed: missing.length === 0, + }; +} + +/** + * Executes the promotion by creating the target pipeline via copyPipelineGraph. + * SECRET[name] references are preserved intact — they are resolved at deploy time. + * + * Must be called after a PromotionRequest record exists in DB. + * Updates the PromotionRequest with targetPipelineId, status DEPLOYED, deployedAt. + * Fires promotion_completed outbound webhook after success (non-blocking). + */ +export async function executePromotion( + requestId: string, + executorId: string, +): Promise { + // Load the request and source pipeline info + const request = await prisma.promotionRequest.findUnique({ + where: { id: requestId }, + include: { + sourcePipeline: { + select: { + name: true, + description: true, + environmentId: true, + environment: { select: { teamId: true } }, + }, + }, + targetEnvironment: { select: { name: true, teamId: true } }, + }, + }); + + if (!request) { + throw new TRPCError({ code: "NOT_FOUND", message: "Promotion request not found" }); + } + + const targetPipelineName = request.targetPipelineName ?? request.sourcePipeline.name; + const teamId = request.sourcePipeline.environment.teamId; + + // Execute in a transaction: create target pipeline + copy graph + update request + const { targetPipelineId } = await prisma.$transaction(async (tx) => { + // Check for name collision in target environment + const existing = await tx.pipeline.findFirst({ + where: { + environmentId: request.targetEnvironmentId, + name: targetPipelineName, + }, + }); + if (existing) { + throw new TRPCError({ + code: "CONFLICT", + message: `A pipeline named "${targetPipelineName}" already exists in the target environment`, + }); + } + + // Create the target pipeline + const targetPipeline = await tx.pipeline.create({ + data: { + name: targetPipelineName, + description: request.sourcePipeline.description ?? undefined, + environmentId: request.targetEnvironmentId, + globalConfig: request.globalConfigSnapshot ?? undefined, + isDraft: true, + createdById: executorId, + updatedById: executorId, + }, + }); + + // Copy nodes and edges from source pipeline WITHOUT stripping SECRET[name] refs. + // SECRET resolution happens at deploy time via secret-resolver.ts. + await copyPipelineGraph(tx, { + sourcePipelineId: request.sourcePipelineId, + targetPipelineId: targetPipeline.id, + stripSharedComponentLinks: true, + // No transformConfig — preserves SECRET[name] refs intact + }); + + // Mark request as DEPLOYED + await tx.promotionRequest.update({ + where: { id: requestId }, + data: { + targetPipelineId: targetPipeline.id, + status: "DEPLOYED", + approvedById: executorId, + reviewedAt: new Date(), + deployedAt: new Date(), + }, + }); + + return { targetPipelineId: targetPipeline.id }; + }); + + // Fire outbound webhook after successful promotion (non-blocking) + void fireOutboundWebhooks("promotion_completed", teamId ?? "", { + type: "promotion_completed", + timestamp: new Date().toISOString(), + data: { + promotionRequestId: requestId, + sourcePipelineId: request.sourcePipelineId, + targetPipelineId, + sourceEnvironmentId: request.sourceEnvironmentId, + targetEnvironmentId: request.targetEnvironmentId, + promotedBy: request.promotedById, + }, + }); + + return { pipelineId: targetPipelineId, pipelineName: targetPipelineName }; +} + +/** + * Generates a side-by-side YAML diff preview for a pipeline promotion. + * + * sourceYaml: Generated with SECRET[name] refs visible (as-stored). + * targetYaml: Generated with SECRET[name] refs converted to ${VF_SECRET_NAME} env var placeholders. + */ +export async function generateDiffPreview( + pipelineId: string, +): Promise { + const pipeline = await prisma.pipeline.findUnique({ + where: { id: pipelineId }, + include: { + nodes: true, + edges: true, + environment: { select: { name: true } }, + }, + }); + + if (!pipeline) { + throw new TRPCError({ code: "NOT_FOUND", message: "Pipeline not found" }); + } + + const flowEdges = pipeline.edges.map((e) => ({ + id: e.id, + source: e.sourceNodeId, + target: e.targetNodeId, + ...(e.sourcePort ? { sourceHandle: e.sourcePort } : {}), + })); + + // Source YAML: decrypt node configs but keep SECRET[name] refs as-is + const sourceFlowNodes = pipeline.nodes.map((n) => ({ + id: n.id, + type: n.kind.toLowerCase(), + position: { x: n.positionX, y: n.positionY }, + data: { + componentDef: { type: n.componentType, kind: n.kind.toLowerCase() }, + componentKey: n.componentKey, + config: decryptNodeConfig(n.componentType, (n.config as Record) ?? {}), + disabled: n.disabled, + }, + })); + + const sourceYaml = generateVectorYaml( + sourceFlowNodes as Parameters[0], + flowEdges as Parameters[1], + pipeline.globalConfig as Record | null, + null, + ); + + // Target YAML: convert SECRET[name] refs to ${VF_SECRET_NAME} env var placeholders + const targetFlowNodes = pipeline.nodes.map((n) => { + const decrypted = decryptNodeConfig(n.componentType, (n.config as Record) ?? {}); + const converted = convertSecretRefsToEnvVars(decrypted); + return { + id: n.id, + type: n.kind.toLowerCase(), + position: { x: n.positionX, y: n.positionY }, + data: { + componentDef: { type: n.componentType, kind: n.kind.toLowerCase() }, + componentKey: n.componentKey, + config: converted, + disabled: n.disabled, + }, + }; + }); + + const targetYaml = generateVectorYaml( + targetFlowNodes as Parameters[0], + flowEdges as Parameters[1], + pipeline.globalConfig as Record | null, + null, + ); + + return { sourceYaml, targetYaml }; +} diff --git a/src/server/services/retry-service.ts b/src/server/services/retry-service.ts index ebb887f9..db4ef86a 100644 --- a/src/server/services/retry-service.ts +++ b/src/server/services/retry-service.ts @@ -2,12 +2,14 @@ import { prisma } from "@/lib/prisma"; import { trackWebhookDelivery, trackChannelDelivery, + getNextRetryAt, } from "@/server/services/delivery-tracking"; import { deliverSingleWebhook, type WebhookPayload, } from "@/server/services/webhook-delivery"; import { getDriver } from "@/server/services/channels"; +import { deliverOutboundWebhook, isPermanentFailure } from "@/server/services/outbound-webhook"; // ─── Constants ────────────────────────────────────────────────────────────── @@ -122,6 +124,116 @@ export class RetryService { ); } } + + // Also process outbound webhook retries + await this.processOutboundRetries(); + } + + /** + * Retry loop for outbound webhook deliveries (WebhookDelivery model). + * Separate from alert delivery retries to avoid coupling. + * IMPORTANT: Only queries status: "failed" — dead_letter records are NEVER retried. + */ + async processOutboundRetries(): Promise { + let dueRetries; + try { + dueRetries = await prisma.webhookDelivery?.findMany({ + where: { + status: "failed", + nextRetryAt: { lte: new Date() }, + attemptNumber: { lt: MAX_ATTEMPT_NUMBER + 1 }, + }, + include: { + webhookEndpoint: { select: { url: true, encryptedSecret: true, enabled: true } }, + }, + orderBy: { nextRetryAt: "asc" }, + take: BATCH_SIZE, + }); + } catch (err) { + console.error("[retry-service] Error querying outbound webhook retries:", err); + return; + } + + if (!dueRetries || dueRetries.length === 0) return; + + console.log( + `[retry-service] Found ${dueRetries.length} outbound webhook retr${dueRetries.length === 1 ? "y" : "ies"}`, + ); + + for (const delivery of dueRetries) { + try { + // Claim: null out nextRetryAt so another poll cycle won't re-pick it + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { nextRetryAt: null }, + }); + + // Skip if endpoint was disabled or deleted + if (!delivery.webhookEndpoint || !delivery.webhookEndpoint.enabled) { + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { status: "dead_letter", completedAt: new Date() }, + }); + continue; + } + + const nextAttemptNumber = delivery.attemptNumber + 1; + const result = await deliverOutboundWebhook( + { + url: delivery.webhookEndpoint.url, + encryptedSecret: delivery.webhookEndpoint.encryptedSecret, + id: delivery.webhookEndpointId, + }, + delivery.payload as { type: string; timestamp: string; data: Record }, + ); + + if (result.success) { + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { + status: "success", + statusCode: result.statusCode, + attemptNumber: nextAttemptNumber, + completedAt: new Date(), + }, + }); + console.log( + `[retry-service] Outbound webhook retry succeeded (delivery=${delivery.id}, attempt=${nextAttemptNumber})`, + ); + } else if (isPermanentFailure(result)) { + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { + status: "dead_letter", + statusCode: result.statusCode, + errorMessage: result.error, + attemptNumber: nextAttemptNumber, + completedAt: new Date(), + }, + }); + console.log( + `[retry-service] Outbound webhook dead-lettered (delivery=${delivery.id}): ${result.error}`, + ); + } else { + const nextRetryAt = getNextRetryAt(nextAttemptNumber); + await prisma.webhookDelivery.update({ + where: { id: delivery.id }, + data: { + status: "failed", + statusCode: result.statusCode, + errorMessage: result.error, + attemptNumber: nextAttemptNumber, + nextRetryAt, + }, + }); + console.log( + `[retry-service] Outbound webhook retry failed (delivery=${delivery.id}, attempt=${nextAttemptNumber}): ${result.error}`, + ); + } + } catch (err) { + console.error(`[retry-service] Error retrying outbound delivery ${delivery.id}:`, err); + } + } } /** diff --git a/src/trpc/init.ts b/src/trpc/init.ts index 0a4f721f..56408dd4 100644 --- a/src/trpc/init.ts +++ b/src/trpc/init.ts @@ -270,6 +270,17 @@ export const withTeamAccess = (minRole: Role) => } } + // Resolve requestId → PromotionRequest → sourceEnvironment.teamId + if (!teamId && rawInput?.requestId) { + const promoReq = await prisma.promotionRequest.findUnique({ + where: { id: rawInput.requestId as string }, + select: { sourceEnvironment: { select: { teamId: true } } }, + }); + if (promoReq) { + teamId = promoReq.sourceEnvironment.teamId ?? undefined; + } + } + // Resolve versionId → PipelineVersion → pipeline → environment.teamId if (!teamId && rawInput?.versionId) { const version = await prisma.pipelineVersion.findUnique({ diff --git a/src/trpc/router.ts b/src/trpc/router.ts index f43f2cfb..35e0d19b 100644 --- a/src/trpc/router.ts +++ b/src/trpc/router.ts @@ -22,8 +22,11 @@ import { userPreferenceRouter } from "@/server/routers/user-preference"; import { sharedComponentRouter } from "@/server/routers/shared-component"; import { aiRouter } from "@/server/routers/ai"; import { pipelineGroupRouter } from "@/server/routers/pipeline-group"; +import { nodeGroupRouter } from "@/server/routers/node-group"; import { stagedRolloutRouter } from "@/server/routers/staged-rollout"; import { pipelineDependencyRouter } from "@/server/routers/pipeline-dependency"; +import { webhookEndpointRouter } from "@/server/routers/webhook-endpoint"; +import { promotionRouter } from "@/server/routers/promotion"; export const appRouter = router({ team: teamRouter, @@ -49,8 +52,11 @@ export const appRouter = router({ sharedComponent: sharedComponentRouter, ai: aiRouter, pipelineGroup: pipelineGroupRouter, + nodeGroup: nodeGroupRouter, stagedRollout: stagedRolloutRouter, pipelineDependency: pipelineDependencyRouter, + webhookEndpoint: webhookEndpointRouter, + promotion: promotionRouter, }); export type AppRouter = typeof appRouter;